diff --git a/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_sin_native.h b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_sin_native.h new file mode 100644 index 0000000000000000000000000000000000000000..25df4730467f389893021fce0eca5519ddc1203a --- /dev/null +++ b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_sin_native.h @@ -0,0 +1,25 @@ +#pragma once + +// @generated by torchgen/gen.py from NativeFunction.h + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +namespace at { +namespace native { +TORCH_API void _foreach_sin_out(at::TensorList self, at::TensorList out); +TORCH_API ::std::vector foreach_tensor_sin_slow(at::TensorList self); +TORCH_API void foreach_tensor_sin_slow_(at::TensorList self); +TORCH_API ::std::vector foreach_tensor_sin_cuda(at::TensorList self); +TORCH_API void foreach_tensor_sin_cuda_(at::TensorList self); +} // namespace native +} // namespace at diff --git a/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/_int_mm_ops.h b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/_int_mm_ops.h new file mode 100644 index 0000000000000000000000000000000000000000..6b4e90ef4752b129aaddc0264ca592c930c5efaa --- /dev/null +++ b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/_int_mm_ops.h @@ -0,0 +1,39 @@ +#pragma once + +// @generated by torchgen/gen.py from Operator.h + +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. 
+#include + +namespace at { +namespace _ops { + + +struct TORCH_API _int_mm { + using schema = at::Tensor (const at::Tensor &, const at::Tensor &); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_int_mm") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_int_mm(Tensor self, Tensor mat2) -> Tensor") + static at::Tensor call(const at::Tensor & self, const at::Tensor & mat2); + static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Tensor & mat2); +}; + +struct TORCH_API _int_mm_out { + using schema = at::Tensor & (const at::Tensor &, const at::Tensor &, at::Tensor &); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_int_mm") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "out") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_int_mm.out(Tensor self, Tensor mat2, *, Tensor(a!) 
out) -> Tensor(a!)") + static at::Tensor & call(const at::Tensor & self, const at::Tensor & mat2, at::Tensor & out); + static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Tensor & mat2, at::Tensor & out); +}; + +}} // namespace at::_ops diff --git a/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/_upsample_nearest_exact2d_meta.h b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/_upsample_nearest_exact2d_meta.h new file mode 100644 index 0000000000000000000000000000000000000000..5af9c522dea2cb6441a9f10cb6b87aaab8bf61c5 --- /dev/null +++ b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/_upsample_nearest_exact2d_meta.h @@ -0,0 +1,27 @@ +#pragma once + +// @generated by torchgen/gen.py from NativeMetaFunction.h + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace at { +namespace meta { + +struct TORCH_API structured__upsample_nearest_exact2d : public at::impl::MetaBase { + + + void meta(const at::Tensor & self, at::ArrayRef output_size, c10::optional scales_h, c10::optional scales_w); +}; + +} // namespace native +} // namespace at diff --git a/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/_values_copy_compositeexplicitautogradnonfunctional_dispatch.h b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/_values_copy_compositeexplicitautogradnonfunctional_dispatch.h new file mode 100644 index 0000000000000000000000000000000000000000..aa460e7de9ee8159633f4e10091b464b36db561d --- /dev/null +++ b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/_values_copy_compositeexplicitautogradnonfunctional_dispatch.h @@ -0,0 +1,23 @@ +#pragma once +// @generated by torchgen/gen.py from DispatchKeyFunction.h + +// NB: The implementing C++ file is RegisterDispatchKey.cpp + +// The only #includes we need are for custom classes that have defaults in the C++ API +#include +#include 
+#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. +#include + +namespace at { + +namespace compositeexplicitautogradnonfunctional { + +TORCH_API at::Tensor _values_copy(const at::Tensor & self); + +} // namespace compositeexplicitautogradnonfunctional +} // namespace at diff --git a/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/arctan2_compositeimplicitautograd_dispatch.h b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/arctan2_compositeimplicitautograd_dispatch.h new file mode 100644 index 0000000000000000000000000000000000000000..b10f365d03b774d0904a0defab63a07f693f1090 --- /dev/null +++ b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/arctan2_compositeimplicitautograd_dispatch.h @@ -0,0 +1,26 @@ +#pragma once +// @generated by torchgen/gen.py from DispatchKeyFunction.h + +// NB: The implementing C++ file is RegisterDispatchKey.cpp + +// The only #includes we need are for custom classes that have defaults in the C++ API +#include +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. 
+#include + +namespace at { + +namespace compositeimplicitautograd { + +TORCH_API at::Tensor arctan2(const at::Tensor & self, const at::Tensor & other); +TORCH_API at::Tensor & arctan2_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & other); +TORCH_API at::Tensor & arctan2_outf(const at::Tensor & self, const at::Tensor & other, at::Tensor & out); +TORCH_API at::Tensor & arctan2_(at::Tensor & self, const at::Tensor & other); + +} // namespace compositeimplicitautograd +} // namespace at diff --git a/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/avg_pool2d_backward_meta_dispatch.h b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/avg_pool2d_backward_meta_dispatch.h new file mode 100644 index 0000000000000000000000000000000000000000..738d2a34aa7499388f4b5ac3eec2bc532278b4a1 --- /dev/null +++ b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/avg_pool2d_backward_meta_dispatch.h @@ -0,0 +1,25 @@ +#pragma once +// @generated by torchgen/gen.py from DispatchKeyFunction.h + +// NB: The implementing C++ file is RegisterDispatchKey.cpp + +// The only #includes we need are for custom classes that have defaults in the C++ API +#include +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. 
+#include + +namespace at { + +namespace meta { + +TORCH_API at::Tensor avg_pool2d_backward(const at::Tensor & grad_output, const at::Tensor & self, at::IntArrayRef kernel_size, at::IntArrayRef stride, at::IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); +TORCH_API at::Tensor & avg_pool2d_backward_out(at::Tensor & grad_input, const at::Tensor & grad_output, const at::Tensor & self, at::IntArrayRef kernel_size, at::IntArrayRef stride, at::IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); +TORCH_API at::Tensor & avg_pool2d_backward_outf(const at::Tensor & grad_output, const at::Tensor & self, at::IntArrayRef kernel_size, at::IntArrayRef stride, at::IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override, at::Tensor & grad_input); + +} // namespace meta +} // namespace at diff --git a/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/copy_ops.h b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/copy_ops.h new file mode 100644 index 0000000000000000000000000000000000000000..6cfe461037e3fc7c4ae56277edb182c78879e65b --- /dev/null +++ b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/copy_ops.h @@ -0,0 +1,50 @@ +#pragma once + +// @generated by torchgen/gen.py from Operator.h + +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. 
+#include + +namespace at { +namespace _ops { + + +struct TORCH_API copy { + using schema = at::Tensor (const at::Tensor &, const at::Tensor &, bool); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::copy") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "copy(Tensor self, Tensor src, bool non_blocking=False) -> Tensor") + static at::Tensor call(const at::Tensor & self, const at::Tensor & src, bool non_blocking); + static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Tensor & src, bool non_blocking); +}; + +struct TORCH_API copy_ { + using schema = at::Tensor & (at::Tensor &, const at::Tensor &, bool); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::copy_") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "copy_(Tensor(a!) self, Tensor src, bool non_blocking=False) -> Tensor(a!)") + static at::Tensor & call(at::Tensor & self, const at::Tensor & src, bool non_blocking); + static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, at::Tensor & self, const at::Tensor & src, bool non_blocking); +}; + +struct TORCH_API copy_out { + using schema = at::Tensor & (const at::Tensor &, const at::Tensor &, bool, at::Tensor &); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::copy") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "out") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "copy.out(Tensor self, Tensor src, bool non_blocking=False, *, Tensor(a!) 
out) -> Tensor(a!)") + static at::Tensor & call(const at::Tensor & self, const at::Tensor & src, bool non_blocking, at::Tensor & out); + static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Tensor & src, bool non_blocking, at::Tensor & out); +}; + +}} // namespace at::_ops diff --git a/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/fractional_max_pool2d_meta_dispatch.h b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/fractional_max_pool2d_meta_dispatch.h new file mode 100644 index 0000000000000000000000000000000000000000..72aa13a125e391972648f248c28c4c2c9a3e5da5 --- /dev/null +++ b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/fractional_max_pool2d_meta_dispatch.h @@ -0,0 +1,25 @@ +#pragma once +// @generated by torchgen/gen.py from DispatchKeyFunction.h + +// NB: The implementing C++ file is RegisterDispatchKey.cpp + +// The only #includes we need are for custom classes that have defaults in the C++ API +#include +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. 
+#include + +namespace at { + +namespace meta { + +TORCH_API ::std::tuple fractional_max_pool2d(const at::Tensor & self, at::IntArrayRef kernel_size, at::IntArrayRef output_size, const at::Tensor & random_samples); +TORCH_API ::std::tuple fractional_max_pool2d_out(at::Tensor & output, at::Tensor & indices, const at::Tensor & self, at::IntArrayRef kernel_size, at::IntArrayRef output_size, const at::Tensor & random_samples); +TORCH_API ::std::tuple fractional_max_pool2d_outf(const at::Tensor & self, at::IntArrayRef kernel_size, at::IntArrayRef output_size, const at::Tensor & random_samples, at::Tensor & output, at::Tensor & indices); + +} // namespace meta +} // namespace at diff --git a/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/greater_ops.h b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/greater_ops.h new file mode 100644 index 0000000000000000000000000000000000000000..bc70eceee982fc64432d21aeb8e8c75b40bee40e --- /dev/null +++ b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/greater_ops.h @@ -0,0 +1,83 @@ +#pragma once + +// @generated by torchgen/gen.py from Operator.h + +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. +#include + +namespace at { +namespace _ops { + + +struct TORCH_API greater_Scalar_out { + using schema = at::Tensor & (const at::Tensor &, const at::Scalar &, at::Tensor &); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::greater") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Scalar_out") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "greater.Scalar_out(Tensor self, Scalar other, *, Tensor(a!) 
out) -> Tensor(a!)") + static at::Tensor & call(const at::Tensor & self, const at::Scalar & other, at::Tensor & out); + static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Scalar & other, at::Tensor & out); +}; + +struct TORCH_API greater_Scalar { + using schema = at::Tensor (const at::Tensor &, const at::Scalar &); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::greater") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Scalar") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "greater.Scalar(Tensor self, Scalar other) -> Tensor") + static at::Tensor call(const at::Tensor & self, const at::Scalar & other); + static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Scalar & other); +}; + +struct TORCH_API greater_Tensor_out { + using schema = at::Tensor & (const at::Tensor &, const at::Tensor &, at::Tensor &); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::greater") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Tensor_out") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "greater.Tensor_out(Tensor self, Tensor other, *, Tensor(a!) 
out) -> Tensor(a!)") + static at::Tensor & call(const at::Tensor & self, const at::Tensor & other, at::Tensor & out); + static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Tensor & other, at::Tensor & out); +}; + +struct TORCH_API greater_Tensor { + using schema = at::Tensor (const at::Tensor &, const at::Tensor &); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::greater") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Tensor") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "greater.Tensor(Tensor self, Tensor other) -> Tensor") + static at::Tensor call(const at::Tensor & self, const at::Tensor & other); + static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Tensor & other); +}; + +struct TORCH_API greater__Scalar { + using schema = at::Tensor & (at::Tensor &, const at::Scalar &); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::greater_") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Scalar") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "greater_.Scalar(Tensor(a!) self, Scalar other) -> Tensor(a!)") + static at::Tensor & call(at::Tensor & self, const at::Scalar & other); + static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, at::Tensor & self, const at::Scalar & other); +}; + +struct TORCH_API greater__Tensor { + using schema = at::Tensor & (at::Tensor &, const at::Tensor &); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::greater_") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Tensor") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "greater_.Tensor(Tensor(a!) 
self, Tensor other) -> Tensor(a!)") + static at::Tensor & call(at::Tensor & self, const at::Tensor & other); + static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, at::Tensor & self, const at::Tensor & other); +}; + +}} // namespace at::_ops diff --git a/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/miopen_batch_norm_backward.h b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/miopen_batch_norm_backward.h new file mode 100644 index 0000000000000000000000000000000000000000..563ee638ae2d7f48faefcdb2e3c4f0e15f74cff0 --- /dev/null +++ b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/miopen_batch_norm_backward.h @@ -0,0 +1,39 @@ +#pragma once + +// @generated by torchgen/gen.py from Function.h + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + + +#include + +namespace at { + + +// aten::miopen_batch_norm_backward(Tensor input, Tensor grad_output, Tensor weight, Tensor? running_mean, Tensor? running_var, Tensor? save_mean, Tensor? save_var, float epsilon) -> (Tensor, Tensor, Tensor) +inline ::std::tuple miopen_batch_norm_backward(const at::Tensor & input, const at::Tensor & grad_output, const at::Tensor & weight, const c10::optional & running_mean, const c10::optional & running_var, const c10::optional & save_mean, const c10::optional & save_var, double epsilon) { + return at::_ops::miopen_batch_norm_backward::call(input, grad_output, weight, running_mean, running_var, save_mean, save_var, epsilon); +} + +// aten::miopen_batch_norm_backward.out(Tensor input, Tensor grad_output, Tensor weight, Tensor? running_mean, Tensor? running_var, Tensor? save_mean, Tensor? save_var, float epsilon, *, Tensor(a!) out0, Tensor(b!) out1, Tensor(c!) 
out2) -> (Tensor(a!), Tensor(b!), Tensor(c!)) +inline ::std::tuple miopen_batch_norm_backward_out(at::Tensor & out0, at::Tensor & out1, at::Tensor & out2, const at::Tensor & input, const at::Tensor & grad_output, const at::Tensor & weight, const c10::optional & running_mean, const c10::optional & running_var, const c10::optional & save_mean, const c10::optional & save_var, double epsilon) { + return at::_ops::miopen_batch_norm_backward_out::call(input, grad_output, weight, running_mean, running_var, save_mean, save_var, epsilon, out0, out1, out2); +} +// aten::miopen_batch_norm_backward.out(Tensor input, Tensor grad_output, Tensor weight, Tensor? running_mean, Tensor? running_var, Tensor? save_mean, Tensor? save_var, float epsilon, *, Tensor(a!) out0, Tensor(b!) out1, Tensor(c!) out2) -> (Tensor(a!), Tensor(b!), Tensor(c!)) +inline ::std::tuple miopen_batch_norm_backward_outf(const at::Tensor & input, const at::Tensor & grad_output, const at::Tensor & weight, const c10::optional & running_mean, const c10::optional & running_var, const c10::optional & save_mean, const c10::optional & save_var, double epsilon, at::Tensor & out0, at::Tensor & out1, at::Tensor & out2) { + return at::_ops::miopen_batch_norm_backward_out::call(input, grad_output, weight, running_mean, running_var, save_mean, save_var, epsilon, out0, out1, out2); +} + +} diff --git a/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/mm_cpu_dispatch.h b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/mm_cpu_dispatch.h new file mode 100644 index 0000000000000000000000000000000000000000..28732f816b7a9ee8b2241e0ffeaa47977fdd3ddb --- /dev/null +++ b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/mm_cpu_dispatch.h @@ -0,0 +1,25 @@ +#pragma once +// @generated by torchgen/gen.py from DispatchKeyFunction.h + +// NB: The implementing C++ file is RegisterDispatchKey.cpp + +// The only #includes we need are for custom classes that have defaults in the C++ API +#include 
+#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. +#include + +namespace at { + +namespace cpu { + +TORCH_API at::Tensor mm(const at::Tensor & self, const at::Tensor & mat2); +TORCH_API at::Tensor & mm_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & mat2); +TORCH_API at::Tensor & mm_outf(const at::Tensor & self, const at::Tensor & mat2, at::Tensor & out); + +} // namespace cpu +} // namespace at diff --git a/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/narrow.h b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/narrow.h new file mode 100644 index 0000000000000000000000000000000000000000..5e22996339f96fa8bb686e2353729b949eb82c62 --- /dev/null +++ b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/narrow.h @@ -0,0 +1,69 @@ +#pragma once + +// @generated by torchgen/gen.py from Function.h + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + + +#include + +namespace at { + + +// aten::narrow(Tensor(a) self, int dim, SymInt start, SymInt length) -> Tensor(a) +inline at::Tensor narrow(const at::Tensor & self, int64_t dim, int64_t start, int64_t length) { + return at::_ops::narrow::call(self, dim, start, length); +} +namespace symint { + template ::value>> + at::Tensor narrow(const at::Tensor & self, int64_t dim, int64_t start, int64_t length) { + return at::_ops::narrow::call(self, dim, start, length); + } +} + +// aten::narrow(Tensor(a) self, int dim, SymInt start, SymInt length) -> Tensor(a) +inline at::Tensor narrow_symint(const at::Tensor & self, int64_t dim, c10::SymInt start, c10::SymInt length) { + return at::_ops::narrow::call(self, dim, start, length); +} +namespace symint { + template ::value>> + at::Tensor 
narrow(const at::Tensor & self, int64_t dim, c10::SymInt start, c10::SymInt length) { + return at::_ops::narrow::call(self, dim, start, length); + } +} + +// aten::narrow.Tensor(Tensor(a) self, int dim, Tensor start, SymInt length) -> Tensor(a) +inline at::Tensor narrow(const at::Tensor & self, int64_t dim, const at::Tensor & start, int64_t length) { + return at::_ops::narrow_Tensor::call(self, dim, start, length); +} +namespace symint { + template ::value>> + at::Tensor narrow(const at::Tensor & self, int64_t dim, const at::Tensor & start, int64_t length) { + return at::_ops::narrow_Tensor::call(self, dim, start, length); + } +} + +// aten::narrow.Tensor(Tensor(a) self, int dim, Tensor start, SymInt length) -> Tensor(a) +inline at::Tensor narrow_symint(const at::Tensor & self, int64_t dim, const at::Tensor & start, c10::SymInt length) { + return at::_ops::narrow_Tensor::call(self, dim, start, length); +} +namespace symint { + template ::value>> + at::Tensor narrow(const at::Tensor & self, int64_t dim, const at::Tensor & start, c10::SymInt length) { + return at::_ops::narrow_Tensor::call(self, dim, start, length); + } +} + +} diff --git a/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/put.h b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/put.h new file mode 100644 index 0000000000000000000000000000000000000000..88535f00d6708ec74c2be3be07f0673cbf8176fa --- /dev/null +++ b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/put.h @@ -0,0 +1,39 @@ +#pragma once + +// @generated by torchgen/gen.py from Function.h + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + + +#include + +namespace at { + + +// aten::put(Tensor self, Tensor index, Tensor source, bool accumulate=False) -> Tensor +inline at::Tensor put(const at::Tensor & self, const at::Tensor & index, const at::Tensor & source, bool accumulate=false) { + return at::_ops::put::call(self, index, 
source, accumulate); +} + +// aten::put.out(Tensor self, Tensor index, Tensor source, bool accumulate=False, *, Tensor(a!) out) -> Tensor(a!) +inline at::Tensor & put_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & index, const at::Tensor & source, bool accumulate=false) { + return at::_ops::put_out::call(self, index, source, accumulate, out); +} +// aten::put.out(Tensor self, Tensor index, Tensor source, bool accumulate=False, *, Tensor(a!) out) -> Tensor(a!) +inline at::Tensor & put_outf(const at::Tensor & self, const at::Tensor & index, const at::Tensor & source, bool accumulate, at::Tensor & out) { + return at::_ops::put_out::call(self, index, source, accumulate, out); +} + +} diff --git a/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/replication_pad1d_ops.h b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/replication_pad1d_ops.h new file mode 100644 index 0000000000000000000000000000000000000000..a252ee3ef0bd969bdf20e38fc5c4258836ed02f7 --- /dev/null +++ b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/replication_pad1d_ops.h @@ -0,0 +1,39 @@ +#pragma once + +// @generated by torchgen/gen.py from Operator.h + +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. 
+#include + +namespace at { +namespace _ops { + + +struct TORCH_API replication_pad1d_out { + using schema = at::Tensor & (const at::Tensor &, c10::SymIntArrayRef, at::Tensor &); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::replication_pad1d") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "out") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "replication_pad1d.out(Tensor self, SymInt[2] padding, *, Tensor(a!) out) -> Tensor(a!)") + static at::Tensor & call(const at::Tensor & self, c10::SymIntArrayRef padding, at::Tensor & out); + static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, c10::SymIntArrayRef padding, at::Tensor & out); +}; + +struct TORCH_API replication_pad1d { + using schema = at::Tensor (const at::Tensor &, c10::SymIntArrayRef); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::replication_pad1d") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "replication_pad1d(Tensor self, SymInt[2] padding) -> Tensor") + static at::Tensor call(const at::Tensor & self, c10::SymIntArrayRef padding); + static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, c10::SymIntArrayRef padding); +}; + +}} // namespace at::_ops diff --git a/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/scatter_add_compositeexplicitautogradnonfunctional_dispatch.h b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/scatter_add_compositeexplicitautogradnonfunctional_dispatch.h new file mode 100644 index 0000000000000000000000000000000000000000..3033e1c889e3187594fc085bc31f2de0f7eed04c --- /dev/null +++ 
b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/scatter_add_compositeexplicitautogradnonfunctional_dispatch.h @@ -0,0 +1,24 @@ +#pragma once +// @generated by torchgen/gen.py from DispatchKeyFunction.h + +// NB: The implementing C++ file is RegisterDispatchKey.cpp + +// The only #includes we need are for custom classes that have defaults in the C++ API +#include +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. +#include + +namespace at { + +namespace compositeexplicitautogradnonfunctional { + +TORCH_API at::Tensor scatter_add(const at::Tensor & self, int64_t dim, const at::Tensor & index, const at::Tensor & src); +TORCH_API at::Tensor & scatter_add_(at::Tensor & self, int64_t dim, const at::Tensor & index, const at::Tensor & src); + +} // namespace compositeexplicitautogradnonfunctional +} // namespace at diff --git a/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/smooth_l1_loss_backward_cpu_dispatch.h b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/smooth_l1_loss_backward_cpu_dispatch.h new file mode 100644 index 0000000000000000000000000000000000000000..b31cf7d0ed110f9532c2ef00d857f79075a06d2b --- /dev/null +++ b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/smooth_l1_loss_backward_cpu_dispatch.h @@ -0,0 +1,24 @@ +#pragma once +// @generated by torchgen/gen.py from DispatchKeyFunction.h + +// NB: The implementing C++ file is RegisterDispatchKey.cpp + +// The only #includes we need are for custom classes that have defaults in the C++ API +#include +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. 
+// This file is included by TensorBody.h, which defines the Tensor class. +#include + +namespace at { + +namespace cpu { + +TORCH_API at::Tensor & smooth_l1_loss_backward_out(at::Tensor & grad_input, const at::Tensor & grad_output, const at::Tensor & self, const at::Tensor & target, int64_t reduction, double beta); +TORCH_API at::Tensor & smooth_l1_loss_backward_outf(const at::Tensor & grad_output, const at::Tensor & self, const at::Tensor & target, int64_t reduction, double beta, at::Tensor & grad_input); + +} // namespace cpu +} // namespace at diff --git a/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/softmax_compositeimplicitautograd_dispatch.h b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/softmax_compositeimplicitautograd_dispatch.h new file mode 100644 index 0000000000000000000000000000000000000000..5321c8a3892631097788b0cbe470645a5ebcd8f0 --- /dev/null +++ b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/softmax_compositeimplicitautograd_dispatch.h @@ -0,0 +1,24 @@ +#pragma once +// @generated by torchgen/gen.py from DispatchKeyFunction.h + +// NB: The implementing C++ file is RegisterDispatchKey.cpp + +// The only #includes we need are for custom classes that have defaults in the C++ API +#include +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. 
+#include + +namespace at { + +namespace compositeimplicitautograd { + +TORCH_API at::Tensor softmax(const at::Tensor & self, int64_t dim, c10::optional dtype=c10::nullopt); +TORCH_API at::Tensor softmax(const at::Tensor & self, at::Dimname dim, c10::optional dtype=c10::nullopt); + +} // namespace compositeimplicitautograd +} // namespace at diff --git a/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/sparse_csc_tensor_native.h b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/sparse_csc_tensor_native.h new file mode 100644 index 0000000000000000000000000000000000000000..c689d9db83a82ee0b47318376796e6ef7377278d --- /dev/null +++ b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/sparse_csc_tensor_native.h @@ -0,0 +1,22 @@ +#pragma once + +// @generated by torchgen/gen.py from NativeFunction.h + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +namespace at { +namespace native { +TORCH_API at::Tensor sparse_csc_tensor(const at::Tensor & ccol_indices, const at::Tensor & row_indices, const at::Tensor & values, at::IntArrayRef size, c10::optional dtype={}, c10::optional layout={}, c10::optional device={}, c10::optional pin_memory={}); +TORCH_API at::Tensor sparse_csc_tensor(const at::Tensor & ccol_indices, const at::Tensor & row_indices, const at::Tensor & values, c10::optional dtype={}, c10::optional layout={}, c10::optional device={}, c10::optional pin_memory={}); +} // namespace native +} // namespace at diff --git a/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/special_chebyshev_polynomial_v.h b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/special_chebyshev_polynomial_v.h new file mode 100644 index 0000000000000000000000000000000000000000..cdfe3524670215ea738afc21d904254e09f8fbcb --- /dev/null +++ b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/special_chebyshev_polynomial_v.h @@ -0,0 +1,67 @@ +#pragma once + +// 
@generated by torchgen/gen.py from Function.h + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + + +#include + +namespace at { + + +// aten::special_chebyshev_polynomial_v(Tensor x, Tensor n) -> Tensor +inline at::Tensor special_chebyshev_polynomial_v(const at::Tensor & x, const at::Tensor & n) { + return at::_ops::special_chebyshev_polynomial_v::call(x, n); +} + +// aten::special_chebyshev_polynomial_v.x_scalar(Scalar x, Tensor n) -> Tensor +inline at::Tensor special_chebyshev_polynomial_v(const at::Scalar & x, const at::Tensor & n) { + return at::_ops::special_chebyshev_polynomial_v_x_scalar::call(x, n); +} + +// aten::special_chebyshev_polynomial_v.n_scalar(Tensor x, Scalar n) -> Tensor +inline at::Tensor special_chebyshev_polynomial_v(const at::Tensor & x, const at::Scalar & n) { + return at::_ops::special_chebyshev_polynomial_v_n_scalar::call(x, n); +} + +// aten::special_chebyshev_polynomial_v.out(Tensor x, Tensor n, *, Tensor(a!) out) -> Tensor(a!) +inline at::Tensor & special_chebyshev_polynomial_v_out(at::Tensor & out, const at::Tensor & x, const at::Tensor & n) { + return at::_ops::special_chebyshev_polynomial_v_out::call(x, n, out); +} +// aten::special_chebyshev_polynomial_v.out(Tensor x, Tensor n, *, Tensor(a!) out) -> Tensor(a!) +inline at::Tensor & special_chebyshev_polynomial_v_outf(const at::Tensor & x, const at::Tensor & n, at::Tensor & out) { + return at::_ops::special_chebyshev_polynomial_v_out::call(x, n, out); +} + +// aten::special_chebyshev_polynomial_v.x_scalar_out(Scalar x, Tensor n, *, Tensor(a!) out) -> Tensor(a!) +inline at::Tensor & special_chebyshev_polynomial_v_out(at::Tensor & out, const at::Scalar & x, const at::Tensor & n) { + return at::_ops::special_chebyshev_polynomial_v_x_scalar_out::call(x, n, out); +} +// aten::special_chebyshev_polynomial_v.x_scalar_out(Scalar x, Tensor n, *, Tensor(a!) out) -> Tensor(a!) 
+inline at::Tensor & special_chebyshev_polynomial_v_outf(const at::Scalar & x, const at::Tensor & n, at::Tensor & out) { + return at::_ops::special_chebyshev_polynomial_v_x_scalar_out::call(x, n, out); +} + +// aten::special_chebyshev_polynomial_v.n_scalar_out(Tensor x, Scalar n, *, Tensor(a!) out) -> Tensor(a!) +inline at::Tensor & special_chebyshev_polynomial_v_out(at::Tensor & out, const at::Tensor & x, const at::Scalar & n) { + return at::_ops::special_chebyshev_polynomial_v_n_scalar_out::call(x, n, out); +} +// aten::special_chebyshev_polynomial_v.n_scalar_out(Tensor x, Scalar n, *, Tensor(a!) out) -> Tensor(a!) +inline at::Tensor & special_chebyshev_polynomial_v_outf(const at::Tensor & x, const at::Scalar & n, at::Tensor & out) { + return at::_ops::special_chebyshev_polynomial_v_n_scalar_out::call(x, n, out); +} + +} diff --git a/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/tanh_backward_cpu_dispatch.h b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/tanh_backward_cpu_dispatch.h new file mode 100644 index 0000000000000000000000000000000000000000..83dc8433ec76324f0b4174e828a8abaa31880a34 --- /dev/null +++ b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/tanh_backward_cpu_dispatch.h @@ -0,0 +1,25 @@ +#pragma once +// @generated by torchgen/gen.py from DispatchKeyFunction.h + +// NB: The implementing C++ file is RegisterDispatchKey.cpp + +// The only #includes we need are for custom classes that have defaults in the C++ API +#include +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. 
+#include + +namespace at { + +namespace cpu { + +TORCH_API at::Tensor tanh_backward(const at::Tensor & grad_output, const at::Tensor & output); +TORCH_API at::Tensor & tanh_backward_out(at::Tensor & grad_input, const at::Tensor & grad_output, const at::Tensor & output); +TORCH_API at::Tensor & tanh_backward_outf(const at::Tensor & grad_output, const at::Tensor & output, at::Tensor & grad_input); + +} // namespace cpu +} // namespace at diff --git a/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/threshold_backward_native.h b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/threshold_backward_native.h new file mode 100644 index 0000000000000000000000000000000000000000..4220f1c6ce32a081539c42754ae1e69c4c2176ac --- /dev/null +++ b/llava_next/lib/python3.10/site-packages/torch/include/ATen/ops/threshold_backward_native.h @@ -0,0 +1,29 @@ +#pragma once + +// @generated by torchgen/gen.py from NativeFunction.h + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace at { +namespace native { +struct TORCH_API structured_threshold_backward_out : public at::meta::structured_threshold_backward { +void impl(const at::Tensor & grad_output, const at::Tensor & self, const at::Scalar & threshold, const at::Tensor & grad_input); +}; +TORCH_API at::Tensor threshold_backwards_nested(const at::Tensor & grad_output, const at::Tensor & self, const at::Scalar & threshold); +TORCH_API at::Tensor threshold_backward_sparse(const at::Tensor & grad_output, const at::Tensor & self, const at::Scalar & threshold); +TORCH_API at::Tensor & threshold_backward_sparse_out(const at::Tensor & grad_output, const at::Tensor & self, const at::Scalar & threshold, at::Tensor & grad_input); +TORCH_API at::Tensor threshold_backward_sparse_compressed(const at::Tensor & grad_output, const at::Tensor & self, const at::Scalar & threshold); +TORCH_API at::Tensor & threshold_backward_sparse_compressed_out(const 
at::Tensor & grad_output, const at::Tensor & self, const at::Scalar & threshold, at::Tensor & grad_input); +TORCH_API at::Tensor mkldnn_relu_backward(const at::Tensor & grad_output, const at::Tensor & self, const at::Scalar & threshold); +} // namespace native +} // namespace at diff --git a/vlmpy310/lib/python3.10/site-packages/asttokens-3.0.0.dist-info/INSTALLER b/vlmpy310/lib/python3.10/site-packages/asttokens-3.0.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/asttokens-3.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/vlmpy310/lib/python3.10/site-packages/asttokens-3.0.0.dist-info/LICENSE b/vlmpy310/lib/python3.10/site-packages/asttokens-3.0.0.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..8dada3edaf50dbc082c9a125058f25def75e625a --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/asttokens-3.0.0.dist-info/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vlmpy310/lib/python3.10/site-packages/asttokens-3.0.0.dist-info/METADATA b/vlmpy310/lib/python3.10/site-packages/asttokens-3.0.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..2139497f4daccad0abc5140be0f6655c7285329c --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/asttokens-3.0.0.dist-info/METADATA @@ -0,0 +1,118 @@ +Metadata-Version: 2.1 +Name: asttokens +Version: 3.0.0 +Summary: Annotate AST trees with source code positions +Home-page: https://github.com/gristlabs/asttokens +Author: Dmitry Sagalovskiy, Grist Labs +Author-email: dmitry@getgrist.com +License: Apache 2.0 +Keywords: code,ast,parse,tokenize,refactor +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Software Development :: Code Generators +Classifier: Topic :: Software Development :: Compilers +Classifier: Topic :: Software Development :: Interpreters +Classifier: Topic :: Software Development :: Pre-processors +Classifier: Environment :: Console +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: 
Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: >=3.8 +License-File: LICENSE +Provides-Extra: astroid +Requires-Dist: astroid<4,>=2; extra == "astroid" +Provides-Extra: test +Requires-Dist: astroid<4,>=2; extra == "test" +Requires-Dist: pytest; extra == "test" +Requires-Dist: pytest-cov; extra == "test" +Requires-Dist: pytest-xdist; extra == "test" + +ASTTokens +========= + +.. image:: https://img.shields.io/pypi/v/asttokens.svg + :target: https://pypi.python.org/pypi/asttokens/ +.. image:: https://img.shields.io/pypi/pyversions/asttokens.svg + :target: https://pypi.python.org/pypi/asttokens/ +.. image:: https://github.com/gristlabs/asttokens/actions/workflows/build-and-test.yml/badge.svg + :target: https://github.com/gristlabs/asttokens/actions/workflows/build-and-test.yml +.. image:: https://readthedocs.org/projects/asttokens/badge/?version=latest + :target: http://asttokens.readthedocs.io/en/latest/index.html +.. image:: https://coveralls.io/repos/github/gristlabs/asttokens/badge.svg + :target: https://coveralls.io/github/gristlabs/asttokens + +.. Start of user-guide + +The ``asttokens`` module annotates Python abstract syntax trees (ASTs) with the positions of tokens +and text in the source code that generated them. + +It makes it possible for tools that work with logical AST nodes to find the particular text that +resulted in those nodes, for example for automated refactoring or highlighting. + +Installation +------------ +asttokens is available on PyPI: https://pypi.python.org/pypi/asttokens/:: + + pip install asttokens + +The code is on GitHub: https://github.com/gristlabs/asttokens. + +The API Reference is here: http://asttokens.readthedocs.io/en/latest/api-index.html. 
+ +Usage +----- + +ASTTokens can annotate both trees built by `ast `_, +AND those built by `astroid `_. + +Here's an example: + +.. code-block:: python + + import asttokens, ast + source = "Robot('blue').walk(steps=10*n)" + atok = asttokens.ASTTokens(source, parse=True) + +Once the tree has been marked, nodes get ``.first_token``, ``.last_token`` attributes, and +the ``ASTTokens`` object offers helpful methods: + +.. code-block:: python + + attr_node = next(n for n in ast.walk(atok.tree) if isinstance(n, ast.Attribute)) + print(atok.get_text(attr_node)) + start, end = attr_node.last_token.startpos, attr_node.last_token.endpos + print(atok.text[:start] + 'RUN' + atok.text[end:]) + +Which produces this output: + +.. code-block:: text + + Robot('blue').walk + Robot('blue').RUN(steps=10*n) + +The ``ASTTokens`` object also offers methods to walk and search the list of tokens that make up +the code (or a particular AST node), which is more useful and powerful than dealing with the text +directly. + + +Contribute +---------- + +To contribute: + +1. Fork this repository, and clone your fork. +2. Install the package with test dependencies (ideally in a virtualenv) with:: + + pip install -e '.[test]' + +3. Run tests in your current interpreter with the command ``pytest`` or ``python -m pytest``. +4. Run tests across all supported interpreters with the ``tox`` command. You will need to have the interpreters installed separately. We recommend ``pyenv`` for that. Use ``tox -p auto`` to run the tests in parallel. +5. By default certain tests which take a very long time to run are skipped, but they are run in CI. + These are marked using the ``pytest`` marker ``slow`` and can be run on their own with ``pytest -m slow`` or as part of the full suite with ``pytest -m ''``. 
diff --git a/vlmpy310/lib/python3.10/site-packages/asttokens-3.0.0.dist-info/RECORD b/vlmpy310/lib/python3.10/site-packages/asttokens-3.0.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..406d4d96557666ff195a20df8cd1bf021f7a4a85 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/asttokens-3.0.0.dist-info/RECORD @@ -0,0 +1,22 @@ +asttokens-3.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +asttokens-3.0.0.dist-info/LICENSE,sha256=tAkwu8-AdEyGxGoSvJ2gVmQdcicWw3j1ZZueVV74M-E,11357 +asttokens-3.0.0.dist-info/METADATA,sha256=cg1yWNJgO6xzqQzaKsQoKJuKZMEfuJAh07iQLAgNv6k,4726 +asttokens-3.0.0.dist-info/RECORD,, +asttokens-3.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +asttokens-3.0.0.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91 +asttokens-3.0.0.dist-info/top_level.txt,sha256=nJDweSD7_NBhOlR3c8bkKJMKM-pxlAS8Kyh8GcCT2dk,10 +asttokens/__init__.py,sha256=8eONA3X-9s93-v-2gEoz4649fDUpvzBthFB5Ld7dHAg,962 +asttokens/__pycache__/__init__.cpython-310.pyc,, +asttokens/__pycache__/astroid_compat.cpython-310.pyc,, +asttokens/__pycache__/asttokens.cpython-310.pyc,, +asttokens/__pycache__/line_numbers.cpython-310.pyc,, +asttokens/__pycache__/mark_tokens.cpython-310.pyc,, +asttokens/__pycache__/util.cpython-310.pyc,, +asttokens/__pycache__/version.cpython-310.pyc,, +asttokens/astroid_compat.py,sha256=ilaVBRWcHpQ3ZLBSBs9usUwnLW3Orfn6sM89cMN8zNI,586 +asttokens/asttokens.py,sha256=CQZ0ppXgTzHGbK4dqI4toSLywHIiqNK8jIVqbQClzYI,17760 +asttokens/line_numbers.py,sha256=ODbdlHI4Iht4UnSfsxmOHCIVw4c2XX7j-MdaCa6F8bo,2834 +asttokens/mark_tokens.py,sha256=YKE88IHnYyQiNvlFlxqU-BDhRRWkYYjMEsjxKlF1cqw,21012 +asttokens/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +asttokens/util.py,sha256=zkszPUVGR0-UxZJI-I4lTrA7yH2IUOz8IBmwGas-pbs,17286 +asttokens/version.py,sha256=EPmgXOdWKks5S__ZMH7Nu6xpAeVrZpfxaFy4pykuyeI,22 diff --git 
a/vlmpy310/lib/python3.10/site-packages/asttokens-3.0.0.dist-info/REQUESTED b/vlmpy310/lib/python3.10/site-packages/asttokens-3.0.0.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/vlmpy310/lib/python3.10/site-packages/asttokens-3.0.0.dist-info/WHEEL b/vlmpy310/lib/python3.10/site-packages/asttokens-3.0.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..ae527e7d64811439e61b93aa375defb30e06edfe --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/asttokens-3.0.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (75.6.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/vlmpy310/lib/python3.10/site-packages/asttokens-3.0.0.dist-info/top_level.txt b/vlmpy310/lib/python3.10/site-packages/asttokens-3.0.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..7adf4c51fd2d7b06ec051d95245af6cb8c5931ed --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/asttokens-3.0.0.dist-info/top_level.txt @@ -0,0 +1 @@ +asttokens diff --git a/vlmpy310/lib/python3.10/site-packages/jupyter_server_terminals-0.5.3.dist-info/INSTALLER b/vlmpy310/lib/python3.10/site-packages/jupyter_server_terminals-0.5.3.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/jupyter_server_terminals-0.5.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/vlmpy310/lib/python3.10/site-packages/jupyter_server_terminals-0.5.3.dist-info/METADATA b/vlmpy310/lib/python3.10/site-packages/jupyter_server_terminals-0.5.3.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..3f11f3e4ffbdc367a6c301cb45a6283b7f9d9ac0 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/jupyter_server_terminals-0.5.3.dist-info/METADATA @@ -0,0 +1,123 @@ 
+Metadata-Version: 2.1 +Name: jupyter_server_terminals +Version: 0.5.3 +Summary: A Jupyter Server Extension Providing Terminals. +Project-URL: Homepage, https://jupyter.org +Author-email: Jupyter Development Team +License: BSD 3-Clause License + + - Copyright (c) 2021-, Jupyter Development Team + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + All rights reserved. + + 1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + 3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+License-File: LICENSE +Keywords: ipython,jupyter +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Science/Research +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Requires-Python: >=3.8 +Requires-Dist: pywinpty>=2.0.3; os_name == 'nt' +Requires-Dist: terminado>=0.8.3 +Provides-Extra: docs +Requires-Dist: jinja2; extra == 'docs' +Requires-Dist: jupyter-server; extra == 'docs' +Requires-Dist: mistune<4.0; extra == 'docs' +Requires-Dist: myst-parser; extra == 'docs' +Requires-Dist: nbformat; extra == 'docs' +Requires-Dist: packaging; extra == 'docs' +Requires-Dist: pydata-sphinx-theme; extra == 'docs' +Requires-Dist: sphinxcontrib-github-alt; extra == 'docs' +Requires-Dist: sphinxcontrib-openapi; extra == 'docs' +Requires-Dist: sphinxcontrib-spelling; extra == 'docs' +Requires-Dist: sphinxemoji; extra == 'docs' +Requires-Dist: tornado; extra == 'docs' +Provides-Extra: test +Requires-Dist: jupyter-server>=2.0.0; extra == 'test' +Requires-Dist: pytest-jupyter[server]>=0.5.3; extra == 'test' +Requires-Dist: pytest-timeout; extra == 'test' +Requires-Dist: pytest>=7.0; extra == 'test' +Description-Content-Type: text/markdown + +# Jupyter Server Terminals + +[![Build Status](https://github.com/jupyter-server/jupyter_server_terminals/actions/workflows/test.yml/badge.svg?query=branch%3Amain++)](https://github.com/jupyter-server/jupyter_server_terminals/actions?query=branch%3Amain++) +[![Documentation Status](https://readthedocs.org/projects/jupyter-server-terminals/badge/?version=latest)](http://jupyter-server-terminals.readthedocs.io/en/latest/?badge=latest) + +Jupyter Server Terminals is a Jupyter Server Extension providing support for 
terminals. + +## Installation and Basic usage + +To install the latest release locally, make sure you have +[pip installed](https://pip.readthedocs.io/en/stable/installing/) and run: + +``` +pip install jupyter_server_terminals +``` + +Jupyter Server Terminals currently supports Python>=3.6 on Linux, OSX and Windows. + +### Testing + +See [CONTRIBUTING](./CONTRIBUTING.rst#running-tests). + +## Contributing + +If you are interested in contributing to the project, see [CONTRIBUTING](./CONTRIBUTING.rst). + +## About the Jupyter Development Team + +The Jupyter Development Team is the set of all contributors to the Jupyter project. +This includes all of the Jupyter subprojects. + +The core team that coordinates development on GitHub can be found here: +https://github.com/jupyter/. + +## Our Copyright Policy + +Jupyter uses a shared copyright model. Each contributor maintains copyright +over their contributions to Jupyter. But, it is important to note that these +contributions are typically only changes to the repositories. Thus, the Jupyter +source code, in its entirety is not the copyright of any single person or +institution. Instead, it is the collective copyright of the entire Jupyter +Development Team. If individual contributors want to maintain a record of what +changes/contributions they have specific copyright on, they should indicate +their copyright in the commit message of the change, when they commit the +change to one of the Jupyter repositories. + +With this in mind, the following banner should be used in any source code file +to indicate the copyright and license terms: + +``` +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. 
+``` diff --git a/vlmpy310/lib/python3.10/site-packages/jupyter_server_terminals-0.5.3.dist-info/RECORD b/vlmpy310/lib/python3.10/site-packages/jupyter_server_terminals-0.5.3.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..74c5c5873cc7be3a915841f38c8eee2b622a01a2 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/jupyter_server_terminals-0.5.3.dist-info/RECORD @@ -0,0 +1,23 @@ +../../../etc/jupyter/jupyter_server_config.d/jupyter_server_terminals.json,sha256=y8WPAYC5-QiOb71QCYYqLOLfa6IqQvEoO9xkcpPCJXM,99 +jupyter_server_terminals-0.5.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +jupyter_server_terminals-0.5.3.dist-info/METADATA,sha256=xiU6R5410zH4B9QFs-wGiCRrCu-IYkZHE5_ub9i8Ly0,5643 +jupyter_server_terminals-0.5.3.dist-info/RECORD,, +jupyter_server_terminals-0.5.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +jupyter_server_terminals-0.5.3.dist-info/WHEEL,sha256=TJPnKdtrSue7xZ_AVGkp9YXcvDrobsjBds1du3Nx6dc,87 +jupyter_server_terminals-0.5.3.dist-info/licenses/LICENSE,sha256=6XiTJK33r7MrG_6yzLgR5kvm0ZabAoDTVaEP7o9ZaW4,1536 +jupyter_server_terminals/__init__.py,sha256=zL1UIfLwPzH2AW9_gpQEpqysv01nVRrrCkNxAyMMR-0,699 +jupyter_server_terminals/__pycache__/__init__.cpython-310.pyc,, +jupyter_server_terminals/__pycache__/_version.cpython-310.pyc,, +jupyter_server_terminals/__pycache__/api_handlers.cpython-310.pyc,, +jupyter_server_terminals/__pycache__/app.cpython-310.pyc,, +jupyter_server_terminals/__pycache__/base.cpython-310.pyc,, +jupyter_server_terminals/__pycache__/handlers.cpython-310.pyc,, +jupyter_server_terminals/__pycache__/terminalmanager.cpython-310.pyc,, +jupyter_server_terminals/_version.py,sha256=09_CyJnnlJRPrr_11DXA0JYarbSxhUGsVItiSEmTCEk,71 +jupyter_server_terminals/api_handlers.py,sha256=bx5yldJOon6wNsSmNk0CA0P113WHCCCL2XFqKdp5t6U,2846 +jupyter_server_terminals/app.py,sha256=oHP8lSw8ian8Aah9dJy6IomkWuBB5vgQ3fdINp8SrSw,5240 
+jupyter_server_terminals/base.py,sha256=rlnY_cAuWdjZtPeRVdErYrDnVfQV0y2pQwuUcDn6aiI,485 +jupyter_server_terminals/handlers.py,sha256=sEGSp1VgBrbOUUjzVgJV3Q6s8FVyBUamFXSvlGDAWHE,2887 +jupyter_server_terminals/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +jupyter_server_terminals/rest-api.yml,sha256=FsoJdwvBsRbDhYooMj8oUvGnzcYybd9KAXhZfD14esA,3045 +jupyter_server_terminals/terminalmanager.py,sha256=fdlxYVr2sYuEOPRGeb9FUfg316L8rSlzxyMs1OoFH1A,6892 diff --git a/vlmpy310/lib/python3.10/site-packages/jupyter_server_terminals-0.5.3.dist-info/REQUESTED b/vlmpy310/lib/python3.10/site-packages/jupyter_server_terminals-0.5.3.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/vlmpy310/lib/python3.10/site-packages/jupyter_server_terminals-0.5.3.dist-info/WHEEL b/vlmpy310/lib/python3.10/site-packages/jupyter_server_terminals-0.5.3.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..5998f3aab327ceb8cb346647a3461e220359aebf --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/jupyter_server_terminals-0.5.3.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.21.1 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/vlmpy310/lib/python3.10/site-packages/jupyter_server_terminals-0.5.3.dist-info/licenses/LICENSE b/vlmpy310/lib/python3.10/site-packages/jupyter_server_terminals-0.5.3.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..6540246ca7373ded715ca272f09c36d312c28d02 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/jupyter_server_terminals-0.5.3.dist-info/licenses/LICENSE @@ -0,0 +1,30 @@ +BSD 3-Clause License + +- Copyright (c) 2021-, Jupyter Development Team + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +All rights reserved. + +1. 
Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/vlmpy310/lib/python3.10/site-packages/nvidia_cufft_cu11-10.9.0.58.dist-info/INSTALLER b/vlmpy310/lib/python3.10/site-packages/nvidia_cufft_cu11-10.9.0.58.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/nvidia_cufft_cu11-10.9.0.58.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/vlmpy310/lib/python3.10/site-packages/nvidia_cufft_cu11-10.9.0.58.dist-info/License.txt b/vlmpy310/lib/python3.10/site-packages/nvidia_cufft_cu11-10.9.0.58.dist-info/License.txt new file mode 100644 index 0000000000000000000000000000000000000000..b491c70e0aef319022ded661e111ddbd45b8a17f --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/nvidia_cufft_cu11-10.9.0.58.dist-info/License.txt @@ -0,0 +1,1568 @@ +End User License Agreement +-------------------------- + + +Preface +------- + +The Software License Agreement in Chapter 1 and the Supplement +in Chapter 2 contain license terms and conditions that govern +the use of NVIDIA software. By accepting this agreement, you +agree to comply with all the terms and conditions applicable +to the product(s) included herein. + + +NVIDIA Driver + + +Description + +This package contains the operating system driver and +fundamental system software components for NVIDIA GPUs. + + +NVIDIA CUDA Toolkit + + +Description + +The NVIDIA CUDA Toolkit provides command-line and graphical +tools for building, debugging and optimizing the performance +of applications accelerated by NVIDIA GPUs, runtime and math +libraries, and documentation including programming guides, +user manuals, and API references. 
+ + +Default Install Location of CUDA Toolkit + +Windows platform: + +%ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v#.# + +Linux platform: + +/usr/local/cuda-#.# + +Mac platform: + +/Developer/NVIDIA/CUDA-#.# + + +NVIDIA CUDA Samples + + +Description + +This package includes over 100+ CUDA examples that demonstrate +various CUDA programming principles, and efficient CUDA +implementation of algorithms in specific application domains. + + +Default Install Location of CUDA Samples + +Windows platform: + +%ProgramData%\NVIDIA Corporation\CUDA Samples\v#.# + +Linux platform: + +/usr/local/cuda-#.#/samples + +and + +$HOME/NVIDIA_CUDA-#.#_Samples + +Mac platform: + +/Developer/NVIDIA/CUDA-#.#/samples + + +NVIDIA Nsight Visual Studio Edition (Windows only) + + +Description + +NVIDIA Nsight Development Platform, Visual Studio Edition is a +development environment integrated into Microsoft Visual +Studio that provides tools for debugging, profiling, analyzing +and optimizing your GPU computing and graphics applications. + + +Default Install Location of Nsight Visual Studio Edition + +Windows platform: + +%ProgramFiles(x86)%\NVIDIA Corporation\Nsight Visual Studio Edition #.# + + +1. License Agreement for NVIDIA Software Development Kits +--------------------------------------------------------- + + +Release Date: July 26, 2018 +--------------------------- + + +Important NoticeRead before downloading, installing, +copying or using the licensed software: +------------------------------------------------------- + +This license agreement, including exhibits attached +("Agreement”) is a legal agreement between you and NVIDIA +Corporation ("NVIDIA") and governs your use of a NVIDIA +software development kit (“SDK”). 
+ +Each SDK has its own set of software and materials, but here +is a description of the types of items that may be included in +a SDK: source code, header files, APIs, data sets and assets +(examples include images, textures, models, scenes, videos, +native API input/output files), binary software, sample code, +libraries, utility programs, programming code and +documentation. + +This Agreement can be accepted only by an adult of legal age +of majority in the country in which the SDK is used. + +If you are entering into this Agreement on behalf of a company +or other legal entity, you represent that you have the legal +authority to bind the entity to this Agreement, in which case +“you” will mean the entity you represent. + +If you don’t have the required age or authority to accept +this Agreement, or if you don’t accept all the terms and +conditions of this Agreement, do not download, install or use +the SDK. + +You agree to use the SDK only for purposes that are permitted +by (a) this Agreement, and (b) any applicable law, regulation +or generally accepted practices or guidelines in the relevant +jurisdictions. + + +1.1. License + + +1.1.1. License Grant + +Subject to the terms of this Agreement, NVIDIA hereby grants +you a non-exclusive, non-transferable license, without the +right to sublicense (except as expressly provided in this +Agreement) to: + + 1. Install and use the SDK, + + 2. Modify and create derivative works of sample source code + delivered in the SDK, and + + 3. Distribute those portions of the SDK that are identified + in this Agreement as distributable, as incorporated in + object code format into a software application that meets + the distribution requirements indicated in this Agreement. + + +1.1.2. Distribution Requirements + +These are the distribution requirements for you to exercise +the distribution grant: + + 1. Your application must have material additional + functionality, beyond the included portions of the SDK. + + 2. 
The distributable portions of the SDK shall only be + accessed by your application. + + 3. The following notice shall be included in modifications + and derivative works of sample source code distributed: + “This software contains source code provided by NVIDIA + Corporation.” + + 4. Unless a developer tool is identified in this Agreement + as distributable, it is delivered for your internal use + only. + + 5. The terms under which you distribute your application + must be consistent with the terms of this Agreement, + including (without limitation) terms relating to the + license grant and license restrictions and protection of + NVIDIA’s intellectual property rights. Additionally, you + agree that you will protect the privacy, security and + legal rights of your application users. + + 6. You agree to notify NVIDIA in writing of any known or + suspected distribution or use of the SDK not in compliance + with the requirements of this Agreement, and to enforce + the terms of your agreements with respect to distributed + SDK. + + +1.1.3. Authorized Users + +You may allow employees and contractors of your entity or of +your subsidiary(ies) to access and use the SDK from your +secure network to perform work on your behalf. + +If you are an academic institution you may allow users +enrolled or employed by the academic institution to access and +use the SDK from your secure network. + +You are responsible for the compliance with the terms of this +Agreement by your authorized users. If you become aware that +your authorized users didn’t follow the terms of this +Agreement, you agree to take reasonable steps to resolve the +non-compliance and prevent new occurrences. + + +1.1.4. 
Pre-Release SDK + +The SDK versions identified as alpha, beta, preview or +otherwise as pre-release, may not be fully functional, may +contain errors or design flaws, and may have reduced or +different security, privacy, accessibility, availability, and +reliability standards relative to commercial versions of +NVIDIA software and materials. Use of a pre-release SDK may +result in unexpected results, loss of data, project delays or +other unpredictable damage or loss. + +You may use a pre-release SDK at your own risk, understanding +that pre-release SDKs are not intended for use in production +or business-critical systems. + +NVIDIA may choose not to make available a commercial version +of any pre-release SDK. NVIDIA may also choose to abandon +development and terminate the availability of a pre-release +SDK at any time without liability. + + +1.1.5. Updates + +NVIDIA may, at its option, make available patches, workarounds +or other updates to this SDK. Unless the updates are provided +with their separate governing terms, they are deemed part of +the SDK licensed to you as provided in this Agreement. You +agree that the form and content of the SDK that NVIDIA +provides may change without prior notice to you. While NVIDIA +generally maintains compatibility between versions, NVIDIA may +in some cases make changes that introduce incompatibilities in +future versions of the SDK. + + +1.1.6. Third Party Licenses + +The SDK may come bundled with, or otherwise include or be +distributed with, third party software licensed by a NVIDIA +supplier and/or open source software provided under an open +source license. Use of third party software is subject to the +third-party license terms, or in the absence of third party +terms, the terms of this Agreement. Copyright to third party +software is held by the copyright holders indicated in the +third-party software or license. + + +1.1.7. 
Reservation of Rights + +NVIDIA reserves all rights, title, and interest in and to the +SDK, not expressly granted to you under this Agreement. + + +1.2. Limitations + +The following license limitations apply to your use of the +SDK: + + 1. You may not reverse engineer, decompile or disassemble, + or remove copyright or other proprietary notices from any + portion of the SDK or copies of the SDK. + + 2. Except as expressly provided in this Agreement, you may + not copy, sell, rent, sublicense, transfer, distribute, + modify, or create derivative works of any portion of the + SDK. For clarity, you may not distribute or sublicense the + SDK as a stand-alone product. + + 3. Unless you have an agreement with NVIDIA for this + purpose, you may not indicate that an application created + with the SDK is sponsored or endorsed by NVIDIA. + + 4. You may not bypass, disable, or circumvent any + encryption, security, digital rights management or + authentication mechanism in the SDK. + + 5. You may not use the SDK in any manner that would cause it + to become subject to an open source software license. As + examples, licenses that require as a condition of use, + modification, and/or distribution that the SDK be: + + a. Disclosed or distributed in source code form; + + b. Licensed for the purpose of making derivative works; + or + + c. Redistributable at no charge. + + 6. Unless you have an agreement with NVIDIA for this + purpose, you may not use the SDK with any system or + application where the use or failure of the system or + application can reasonably be expected to threaten or + result in personal injury, death, or catastrophic loss. + Examples include use in avionics, navigation, military, + medical, life support or other life critical applications. + NVIDIA does not design, test or manufacture the SDK for + these critical uses and NVIDIA shall not be liable to you + or any third party, in whole or in part, for any claims or + damages arising from such uses. + + 7. 
You agree to defend, indemnify and hold harmless NVIDIA + and its affiliates, and their respective employees, + contractors, agents, officers and directors, from and + against any and all claims, damages, obligations, losses, + liabilities, costs or debt, fines, restitutions and + expenses (including but not limited to attorney’s fees + and costs incident to establishing the right of + indemnification) arising out of or related to your use of + the SDK outside of the scope of this Agreement, or not in + compliance with its terms. + + +1.3. Ownership + + 1. NVIDIA or its licensors hold all rights, title and + interest in and to the SDK and its modifications and + derivative works, including their respective intellectual + property rights, subject to your rights described in this + section. This SDK may include software and materials from + NVIDIA’s licensors, and these licensors are intended + third party beneficiaries that may enforce this Agreement + with respect to their intellectual property rights. + + 2. You hold all rights, title and interest in and to your + applications and your derivative works of the sample + source code delivered in the SDK, including their + respective intellectual property rights, subject to + NVIDIA’s rights described in this section. + + 3. You may, but don’t have to, provide to NVIDIA + suggestions, feature requests or other feedback regarding + the SDK, including possible enhancements or modifications + to the SDK. For any feedback that you voluntarily provide, + you hereby grant NVIDIA and its affiliates a perpetual, + non-exclusive, worldwide, irrevocable license to use, + reproduce, modify, license, sublicense (through multiple + tiers of sublicensees), and distribute (through multiple + tiers of distributors) it without the payment of any + royalties or fees to you. NVIDIA will use feedback at its + choice. 
NVIDIA is constantly looking for ways to improve + its products, so you may send feedback to NVIDIA through + the developer portal at https://developer.nvidia.com. + + +1.4. No Warranties + +THE SDK IS PROVIDED BY NVIDIA “AS IS” AND “WITH ALL +FAULTS.” TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND +ITS AFFILIATES EXPRESSLY DISCLAIM ALL WARRANTIES OF ANY KIND +OR NATURE, WHETHER EXPRESS, IMPLIED OR STATUTORY, INCLUDING, +BUT NOT LIMITED TO, ANY WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE, TITLE, NON-INFRINGEMENT, OR THE +ABSENCE OF ANY DEFECTS THEREIN, WHETHER LATENT OR PATENT. NO +WARRANTY IS MADE ON THE BASIS OF TRADE USAGE, COURSE OF +DEALING OR COURSE OF TRADE. + + +1.5. Limitation of Liability + +TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND ITS +AFFILIATES SHALL NOT BE LIABLE FOR ANY SPECIAL, INCIDENTAL, +PUNITIVE OR CONSEQUENTIAL DAMAGES, OR ANY LOST PROFITS, LOSS +OF USE, LOSS OF DATA OR LOSS OF GOODWILL, OR THE COSTS OF +PROCURING SUBSTITUTE PRODUCTS, ARISING OUT OF OR IN CONNECTION +WITH THIS AGREEMENT OR THE USE OR PERFORMANCE OF THE SDK, +WHETHER SUCH LIABILITY ARISES FROM ANY CLAIM BASED UPON BREACH +OF CONTRACT, BREACH OF WARRANTY, TORT (INCLUDING NEGLIGENCE), +PRODUCT LIABILITY OR ANY OTHER CAUSE OF ACTION OR THEORY OF +LIABILITY. IN NO EVENT WILL NVIDIA’S AND ITS AFFILIATES +TOTAL CUMULATIVE LIABILITY UNDER OR ARISING OUT OF THIS +AGREEMENT EXCEED US$10.00. THE NATURE OF THE LIABILITY OR THE +NUMBER OF CLAIMS OR SUITS SHALL NOT ENLARGE OR EXTEND THIS +LIMIT. + +These exclusions and limitations of liability shall apply +regardless if NVIDIA or its affiliates have been advised of +the possibility of such damages, and regardless of whether a +remedy fails its essential purpose. 
These exclusions and +limitations of liability form an essential basis of the +bargain between the parties, and, absent any of these +exclusions or limitations of liability, the provisions of this +Agreement, including, without limitation, the economic terms, +would be substantially different. + + +1.6. Termination + + 1. This Agreement will continue to apply until terminated by + either you or NVIDIA as described below. + + 2. If you want to terminate this Agreement, you may do so by + stopping to use the SDK. + + 3. NVIDIA may, at any time, terminate this Agreement if: + + a. (i) you fail to comply with any term of this + Agreement and the non-compliance is not fixed within + thirty (30) days following notice from NVIDIA (or + immediately if you violate NVIDIA’s intellectual + property rights); + + b. (ii) you commence or participate in any legal + proceeding against NVIDIA with respect to the SDK; or + + c. (iii) NVIDIA decides to no longer provide the SDK in + a country or, in NVIDIA’s sole discretion, the + continued use of it is no longer commercially viable. + + 4. Upon any termination of this Agreement, you agree to + promptly discontinue use of the SDK and destroy all copies + in your possession or control. Your prior distributions in + accordance with this Agreement are not affected by the + termination of this Agreement. Upon written request, you + will certify in writing that you have complied with your + commitments under this section. Upon any termination of + this Agreement all provisions survive except for the + license grant provisions. + + +1.7. General + +If you wish to assign this Agreement or your rights and +obligations, including by merger, consolidation, dissolution +or operation of law, contact NVIDIA to ask for permission. Any +attempted assignment not approved by NVIDIA in writing shall +be void and of no effect. 
NVIDIA may assign, delegate or +transfer this Agreement and its rights and obligations, and if +to a non-affiliate you will be notified. + +You agree to cooperate with NVIDIA and provide reasonably +requested information to verify your compliance with this +Agreement. + +This Agreement will be governed in all respects by the laws of +the United States and of the State of Delaware as those laws +are applied to contracts entered into and performed entirely +within Delaware by Delaware residents, without regard to the +conflicts of laws principles. The United Nations Convention on +Contracts for the International Sale of Goods is specifically +disclaimed. You agree to all terms of this Agreement in the +English language. + +The state or federal courts residing in Santa Clara County, +California shall have exclusive jurisdiction over any dispute +or claim arising out of this Agreement. Notwithstanding this, +you agree that NVIDIA shall still be allowed to apply for +injunctive remedies or an equivalent type of urgent legal +relief in any jurisdiction. + +If any court of competent jurisdiction determines that any +provision of this Agreement is illegal, invalid or +unenforceable, such provision will be construed as limited to +the extent necessary to be consistent with and fully +enforceable under the law and the remaining provisions will +remain in full force and effect. Unless otherwise specified, +remedies are cumulative. + +Each party acknowledges and agrees that the other is an +independent contractor in the performance of this Agreement. + +The SDK has been developed entirely at private expense and is +“commercial items” consisting of “commercial computer +software” and “commercial computer software +documentation” provided with RESTRICTED RIGHTS. Use, +duplication or disclosure by the U.S. Government or a U.S. 
+Government subcontractor is subject to the restrictions in +this Agreement pursuant to DFARS 227.7202-3(a) or as set forth +in subparagraphs (c)(1) and (2) of the Commercial Computer +Software - Restricted Rights clause at FAR 52.227-19, as +applicable. Contractor/manufacturer is NVIDIA, 2788 San Tomas +Expressway, Santa Clara, CA 95051. + +The SDK is subject to United States export laws and +regulations. You agree that you will not ship, transfer or +export the SDK into any country, or use the SDK in any manner, +prohibited by the United States Bureau of Industry and +Security or economic sanctions regulations administered by the +U.S. Department of Treasury’s Office of Foreign Assets +Control (OFAC), or any applicable export laws, restrictions or +regulations. These laws include restrictions on destinations, +end users and end use. By accepting this Agreement, you +confirm that you are not a resident or citizen of any country +currently embargoed by the U.S. and that you are not otherwise +prohibited from receiving the SDK. + +Any notice delivered by NVIDIA to you under this Agreement +will be delivered via mail, email or fax. You agree that any +notices that NVIDIA sends you electronically will satisfy any +legal communication requirements. Please direct your legal +notices or other correspondence to NVIDIA Corporation, 2788 +San Tomas Expressway, Santa Clara, California 95051, United +States of America, Attention: Legal Department. + +This Agreement and any exhibits incorporated into this +Agreement constitute the entire agreement of the parties with +respect to the subject matter of this Agreement and supersede +all prior negotiations or documentation exchanged between the +parties relating to this SDK license. Any additional and/or +conflicting terms on documents issued by you are null, void, +and invalid. Any amendment or waiver under this Agreement +shall be in writing and signed by representatives of both +parties. + + +2. 
CUDA Toolkit Supplement to Software License Agreement for +NVIDIA Software Development Kits +------------------------------------------------------------ + + +Release date: August 16, 2018 +----------------------------- + +The terms in this supplement govern your use of the NVIDIA +CUDA Toolkit SDK under the terms of your license agreement +(“Agreement”) as modified by this supplement. Capitalized +terms used but not defined below have the meaning assigned to +them in the Agreement. + +This supplement is an exhibit to the Agreement and is +incorporated as an integral part of the Agreement. In the +event of conflict between the terms in this supplement and the +terms in the Agreement, the terms in this supplement govern. + + +2.1. License Scope + +The SDK is licensed for you to develop applications only for +use in systems with NVIDIA GPUs. + + +2.2. Distribution + +The portions of the SDK that are distributable under the +Agreement are listed in Attachment A. + + +2.3. Operating Systems + +Those portions of the SDK designed exclusively for use on the +Linux or FreeBSD operating systems, or other operating systems +derived from the source code to these operating systems, may +be copied and redistributed for use in accordance with this +Agreement, provided that the object code files are not +modified in any way (except for unzipping of compressed +files). + + +2.4. Audio and Video Encoders and Decoders + +You acknowledge and agree that it is your sole responsibility +to obtain any additional third-party licenses required to +make, have made, use, have used, sell, import, and offer for +sale your products or services that include or incorporate any +third-party software and content relating to audio and/or +video encoders and decoders from, including but not limited +to, Microsoft, Thomson, Fraunhofer IIS, Sisvel S.p.A., +MPEG-LA, and Coding Technologies. 
NVIDIA does not grant to you +under this Agreement any necessary patent or other rights with +respect to any audio and/or video encoders and decoders. + + +2.5. Licensing + +If the distribution terms in this Agreement are not suitable +for your organization, or for any questions regarding this +Agreement, please contact NVIDIA at +nvidia-compute-license-questions@nvidia.com. + + +2.6. Attachment A + +The following portions of the SDK are distributable under the +Agreement: + +Component + +CUDA Runtime + +Windows + +cudart.dll, cudart_static.lib, cudadevrt.lib + +Mac OSX + +libcudart.dylib, libcudart_static.a, libcudadevrt.a + +Linux + +libcudart.so, libcudart_static.a, libcudadevrt.a + +Android + +libcudart.so, libcudart_static.a, libcudadevrt.a + +Component + +CUDA FFT Library + +Windows + +cufft.dll, cufftw.dll, cufft.lib, cufftw.lib + +Mac OSX + +libcufft.dylib, libcufft_static.a, libcufftw.dylib, +libcufftw_static.a + +Linux + +libcufft.so, libcufft_static.a, libcufftw.so, +libcufftw_static.a + +Android + +libcufft.so, libcufft_static.a, libcufftw.so, +libcufftw_static.a + +Component + +CUDA BLAS Library + +Windows + +cublas.dll, cublasLt.dll + +Mac OSX + +libcublas.dylib, libcublasLt.dylib, libcublas_static.a, +libcublasLt_static.a + +Linux + +libcublas.so, libcublasLt.so, libcublas_static.a, +libcublasLt_static.a + +Android + +libcublas.so, libcublasLt.so, libcublas_static.a, +libcublasLt_static.a + +Component + +NVIDIA "Drop-in" BLAS Library + +Windows + +nvblas.dll + +Mac OSX + +libnvblas.dylib + +Linux + +libnvblas.so + +Component + +CUDA Sparse Matrix Library + +Windows + +cusparse.dll, cusparse.lib + +Mac OSX + +libcusparse.dylib, libcusparse_static.a + +Linux + +libcusparse.so, libcusparse_static.a + +Android + +libcusparse.so, libcusparse_static.a + +Component + +CUDA Linear Solver Library + +Windows + +cusolver.dll, cusolver.lib + +Mac OSX + +libcusolver.dylib, libcusolver_static.a + +Linux + +libcusolver.so, libcusolver_static.a + +Android + 
+libcusolver.so, libcusolver_static.a + +Component + +CUDA Random Number Generation Library + +Windows + +curand.dll, curand.lib + +Mac OSX + +libcurand.dylib, libcurand_static.a + +Linux + +libcurand.so, libcurand_static.a + +Android + +libcurand.so, libcurand_static.a + +Component + +CUDA Accelerated Graph Library + +Component + +NVIDIA Performance Primitives Library + +Windows + +nppc.dll, nppc.lib, nppial.dll, nppial.lib, nppicc.dll, +nppicc.lib, nppicom.dll, nppicom.lib, nppidei.dll, +nppidei.lib, nppif.dll, nppif.lib, nppig.dll, nppig.lib, +nppim.dll, nppim.lib, nppist.dll, nppist.lib, nppisu.dll, +nppisu.lib, nppitc.dll, nppitc.lib, npps.dll, npps.lib + +Mac OSX + +libnppc.dylib, libnppc_static.a, libnppial.dylib, +libnppial_static.a, libnppicc.dylib, libnppicc_static.a, +libnppicom.dylib, libnppicom_static.a, libnppidei.dylib, +libnppidei_static.a, libnppif.dylib, libnppif_static.a, +libnppig.dylib, libnppig_static.a, libnppim.dylib, +libnppisu_static.a, libnppitc.dylib, libnppitc_static.a, +libnpps.dylib, libnpps_static.a + +Linux + +libnppc.so, libnppc_static.a, libnppial.so, +libnppial_static.a, libnppicc.so, libnppicc_static.a, +libnppicom.so, libnppicom_static.a, libnppidei.so, +libnppidei_static.a, libnppif.so, libnppif_static.a +libnppig.so, libnppig_static.a, libnppim.so, +libnppim_static.a, libnppist.so, libnppist_static.a, +libnppisu.so, libnppisu_static.a, libnppitc.so +libnppitc_static.a, libnpps.so, libnpps_static.a + +Android + +libnppc.so, libnppc_static.a, libnppial.so, +libnppial_static.a, libnppicc.so, libnppicc_static.a, +libnppicom.so, libnppicom_static.a, libnppidei.so, +libnppidei_static.a, libnppif.so, libnppif_static.a +libnppig.so, libnppig_static.a, libnppim.so, +libnppim_static.a, libnppist.so, libnppist_static.a, +libnppisu.so, libnppisu_static.a, libnppitc.so +libnppitc_static.a, libnpps.so, libnpps_static.a + +Component + +NVIDIA JPEG Library + +Linux + +libnvjpeg.so, libnvjpeg_static.a + +Component + +Internal common library 
required for statically linking to +cuBLAS, cuSPARSE, cuFFT, cuRAND, nvJPEG and NPP + +Mac OSX + +libculibos.a + +Linux + +libculibos.a + +Component + +NVIDIA Runtime Compilation Library and Header + +All + +nvrtc.h + +Windows + +nvrtc.dll, nvrtc-builtins.dll + +Mac OSX + +libnvrtc.dylib, libnvrtc-builtins.dylib + +Linux + +libnvrtc.so, libnvrtc-builtins.so + +Component + +NVIDIA Optimizing Compiler Library + +Windows + +nvvm.dll + +Mac OSX + +libnvvm.dylib + +Linux + +libnvvm.so + +Component + +NVIDIA Common Device Math Functions Library + +Windows + +libdevice.10.bc + +Mac OSX + +libdevice.10.bc + +Linux + +libdevice.10.bc + +Component + +CUDA Occupancy Calculation Header Library + +All + +cuda_occupancy.h + +Component + +CUDA Half Precision Headers + +All + +cuda_fp16.h, cuda_fp16.hpp + +Component + +CUDA Profiling Tools Interface (CUPTI) Library + +Windows + +cupti.dll + +Mac OSX + +libcupti.dylib + +Linux + +libcupti.so + +Component + +NVIDIA Tools Extension Library + +Windows + +nvToolsExt.dll, nvToolsExt.lib + +Mac OSX + +libnvToolsExt.dylib + +Linux + +libnvToolsExt.so + +Component + +NVIDIA CUDA Driver Libraries + +Linux + +libcuda.so, libnvidia-fatbinaryloader.so, +libnvidia-ptxjitcompiler.so + +The NVIDIA CUDA Driver Libraries are only distributable in +applications that meet this criteria: + + 1. The application was developed starting from a NVIDIA CUDA + container obtained from Docker Hub or the NVIDIA GPU + Cloud, and + + 2. The resulting application is packaged as a Docker + container and distributed to users on Docker Hub or the + NVIDIA GPU Cloud only. + + +2.7. Attachment B + + +Additional Licensing Obligations + +The following third party components included in the SOFTWARE +are licensed to Licensee pursuant to the following terms and +conditions: + + 1. 
Licensee's use of the GDB third party component is + subject to the terms and conditions of GNU GPL v3: + + This product includes copyrighted third-party software licensed + under the terms of the GNU General Public License v3 ("GPL v3"). + All third-party software packages are copyright by their respective + authors. GPL v3 terms and conditions are hereby incorporated into + the Agreement by this reference: http://www.gnu.org/licenses/gpl.txt + + Consistent with these licensing requirements, the software + listed below is provided under the terms of the specified + open source software licenses. To obtain source code for + software provided under licenses that require + redistribution of source code, including the GNU General + Public License (GPL) and GNU Lesser General Public License + (LGPL), contact oss-requests@nvidia.com. This offer is + valid for a period of three (3) years from the date of the + distribution of this product by NVIDIA CORPORATION. + + Component License + CUDA-GDB GPL v3 + + 2. Licensee represents and warrants that any and all third + party licensing and/or royalty payment obligations in + connection with Licensee's use of the H.264 video codecs + are solely the responsibility of Licensee. + + 3. Licensee's use of the Thrust library is subject to the + terms and conditions of the Apache License Version 2.0. + All third-party software packages are copyright by their + respective authors. Apache License Version 2.0 terms and + conditions are hereby incorporated into the Agreement by + this reference. + http://www.apache.org/licenses/LICENSE-2.0.html + + In addition, Licensee acknowledges the following notice: + Thrust includes source code from the Boost Iterator, + Tuple, System, and Random Number libraries. + + Boost Software License - Version 1.0 - August 17th, 2003 + . . . . 
+ + Permission is hereby granted, free of charge, to any person or + organization obtaining a copy of the software and accompanying + documentation covered by this license (the "Software") to use, + reproduce, display, distribute, execute, and transmit the Software, + and to prepare derivative works of the Software, and to permit + third-parties to whom the Software is furnished to do so, all + subject to the following: + + The copyright notices in the Software and this entire statement, + including the above license grant, this restriction and the following + disclaimer, must be included in all copies of the Software, in whole + or in part, and all derivative works of the Software, unless such + copies or derivative works are solely in the form of machine-executable + object code generated by a source language processor. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND + NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR + ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR + OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. + + 4. Licensee's use of the LLVM third party component is + subject to the following terms and conditions: + + ====================================================== + LLVM Release License + ====================================================== + University of Illinois/NCSA + Open Source License + + Copyright (c) 2003-2010 University of Illinois at Urbana-Champaign. + All rights reserved. 
+ + Developed by: + + LLVM Team + + University of Illinois at Urbana-Champaign + + http://llvm.org + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to + deal with the Software without restriction, including without limitation the + rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimers. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimers in the + documentation and/or other materials provided with the distribution. + + * Neither the names of the LLVM Team, University of Illinois at Urbana- + Champaign, nor the names of its contributors may be used to endorse or + promote products derived from this Software without specific prior + written permission. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR + OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS WITH THE SOFTWARE. + + 5. Licensee's use (e.g. nvprof) of the PCRE third party + component is subject to the following terms and + conditions: + + ------------ + PCRE LICENCE + ------------ + PCRE is a library of functions to support regular expressions whose syntax + and semantics are as close as possible to those of the Perl 5 language. 
+ Release 8 of PCRE is distributed under the terms of the "BSD" licence, as + specified below. The documentation for PCRE, supplied in the "doc" + directory, is distributed under the same terms as the software itself. The + basic library functions are written in C and are freestanding. Also + included in the distribution is a set of C++ wrapper functions, and a just- + in-time compiler that can be used to optimize pattern matching. These are + both optional features that can be omitted when the library is built. + + THE BASIC LIBRARY FUNCTIONS + --------------------------- + Written by: Philip Hazel + Email local part: ph10 + Email domain: cam.ac.uk + University of Cambridge Computing Service, + Cambridge, England. + Copyright (c) 1997-2012 University of Cambridge + All rights reserved. + + PCRE JUST-IN-TIME COMPILATION SUPPORT + ------------------------------------- + Written by: Zoltan Herczeg + Email local part: hzmester + Emain domain: freemail.hu + Copyright(c) 2010-2012 Zoltan Herczeg + All rights reserved. + + STACK-LESS JUST-IN-TIME COMPILER + -------------------------------- + Written by: Zoltan Herczeg + Email local part: hzmester + Emain domain: freemail.hu + Copyright(c) 2009-2012 Zoltan Herczeg + All rights reserved. + + THE C++ WRAPPER FUNCTIONS + ------------------------- + Contributed by: Google Inc. + Copyright (c) 2007-2012, Google Inc. + All rights reserved. + + THE "BSD" LICENCE + ----------------- + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. 
+ + * Neither the name of the University of Cambridge nor the name of Google + Inc. nor the names of their contributors may be used to endorse or + promote products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + 6. Some of the cuBLAS library routines were written by or + derived from code written by Vasily Volkov and are subject + to the Modified Berkeley Software Distribution License as + follows: + + Copyright (c) 2007-2009, Regents of the University of California + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. 
+ * Neither the name of the University of California, Berkeley nor + the names of its contributors may be used to endorse or promote + products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, + INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING + IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + 7. Some of the cuBLAS library routines were written by or + derived from code written by Davide Barbieri and are + subject to the Modified Berkeley Software Distribution + License as follows: + + Copyright (c) 2008-2009 Davide Barbieri @ University of Rome Tor Vergata. + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * The name of the author may not be used to endorse or promote + products derived from this software without specific prior + written permission. 
+ + THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, + INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING + IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + 8. Some of the cuBLAS library routines were derived from + code developed by the University of Tennessee and are + subject to the Modified Berkeley Software Distribution + License as follows: + + Copyright (c) 2010 The University of Tennessee. + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer listed in this license in the documentation and/or + other materials provided with the distribution. + * Neither the name of the copyright holders nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 9. Some of the cuBLAS library routines were written by or + derived from code written by Jonathan Hogg and are subject + to the Modified Berkeley Software Distribution License as + follows: + + Copyright (c) 2012, The Science and Technology Facilities Council (STFC). + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of the STFC nor the names of its contributors + may be used to endorse or promote products derived from this + software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE STFC BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR + BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE + OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN + IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 10. Some of the cuBLAS library routines were written by or + derived from code written by Ahmad M. Abdelfattah, David + Keyes, and Hatem Ltaief, and are subject to the Apache + License, Version 2.0, as follows: + + -- (C) Copyright 2013 King Abdullah University of Science and Technology + Authors: + Ahmad Abdelfattah (ahmad.ahmad@kaust.edu.sa) + David Keyes (david.keyes@kaust.edu.sa) + Hatem Ltaief (hatem.ltaief@kaust.edu.sa) + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of the King Abdullah University of Science and + Technology nor the names of its contributors may be used to endorse + or promote products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE + + 11. Some of the cuSPARSE library routines were written by or + derived from code written by Li-Wen Chang and are subject + to the NCSA Open Source License as follows: + + Copyright (c) 2012, University of Illinois. + + All rights reserved. + + Developed by: IMPACT Group, University of Illinois, http://impact.crhc.illinois.edu + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal with the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimers in the documentation and/or other materials provided + with the distribution. + * Neither the names of IMPACT Group, University of Illinois, nor + the names of its contributors may be used to endorse or promote + products derived from this Software without specific prior + written permission. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT + HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER + IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR + IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE + SOFTWARE. + + 12. Some of the cuRAND library routines were written by or + derived from code written by Mutsuo Saito and Makoto + Matsumoto and are subject to the following license: + + Copyright (c) 2009, 2010 Mutsuo Saito, Makoto Matsumoto and Hiroshima + University. All rights reserved. + + Copyright (c) 2011 Mutsuo Saito, Makoto Matsumoto, Hiroshima + University and University of Tokyo. All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of the Hiroshima University nor the names of + its contributors may be used to endorse or promote products + derived from this software without specific prior written + permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 13. Some of the cuRAND library routines were derived from + code developed by D. E. Shaw Research and are subject to + the following license: + + Copyright 2010-2011, D. E. Shaw Research. + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions, and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions, and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of D. E. Shaw Research nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 14. Some of the Math library routines were written by or + derived from code developed by Norbert Juffa and are + subject to the following license: + + Copyright (c) 2015-2017, Norbert Juffa + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 15. Licensee's use of the lz4 third party component is + subject to the following terms and conditions: + + Copyright (C) 2011-2013, Yann Collet. + BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php) + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 16. The NPP library uses code from the Boost Math Toolkit, + and is subject to the following license: + + Boost Software License - Version 1.0 - August 17th, 2003 + . . . . + + Permission is hereby granted, free of charge, to any person or + organization obtaining a copy of the software and accompanying + documentation covered by this license (the "Software") to use, + reproduce, display, distribute, execute, and transmit the Software, + and to prepare derivative works of the Software, and to permit + third-parties to whom the Software is furnished to do so, all + subject to the following: + + The copyright notices in the Software and this entire statement, + including the above license grant, this restriction and the following + disclaimer, must be included in all copies of the Software, in whole + or in part, and all derivative works of the Software, unless such + copies or derivative works are solely in the form of machine-executable + object code generated by a source language processor. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND + NON-INFRINGEMENT. 
IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR + ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR + OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. + + 17. Portions of the Nsight Eclipse Edition is subject to the + following license: + + The Eclipse Foundation makes available all content in this plug-in + ("Content"). Unless otherwise indicated below, the Content is provided + to you under the terms and conditions of the Eclipse Public License + Version 1.0 ("EPL"). A copy of the EPL is available at http:// + www.eclipse.org/legal/epl-v10.html. For purposes of the EPL, "Program" + will mean the Content. + + If you did not receive this Content directly from the Eclipse + Foundation, the Content is being redistributed by another party + ("Redistributor") and different terms and conditions may apply to your + use of any object code in the Content. Check the Redistributor's + license that was provided with the Content. If no such license exists, + contact the Redistributor. Unless otherwise indicated below, the terms + and conditions of the EPL still apply to any source code in the + Content and such source code may be obtained at http://www.eclipse.org. + + 18. Some of the cuBLAS library routines uses code from + OpenAI, which is subject to the following license: + + License URL + https://github.com/openai/openai-gemm/blob/master/LICENSE + + License Text + The MIT License + + Copyright (c) 2016 OpenAI (http://openai.com), 2016 Google Inc. 
+ + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. + + 19. Licensee's use of the Visual Studio Setup Configuration + Samples is subject to the following license: + + The MIT License (MIT) + Copyright (C) Microsoft Corporation. All rights reserved. + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without restriction, + including without limitation the rights to use, copy, modify, merge, + publish, distribute, sublicense, and/or sell copies of the Software, + and to permit persons to whom the Software is furnished to do so, + subject to the following conditions: + + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + 20. Licensee's use of linmath.h header for CPU functions for + GL vector/matrix operations from lunarG is subject to the + Apache License Version 2.0. + + 21. The DX12-CUDA sample uses the d3dx12.h header, which is + subject to the MIT license . + +----------------- diff --git a/vlmpy310/lib/python3.10/site-packages/nvidia_cufft_cu11-10.9.0.58.dist-info/METADATA b/vlmpy310/lib/python3.10/site-packages/nvidia_cufft_cu11-10.9.0.58.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..f7d94c14d1f7cc38b65cc0866bf3d5f8f52fe243 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/nvidia_cufft_cu11-10.9.0.58.dist-info/METADATA @@ -0,0 +1,35 @@ +Metadata-Version: 2.1 +Name: nvidia-cufft-cu11 +Version: 10.9.0.58 +Summary: CUFFT native runtime libraries +Home-page: https://developer.nvidia.com/cuda-zone +Author: Nvidia CUDA Installer Team +Author-email: cuda_installer@nvidia.com +License: NVIDIA Proprietary Software +Keywords: cuda,nvidia,runtime,machine learning,deep learning +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: Science/Research +Classifier: License :: Other/Proprietary License +Classifier: Natural Language :: English +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: 
Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Topic :: Scientific/Engineering +Classifier: Topic :: Scientific/Engineering :: Mathematics +Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence +Classifier: Topic :: Software Development +Classifier: Topic :: Software Development :: Libraries +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX :: Linux +Requires-Python: >=3 +License-File: License.txt + +CUFFT native runtime libraries diff --git a/vlmpy310/lib/python3.10/site-packages/nvidia_cufft_cu11-10.9.0.58.dist-info/RECORD b/vlmpy310/lib/python3.10/site-packages/nvidia_cufft_cu11-10.9.0.58.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..0284ca51ff72d178d7e37c670ed7bfae587f08e6 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/nvidia_cufft_cu11-10.9.0.58.dist-info/RECORD @@ -0,0 +1,20 @@ +nvidia/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +nvidia/__pycache__/__init__.cpython-310.pyc,, +nvidia/cufft/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +nvidia/cufft/__pycache__/__init__.cpython-310.pyc,, +nvidia/cufft/include/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +nvidia/cufft/include/__pycache__/__init__.cpython-310.pyc,, +nvidia/cufft/include/cudalibxt.h,sha256=9GDuRiOzJuO61zRDhIpWpF7XHp8FXSOIlHJNoIMwOZQ,4105 +nvidia/cufft/include/cufft.h,sha256=Ui7ajKuYZcP-2bm9mpH96YN1igLKeDLgrttyc4jMQJE,12570 +nvidia/cufft/include/cufftXt.h,sha256=LfRdibvAlaNQ35vYqI4n8YcMpPYROrIjpZu2L0tISi4,11463 +nvidia/cufft/include/cufftw.h,sha256=DBrJQf-dnCWD-OYgdhnEzn8OiAX0U3xdteEaNdhs7mU,19412 +nvidia/cufft/lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+nvidia/cufft/lib/__pycache__/__init__.cpython-310.pyc,, +nvidia/cufft/lib/libcufft.so.10,sha256=ylxvgdWEkGtNMrmErYcE3WW_db2rQzTtIs5-73UBqVo,279161544 +nvidia/cufft/lib/libcufftw.so.10,sha256=GlkqWy81mpB3VQ7h_a3VjrLPnMC_q4_jl6N0-5SdoUM,1618440 +nvidia_cufft_cu11-10.9.0.58.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +nvidia_cufft_cu11-10.9.0.58.dist-info/License.txt,sha256=rW9YU_ugyg0VnQ9Y1JrkmDDC-Mk_epJki5zpCttMbM0,59262 +nvidia_cufft_cu11-10.9.0.58.dist-info/METADATA,sha256=XITT6bPOjdOxPQa-kAVw4XjFf4_iU-JoLUXrOwPJ4JA,1503 +nvidia_cufft_cu11-10.9.0.58.dist-info/RECORD,, +nvidia_cufft_cu11-10.9.0.58.dist-info/WHEEL,sha256=-kQi_VMfvRQozZJT7HUPMfY-5vLo0LVTmAylNJ3Ft98,106 +nvidia_cufft_cu11-10.9.0.58.dist-info/top_level.txt,sha256=fTkAtiFuL16nUrB9ytDDtpytz2t0B4NvYTnRzwAhO14,7 diff --git a/vlmpy310/lib/python3.10/site-packages/nvidia_cufft_cu11-10.9.0.58.dist-info/WHEEL b/vlmpy310/lib/python3.10/site-packages/nvidia_cufft_cu11-10.9.0.58.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..06e355fe0e3ed7077903f119ae6928a17da8eb6f --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/nvidia_cufft_cu11-10.9.0.58.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-manylinux1_x86_64 + diff --git a/vlmpy310/lib/python3.10/site-packages/nvidia_cufft_cu11-10.9.0.58.dist-info/top_level.txt b/vlmpy310/lib/python3.10/site-packages/nvidia_cufft_cu11-10.9.0.58.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..862f7abf232cdfbb928609856247292e81c9decb --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/nvidia_cufft_cu11-10.9.0.58.dist-info/top_level.txt @@ -0,0 +1 @@ +nvidia diff --git a/vlmpy310/lib/python3.10/site-packages/openpyxl-3.1.5.dist-info/INSTALLER b/vlmpy310/lib/python3.10/site-packages/openpyxl-3.1.5.dist-info/INSTALLER new file mode 100644 index 
0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/openpyxl-3.1.5.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/vlmpy310/lib/python3.10/site-packages/openpyxl-3.1.5.dist-info/LICENCE.rst b/vlmpy310/lib/python3.10/site-packages/openpyxl-3.1.5.dist-info/LICENCE.rst new file mode 100644 index 0000000000000000000000000000000000000000..82213c597d9f13e1ff95a500b6f9061cee200ca7 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/openpyxl-3.1.5.dist-info/LICENCE.rst @@ -0,0 +1,23 @@ +This software is under the MIT Licence +====================================== + +Copyright (c) 2010 openpyxl + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/vlmpy310/lib/python3.10/site-packages/openpyxl-3.1.5.dist-info/METADATA b/vlmpy310/lib/python3.10/site-packages/openpyxl-3.1.5.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..bac5c461eec53105b93996ac609f1a8b3025bcdb --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/openpyxl-3.1.5.dist-info/METADATA @@ -0,0 +1,86 @@ +Metadata-Version: 2.1 +Name: openpyxl +Version: 3.1.5 +Summary: A Python library to read/write Excel 2010 xlsx/xlsm files +Home-page: https://openpyxl.readthedocs.io +Author: See AUTHORS +Author-email: charlie.clark@clark-consulting.eu +License: MIT +Project-URL: Documentation, https://openpyxl.readthedocs.io/en/stable/ +Project-URL: Source, https://foss.heptapod.net/openpyxl/openpyxl +Project-URL: Tracker, https://foss.heptapod.net/openpyxl/openpyxl/-/issues +Classifier: Development Status :: 5 - Production/Stable +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Requires-Python: >=3.8 +License-File: LICENCE.rst +Requires-Dist: et-xmlfile + +.. image:: https://coveralls.io/repos/bitbucket/openpyxl/openpyxl/badge.svg?branch=default + :target: https://coveralls.io/bitbucket/openpyxl/openpyxl?branch=default + :alt: coverage status + +Introduction +------------ + +openpyxl is a Python library to read/write Excel 2010 xlsx/xlsm/xltx/xltm files. + +It was born from lack of existing library to read/write natively from Python +the Office Open XML format. 
+ +All kudos to the PHPExcel team as openpyxl was initially based on PHPExcel. + + +Security +-------- + +By default openpyxl does not guard against quadratic blowup or billion laughs +xml attacks. To guard against these attacks install defusedxml. + +Mailing List +------------ + +The user list can be found on http://groups.google.com/group/openpyxl-users + + +Sample code:: + + from openpyxl import Workbook + wb = Workbook() + + # grab the active worksheet + ws = wb.active + + # Data can be assigned directly to cells + ws['A1'] = 42 + + # Rows can also be appended + ws.append([1, 2, 3]) + + # Python types will automatically be converted + import datetime + ws['A2'] = datetime.datetime.now() + + # Save the file + wb.save("sample.xlsx") + + +Documentation +------------- + +The documentation is at: https://openpyxl.readthedocs.io + +* installation methods +* code examples +* instructions for contributing + +Release notes: https://openpyxl.readthedocs.io/en/stable/changes.html diff --git a/vlmpy310/lib/python3.10/site-packages/openpyxl-3.1.5.dist-info/WHEEL b/vlmpy310/lib/python3.10/site-packages/openpyxl-3.1.5.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..832be111324a83de65a3a27be4dcbdee7f5a6692 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/openpyxl-3.1.5.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.43.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/vlmpy310/lib/python3.10/site-packages/regex/__init__.py b/vlmpy310/lib/python3.10/site-packages/regex/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..eb06564ab033a2b0b501f7f41efb169dacd1f801 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/regex/__init__.py @@ -0,0 +1,3 @@ +from .regex import * +from . 
import regex +__all__ = regex.__all__ diff --git a/vlmpy310/lib/python3.10/site-packages/regex/__pycache__/__init__.cpython-310.pyc b/vlmpy310/lib/python3.10/site-packages/regex/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..41ddaa5b4405fdd54c24bc68b3c3ed653179e51f Binary files /dev/null and b/vlmpy310/lib/python3.10/site-packages/regex/__pycache__/__init__.cpython-310.pyc differ diff --git a/vlmpy310/lib/python3.10/site-packages/regex/__pycache__/regex.cpython-310.pyc b/vlmpy310/lib/python3.10/site-packages/regex/__pycache__/regex.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a459bf72eb625e73fe0528be0e1a8140789c0b57 Binary files /dev/null and b/vlmpy310/lib/python3.10/site-packages/regex/__pycache__/regex.cpython-310.pyc differ diff --git a/vlmpy310/lib/python3.10/site-packages/regex/_regex_core.py b/vlmpy310/lib/python3.10/site-packages/regex/_regex_core.py new file mode 100644 index 0000000000000000000000000000000000000000..b2ffeaea1f73ce386a7c23d29d34d97a6378fee6 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/regex/_regex_core.py @@ -0,0 +1,4495 @@ +# +# Secret Labs' Regular Expression Engine core module +# +# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved. +# +# This version of the SRE library can be redistributed under CNRI's +# Python 1.6 license. For any other use, please contact Secret Labs +# AB (info@pythonware.com). +# +# Portions of this engine have been developed in cooperation with +# CNRI. Hewlett-Packard provided funding for 1.6 integration and +# other compatibility work. 
+# +# 2010-01-16 mrab Python front-end re-written and extended + +import enum +import string +import unicodedata +from collections import defaultdict + +import regex._regex as _regex + +__all__ = ["A", "ASCII", "B", "BESTMATCH", "D", "DEBUG", "E", "ENHANCEMATCH", + "F", "FULLCASE", "I", "IGNORECASE", "L", "LOCALE", "M", "MULTILINE", "P", + "POSIX", "R", "REVERSE", "S", "DOTALL", "T", "TEMPLATE", "U", "UNICODE", + "V0", "VERSION0", "V1", "VERSION1", "W", "WORD", "X", "VERBOSE", "error", + "Scanner", "RegexFlag"] + +# The regex exception. +class error(Exception): + """Exception raised for invalid regular expressions. + + Attributes: + + msg: The unformatted error message + pattern: The regular expression pattern + pos: The position in the pattern where compilation failed, or None + lineno: The line number where compilation failed, unless pos is None + colno: The column number where compilation failed, unless pos is None + """ + + def __init__(self, message, pattern=None, pos=None): + newline = '\n' if isinstance(pattern, str) else b'\n' + self.msg = message + self.pattern = pattern + self.pos = pos + if pattern is not None and pos is not None: + self.lineno = pattern.count(newline, 0, pos) + 1 + self.colno = pos - pattern.rfind(newline, 0, pos) + + message = "{} at position {}".format(message, pos) + + if newline in pattern: + message += " (line {}, column {})".format(self.lineno, + self.colno) + + Exception.__init__(self, message) + +# The exception for when a positional flag has been turned on in the old +# behaviour. +class _UnscopedFlagSet(Exception): + pass + +# The exception for when parsing fails and we want to try something else. +class ParseError(Exception): + pass + +# The exception for when there isn't a valid first set. +class _FirstSetError(Exception): + pass + +# Flags. +class RegexFlag(enum.IntFlag): + A = ASCII = 0x80 # Assume ASCII locale. + B = BESTMATCH = 0x1000 # Best fuzzy match. + D = DEBUG = 0x200 # Print parsed pattern. 
+ E = ENHANCEMATCH = 0x8000 # Attempt to improve the fit after finding the first + # fuzzy match. + F = FULLCASE = 0x4000 # Unicode full case-folding. + I = IGNORECASE = 0x2 # Ignore case. + L = LOCALE = 0x4 # Assume current 8-bit locale. + M = MULTILINE = 0x8 # Make anchors look for newline. + P = POSIX = 0x10000 # POSIX-style matching (leftmost longest). + R = REVERSE = 0x400 # Search backwards. + S = DOTALL = 0x10 # Make dot match newline. + U = UNICODE = 0x20 # Assume Unicode locale. + V0 = VERSION0 = 0x2000 # Old legacy behaviour. + V1 = VERSION1 = 0x100 # New enhanced behaviour. + W = WORD = 0x800 # Default Unicode word breaks. + X = VERBOSE = 0x40 # Ignore whitespace and comments. + T = TEMPLATE = 0x1 # Template (present because re module has it). + + def __repr__(self): + if self._name_ is not None: + return 'regex.%s' % self._name_ + + value = self._value_ + members = [] + negative = value < 0 + + if negative: + value = ~value + + for m in self.__class__: + if value & m._value_: + value &= ~m._value_ + members.append('regex.%s' % m._name_) + + if value: + members.append(hex(value)) + + res = '|'.join(members) + + if negative: + if len(members) > 1: + res = '~(%s)' % res + else: + res = '~%s' % res + + return res + + __str__ = object.__str__ + +globals().update(RegexFlag.__members__) + +DEFAULT_VERSION = VERSION1 + +_ALL_VERSIONS = VERSION0 | VERSION1 +_ALL_ENCODINGS = ASCII | LOCALE | UNICODE + +# The default flags for the various versions. +DEFAULT_FLAGS = {VERSION0: 0, VERSION1: FULLCASE} + +# The mask for the flags. 
+GLOBAL_FLAGS = (_ALL_VERSIONS | BESTMATCH | DEBUG | ENHANCEMATCH | POSIX | + REVERSE) +SCOPED_FLAGS = (FULLCASE | IGNORECASE | MULTILINE | DOTALL | WORD | VERBOSE | + _ALL_ENCODINGS) + +ALPHA = frozenset(string.ascii_letters) +DIGITS = frozenset(string.digits) +ALNUM = ALPHA | DIGITS +OCT_DIGITS = frozenset(string.octdigits) +HEX_DIGITS = frozenset(string.hexdigits) +SPECIAL_CHARS = frozenset("()|?*+{^$.[\\#") | frozenset([""]) +NAMED_CHAR_PART = ALNUM | frozenset(" -") +PROPERTY_NAME_PART = ALNUM | frozenset(" &_-.") +SET_OPS = ("||", "~~", "&&", "--") + +# The width of the code words inside the regex engine. +BYTES_PER_CODE = _regex.get_code_size() +BITS_PER_CODE = BYTES_PER_CODE * 8 + +# The repeat count which represents infinity. +UNLIMITED = (1 << BITS_PER_CODE) - 1 + +# The regular expression flags. +REGEX_FLAGS = {"a": ASCII, "b": BESTMATCH, "e": ENHANCEMATCH, "f": FULLCASE, + "i": IGNORECASE, "L": LOCALE, "m": MULTILINE, "p": POSIX, "r": REVERSE, + "s": DOTALL, "u": UNICODE, "V0": VERSION0, "V1": VERSION1, "w": WORD, "x": + VERBOSE} + +# The case flags. +CASE_FLAGS = FULLCASE | IGNORECASE +NOCASE = 0 +FULLIGNORECASE = FULLCASE | IGNORECASE + +FULL_CASE_FOLDING = UNICODE | FULLIGNORECASE + +CASE_FLAGS_COMBINATIONS = {0: 0, FULLCASE: 0, IGNORECASE: IGNORECASE, + FULLIGNORECASE: FULLIGNORECASE} + +# The number of digits in hexadecimal escapes. +HEX_ESCAPES = {"x": 2, "u": 4, "U": 8} + +# The names of the opcodes. 
+OPCODES = """ +FAILURE +SUCCESS +ANY +ANY_ALL +ANY_ALL_REV +ANY_REV +ANY_U +ANY_U_REV +ATOMIC +BOUNDARY +BRANCH +CALL_REF +CHARACTER +CHARACTER_IGN +CHARACTER_IGN_REV +CHARACTER_REV +CONDITIONAL +DEFAULT_BOUNDARY +DEFAULT_END_OF_WORD +DEFAULT_START_OF_WORD +END +END_OF_LINE +END_OF_LINE_U +END_OF_STRING +END_OF_STRING_LINE +END_OF_STRING_LINE_U +END_OF_WORD +FUZZY +GRAPHEME_BOUNDARY +GREEDY_REPEAT +GROUP +GROUP_CALL +GROUP_EXISTS +KEEP +LAZY_REPEAT +LOOKAROUND +NEXT +PROPERTY +PROPERTY_IGN +PROPERTY_IGN_REV +PROPERTY_REV +PRUNE +RANGE +RANGE_IGN +RANGE_IGN_REV +RANGE_REV +REF_GROUP +REF_GROUP_FLD +REF_GROUP_FLD_REV +REF_GROUP_IGN +REF_GROUP_IGN_REV +REF_GROUP_REV +SEARCH_ANCHOR +SET_DIFF +SET_DIFF_IGN +SET_DIFF_IGN_REV +SET_DIFF_REV +SET_INTER +SET_INTER_IGN +SET_INTER_IGN_REV +SET_INTER_REV +SET_SYM_DIFF +SET_SYM_DIFF_IGN +SET_SYM_DIFF_IGN_REV +SET_SYM_DIFF_REV +SET_UNION +SET_UNION_IGN +SET_UNION_IGN_REV +SET_UNION_REV +SKIP +START_OF_LINE +START_OF_LINE_U +START_OF_STRING +START_OF_WORD +STRING +STRING_FLD +STRING_FLD_REV +STRING_IGN +STRING_IGN_REV +STRING_REV +FUZZY_EXT +""" + +# Define the opcodes in a namespace. +class Namespace: + pass + +OP = Namespace() +for i, op in enumerate(OPCODES.split()): + setattr(OP, op, i) + +def _shrink_cache(cache_dict, args_dict, locale_sensitive, max_length, divisor=5): + """Make room in the given cache. + + Args: + cache_dict: The cache dictionary to modify. + args_dict: The dictionary of named list args used by patterns. + max_length: Maximum # of entries in cache_dict before it is shrunk. + divisor: Cache will shrink to max_length - 1/divisor*max_length items. + """ + # Toss out a fraction of the entries at random to make room for new ones. + # A random algorithm was chosen as opposed to simply cache_dict.popitem() + # as popitem could penalize the same regular expression repeatedly based + # on its internal hash value. Being random should spread the cache miss + # love around. 
+ cache_keys = tuple(cache_dict.keys()) + overage = len(cache_keys) - max_length + if overage < 0: + # Cache is already within limits. Normally this should not happen + # but it could due to multithreading. + return + + number_to_toss = max_length // divisor + overage + + # The import is done here to avoid a circular dependency. + import random + if not hasattr(random, 'sample'): + # Do nothing while resolving the circular dependency: + # re->random->warnings->tokenize->string->re + return + + for doomed_key in random.sample(cache_keys, number_to_toss): + try: + del cache_dict[doomed_key] + except KeyError: + # Ignore problems if the cache changed from another thread. + pass + + # Rebuild the arguments and locale-sensitivity dictionaries. + args_dict.clear() + sensitivity_dict = {} + for pattern, pattern_type, flags, args, default_version, locale in tuple(cache_dict): + args_dict[pattern, pattern_type, flags, default_version, locale] = args + try: + sensitivity_dict[pattern_type, pattern] = locale_sensitive[pattern_type, pattern] + except KeyError: + pass + + locale_sensitive.clear() + locale_sensitive.update(sensitivity_dict) + +def _fold_case(info, string): + "Folds the case of a string." + flags = info.flags + if (flags & _ALL_ENCODINGS) == 0: + flags |= info.guess_encoding + + return _regex.fold_case(flags, string) + +def is_cased_i(info, char): + "Checks whether a character is cased." + return len(_regex.get_all_cases(info.flags, char)) > 1 + +def is_cased_f(flags, char): + "Checks whether a character is cased." + return len(_regex.get_all_cases(flags, char)) > 1 + +def _compile_firstset(info, fs): + "Compiles the firstset for the pattern." + reverse = bool(info.flags & REVERSE) + fs = _check_firstset(info, reverse, fs) + if not fs: + return [] + + # Compile the firstset. + return fs.compile(reverse) + +def _check_firstset(info, reverse, fs): + "Checks the firstset for the pattern." 
+ if not fs or None in fs: + return None + + # If we ignore the case, for simplicity we won't build a firstset. + members = set() + case_flags = NOCASE + for i in fs: + if isinstance(i, Character) and not i.positive: + return None + +# if i.case_flags: +# if isinstance(i, Character): +# if is_cased_i(info, i.value): +# return [] +# elif isinstance(i, SetBase): +# return [] + case_flags |= i.case_flags + members.add(i.with_flags(case_flags=NOCASE)) + + if case_flags == (FULLCASE | IGNORECASE): + return None + + # Build the firstset. + fs = SetUnion(info, list(members), case_flags=case_flags & ~FULLCASE, + zerowidth=True) + fs = fs.optimise(info, reverse, in_set=True) + + return fs + +def _flatten_code(code): + "Flattens the code from a list of tuples." + flat_code = [] + for c in code: + flat_code.extend(c) + + return flat_code + +def make_case_flags(info): + "Makes the case flags." + flags = info.flags & CASE_FLAGS + + # Turn off FULLCASE if ASCII is turned on. + if info.flags & ASCII: + flags &= ~FULLCASE + + return flags + +def make_character(info, value, in_set=False): + "Makes a character literal." + if in_set: + # A character set is built case-sensitively. + return Character(value) + + return Character(value, case_flags=make_case_flags(info)) + +def make_ref_group(info, name, position): + "Makes a group reference." + return RefGroup(info, name, position, case_flags=make_case_flags(info)) + +def make_string_set(info, name): + "Makes a string set." + return StringSet(info, name, case_flags=make_case_flags(info)) + +def make_property(info, prop, in_set): + "Makes a property." + if in_set: + return prop + + return prop.with_flags(case_flags=make_case_flags(info)) + +def _parse_pattern(source, info): + "Parses a pattern, eg. 'a|b|c'." 
+ branches = [parse_sequence(source, info)] + while source.match("|"): + branches.append(parse_sequence(source, info)) + + if len(branches) == 1: + return branches[0] + return Branch(branches) + +def parse_sequence(source, info): + "Parses a sequence, eg. 'abc'." + sequence = [None] + case_flags = make_case_flags(info) + while True: + saved_pos = source.pos + ch = source.get() + if ch in SPECIAL_CHARS: + if ch in ")|": + # The end of a sequence. At the end of the pattern ch is "". + source.pos = saved_pos + break + elif ch == "\\": + # An escape sequence outside a set. + sequence.append(parse_escape(source, info, False)) + elif ch == "(": + # A parenthesised subpattern or a flag. + element = parse_paren(source, info) + if element is None: + case_flags = make_case_flags(info) + else: + sequence.append(element) + elif ch == ".": + # Any character. + if info.flags & DOTALL: + sequence.append(AnyAll()) + elif info.flags & WORD: + sequence.append(AnyU()) + else: + sequence.append(Any()) + elif ch == "[": + # A character set. + sequence.append(parse_set(source, info)) + elif ch == "^": + # The start of a line or the string. + if info.flags & MULTILINE: + if info.flags & WORD: + sequence.append(StartOfLineU()) + else: + sequence.append(StartOfLine()) + else: + sequence.append(StartOfString()) + elif ch == "$": + # The end of a line or the string. + if info.flags & MULTILINE: + if info.flags & WORD: + sequence.append(EndOfLineU()) + else: + sequence.append(EndOfLine()) + else: + if info.flags & WORD: + sequence.append(EndOfStringLineU()) + else: + sequence.append(EndOfStringLine()) + elif ch in "?*+{": + # Looks like a quantifier. + counts = parse_quantifier(source, info, ch) + if counts: + # It _is_ a quantifier. + apply_quantifier(source, info, counts, case_flags, ch, + saved_pos, sequence) + sequence.append(None) + else: + # It's not a quantifier. Maybe it's a fuzzy constraint. 
+ constraints = parse_fuzzy(source, info, ch, case_flags) + if constraints: + # It _is_ a fuzzy constraint. + apply_constraint(source, info, constraints, case_flags, + saved_pos, sequence) + sequence.append(None) + else: + # The element was just a literal. + sequence.append(Character(ord(ch), + case_flags=case_flags)) + else: + # A literal. + sequence.append(Character(ord(ch), case_flags=case_flags)) + else: + # A literal. + sequence.append(Character(ord(ch), case_flags=case_flags)) + + sequence = [item for item in sequence if item is not None] + return Sequence(sequence) + +def apply_quantifier(source, info, counts, case_flags, ch, saved_pos, + sequence): + element = sequence.pop() + if element is None: + if sequence: + raise error("multiple repeat", source.string, saved_pos) + raise error("nothing to repeat", source.string, saved_pos) + + if isinstance(element, (GreedyRepeat, LazyRepeat, PossessiveRepeat)): + raise error("multiple repeat", source.string, saved_pos) + + min_count, max_count = counts + saved_pos = source.pos + ch = source.get() + if ch == "?": + # The "?" suffix that means it's a lazy repeat. + repeated = LazyRepeat + elif ch == "+": + # The "+" suffix that means it's a possessive repeat. + repeated = PossessiveRepeat + else: + # No suffix means that it's a greedy repeat. + source.pos = saved_pos + repeated = GreedyRepeat + + # Ignore the quantifier if it applies to a zero-width item or the number of + # repeats is fixed at 1. + if not element.is_empty() and (min_count != 1 or max_count != 1): + element = repeated(element, min_count, max_count) + + sequence.append(element) + +def apply_constraint(source, info, constraints, case_flags, saved_pos, + sequence): + element = sequence.pop() + if element is None: + raise error("nothing for fuzzy constraint", source.string, saved_pos) + + # If a group is marked as fuzzy then put all of the fuzzy part in the + # group. 
+ if isinstance(element, Group): + element.subpattern = Fuzzy(element.subpattern, constraints) + sequence.append(element) + else: + sequence.append(Fuzzy(element, constraints)) + +_QUANTIFIERS = {"?": (0, 1), "*": (0, None), "+": (1, None)} + +def parse_quantifier(source, info, ch): + "Parses a quantifier." + q = _QUANTIFIERS.get(ch) + if q: + # It's a quantifier. + return q + + if ch == "{": + # Looks like a limited repeated element, eg. 'a{2,3}'. + counts = parse_limited_quantifier(source) + if counts: + return counts + + return None + +def is_above_limit(count): + "Checks whether a count is above the maximum." + return count is not None and count >= UNLIMITED + +def parse_limited_quantifier(source): + "Parses a limited quantifier." + saved_pos = source.pos + min_count = parse_count(source) + if source.match(","): + max_count = parse_count(source) + + # No minimum means 0 and no maximum means unlimited. + min_count = int(min_count or 0) + max_count = int(max_count) if max_count else None + else: + if not min_count: + source.pos = saved_pos + return None + + min_count = max_count = int(min_count) + + if not source.match ("}"): + source.pos = saved_pos + return None + + if is_above_limit(min_count) or is_above_limit(max_count): + raise error("repeat count too big", source.string, saved_pos) + + if max_count is not None and min_count > max_count: + raise error("min repeat greater than max repeat", source.string, + saved_pos) + + return min_count, max_count + +def parse_fuzzy(source, info, ch, case_flags): + "Parses a fuzzy setting, if present." 
+ saved_pos = source.pos + + if ch != "{": + return None + + constraints = {} + try: + parse_fuzzy_item(source, constraints) + while source.match(","): + parse_fuzzy_item(source, constraints) + except ParseError: + source.pos = saved_pos + return None + + if source.match(":"): + constraints["test"] = parse_fuzzy_test(source, info, case_flags) + + if not source.match("}"): + raise error("expected }", source.string, source.pos) + + return constraints + +def parse_fuzzy_item(source, constraints): + "Parses a fuzzy setting item." + saved_pos = source.pos + try: + parse_cost_constraint(source, constraints) + except ParseError: + source.pos = saved_pos + + parse_cost_equation(source, constraints) + +def parse_cost_constraint(source, constraints): + "Parses a cost constraint." + saved_pos = source.pos + ch = source.get() + if ch in ALPHA: + # Syntax: constraint [("<=" | "<") cost] + constraint = parse_constraint(source, constraints, ch) + + max_inc = parse_fuzzy_compare(source) + + if max_inc is None: + # No maximum cost. + constraints[constraint] = 0, None + else: + # There's a maximum cost. + cost_pos = source.pos + max_cost = parse_cost_limit(source) + + # Inclusive or exclusive limit? + if not max_inc: + max_cost -= 1 + + if max_cost < 0: + raise error("bad fuzzy cost limit", source.string, cost_pos) + + constraints[constraint] = 0, max_cost + elif ch in DIGITS: + # Syntax: cost ("<=" | "<") constraint ("<=" | "<") cost + source.pos = saved_pos + + # Minimum cost. + cost_pos = source.pos + min_cost = parse_cost_limit(source) + + min_inc = parse_fuzzy_compare(source) + if min_inc is None: + raise ParseError() + + constraint = parse_constraint(source, constraints, source.get()) + + max_inc = parse_fuzzy_compare(source) + if max_inc is None: + raise ParseError() + + # Maximum cost. + cost_pos = source.pos + max_cost = parse_cost_limit(source) + + # Inclusive or exclusive limits? 
+ if not min_inc: + min_cost += 1 + if not max_inc: + max_cost -= 1 + + if not 0 <= min_cost <= max_cost: + raise error("bad fuzzy cost limit", source.string, cost_pos) + + constraints[constraint] = min_cost, max_cost + else: + raise ParseError() + +def parse_cost_limit(source): + "Parses a cost limit." + cost_pos = source.pos + digits = parse_count(source) + + try: + return int(digits) + except ValueError: + pass + + raise error("bad fuzzy cost limit", source.string, cost_pos) + +def parse_constraint(source, constraints, ch): + "Parses a constraint." + if ch not in "deis": + raise ParseError() + + if ch in constraints: + raise ParseError() + + return ch + +def parse_fuzzy_compare(source): + "Parses a cost comparator." + if source.match("<="): + return True + elif source.match("<"): + return False + else: + return None + +def parse_cost_equation(source, constraints): + "Parses a cost equation." + if "cost" in constraints: + raise error("more than one cost equation", source.string, source.pos) + + cost = {} + + parse_cost_term(source, cost) + while source.match("+"): + parse_cost_term(source, cost) + + max_inc = parse_fuzzy_compare(source) + if max_inc is None: + raise ParseError() + + max_cost = int(parse_count(source)) + + if not max_inc: + max_cost -= 1 + + if max_cost < 0: + raise error("bad fuzzy cost limit", source.string, source.pos) + + cost["max"] = max_cost + + constraints["cost"] = cost + +def parse_cost_term(source, cost): + "Parses a cost equation term." + coeff = parse_count(source) + ch = source.get() + if ch not in "dis": + raise ParseError() + + if ch in cost: + raise error("repeated fuzzy cost", source.string, source.pos) + + cost[ch] = int(coeff or 1) + +def parse_fuzzy_test(source, info, case_flags): + saved_pos = source.pos + ch = source.get() + if ch in SPECIAL_CHARS: + if ch == "\\": + # An escape sequence outside a set. + return parse_escape(source, info, False) + elif ch == ".": + # Any character. 
+ if info.flags & DOTALL: + return AnyAll() + elif info.flags & WORD: + return AnyU() + else: + return Any() + elif ch == "[": + # A character set. + return parse_set(source, info) + else: + raise error("expected character set", source.string, saved_pos) + elif ch: + # A literal. + return Character(ord(ch), case_flags=case_flags) + else: + raise error("expected character set", source.string, saved_pos) + +def parse_count(source): + "Parses a quantifier's count, which can be empty." + return source.get_while(DIGITS) + +def parse_paren(source, info): + """Parses a parenthesised subpattern or a flag. Returns FLAGS if it's an + inline flag. + """ + saved_pos = source.pos + ch = source.get(True) + if ch == "?": + # (?... + saved_pos_2 = source.pos + ch = source.get(True) + if ch == "<": + # (?<... + saved_pos_3 = source.pos + ch = source.get() + if ch in ("=", "!"): + # (?<=... or (?") + saved_flags = info.flags + try: + subpattern = _parse_pattern(source, info) + source.expect(")") + finally: + info.flags = saved_flags + source.ignore_space = bool(info.flags & VERBOSE) + + info.close_group() + return Group(info, group, subpattern) + if ch in ("=", "!"): + # (?=... or (?!...: lookahead. + return parse_lookaround(source, info, False, ch == "=") + if ch == "P": + # (?P...: a Python extension. + return parse_extension(source, info) + if ch == "#": + # (?#...: a comment. + return parse_comment(source) + if ch == "(": + # (?(...: a conditional subpattern. + return parse_conditional(source, info) + if ch == ">": + # (?>...: an atomic subpattern. + return parse_atomic(source, info) + if ch == "|": + # (?|...: a common/reset groups branch. + return parse_common(source, info) + if ch == "R" or "0" <= ch <= "9": + # (?R...: probably a call to a group. + return parse_call_group(source, info, ch, saved_pos_2) + if ch == "&": + # (?&...: a call to a named group. + return parse_call_named_group(source, info, saved_pos_2) + + # (?...: probably a flags subpattern. 
+ source.pos = saved_pos_2 + return parse_flags_subpattern(source, info) + + if ch == "*": + # (*... + saved_pos_2 = source.pos + word = source.get_while(set(")>"), include=False) + if word[ : 1].isalpha(): + verb = VERBS.get(word) + if not verb: + raise error("unknown verb", source.string, saved_pos_2) + + source.expect(")") + + return verb + + # (...: an unnamed capture group. + source.pos = saved_pos + group = info.open_group() + saved_flags = info.flags + try: + subpattern = _parse_pattern(source, info) + source.expect(")") + finally: + info.flags = saved_flags + source.ignore_space = bool(info.flags & VERBOSE) + + info.close_group() + + return Group(info, group, subpattern) + +def parse_extension(source, info): + "Parses a Python extension." + saved_pos = source.pos + ch = source.get() + if ch == "<": + # (?P<...: a named capture group. + name = parse_name(source) + group = info.open_group(name) + source.expect(">") + saved_flags = info.flags + try: + subpattern = _parse_pattern(source, info) + source.expect(")") + finally: + info.flags = saved_flags + source.ignore_space = bool(info.flags & VERBOSE) + + info.close_group() + + return Group(info, group, subpattern) + if ch == "=": + # (?P=...: a named group reference. + name = parse_name(source, allow_numeric=True) + source.expect(")") + if info.is_open_group(name): + raise error("cannot refer to an open group", source.string, + saved_pos) + + return make_ref_group(info, name, saved_pos) + if ch == ">" or ch == "&": + # (?P>...: a call to a group. + return parse_call_named_group(source, info, saved_pos) + + source.pos = saved_pos + raise error("unknown extension", source.string, saved_pos) + +def parse_comment(source): + "Parses a comment." 
+ while True: + saved_pos = source.pos + c = source.get(True) + + if not c or c == ")": + break + + if c == "\\": + c = source.get(True) + + source.pos = saved_pos + source.expect(")") + + return None + +def parse_lookaround(source, info, behind, positive): + "Parses a lookaround." + saved_flags = info.flags + try: + subpattern = _parse_pattern(source, info) + source.expect(")") + finally: + info.flags = saved_flags + source.ignore_space = bool(info.flags & VERBOSE) + + return LookAround(behind, positive, subpattern) + +def parse_conditional(source, info): + "Parses a conditional subpattern." + saved_flags = info.flags + saved_pos = source.pos + ch = source.get() + if ch == "?": + # (?(?... + ch = source.get() + if ch in ("=", "!"): + # (?(?=... or (?(?!...: lookahead conditional. + return parse_lookaround_conditional(source, info, False, ch == "=") + if ch == "<": + # (?(?<... + ch = source.get() + if ch in ("=", "!"): + # (?(?<=... or (?(?"), include=False) + + if not name: + raise error("missing group name", source.string, source.pos) + + if name.isdigit(): + min_group = 0 if allow_group_0 else 1 + if not allow_numeric or int(name) < min_group: + raise error("bad character in group name", source.string, + source.pos) + else: + if not name.isidentifier(): + raise error("bad character in group name", source.string, + source.pos) + + return name + +def is_octal(string): + "Checks whether a string is octal." + return all(ch in OCT_DIGITS for ch in string) + +def is_decimal(string): + "Checks whether a string is decimal." + return all(ch in DIGITS for ch in string) + +def is_hexadecimal(string): + "Checks whether a string is hexadecimal." + return all(ch in HEX_DIGITS for ch in string) + +def parse_escape(source, info, in_set): + "Parses an escape sequence." + saved_ignore = source.ignore_space + source.ignore_space = False + ch = source.get() + source.ignore_space = saved_ignore + if not ch: + # A backslash at the end of the pattern. 
+ raise error("bad escape (end of pattern)", source.string, source.pos) + if ch in HEX_ESCAPES: + # A hexadecimal escape sequence. + return parse_hex_escape(source, info, ch, HEX_ESCAPES[ch], in_set, ch) + elif ch == "g" and not in_set: + # A group reference. + saved_pos = source.pos + try: + return parse_group_ref(source, info) + except error: + # Invalid as a group reference, so assume it's a literal. + source.pos = saved_pos + + return make_character(info, ord(ch), in_set) + elif ch == "G" and not in_set: + # A search anchor. + return SearchAnchor() + elif ch == "L" and not in_set: + # A string set. + return parse_string_set(source, info) + elif ch == "N": + # A named codepoint. + return parse_named_char(source, info, in_set) + elif ch in "pP": + # A Unicode property, positive or negative. + return parse_property(source, info, ch == "p", in_set) + elif ch == "R" and not in_set: + # A line ending. + charset = [0x0A, 0x0B, 0x0C, 0x0D] + if info.guess_encoding == UNICODE: + charset.extend([0x85, 0x2028, 0x2029]) + + return Atomic(Branch([String([0x0D, 0x0A]), SetUnion(info, [Character(c) + for c in charset])])) + elif ch == "X" and not in_set: + # A grapheme cluster. + return Grapheme() + elif ch in ALPHA: + # An alphabetic escape sequence. + # Positional escapes aren't allowed inside a character set. + if not in_set: + if info.flags & WORD: + value = WORD_POSITION_ESCAPES.get(ch) + else: + value = POSITION_ESCAPES.get(ch) + + if value: + return value + + value = CHARSET_ESCAPES.get(ch) + if value: + return value + + value = CHARACTER_ESCAPES.get(ch) + if value: + return Character(ord(value)) + + raise error("bad escape \\%s" % ch, source.string, source.pos) + elif ch in DIGITS: + # A numeric escape sequence. + return parse_numeric_escape(source, info, ch, in_set) + else: + # A literal. + return make_character(info, ord(ch), in_set) + +def parse_numeric_escape(source, info, ch, in_set): + "Parses a numeric escape sequence." 
+ if in_set or ch == "0": + # Octal escape sequence, max 3 digits. + return parse_octal_escape(source, info, [ch], in_set) + + # At least 1 digit, so either octal escape or group. + digits = ch + saved_pos = source.pos + ch = source.get() + if ch in DIGITS: + # At least 2 digits, so either octal escape or group. + digits += ch + saved_pos = source.pos + ch = source.get() + if is_octal(digits) and ch in OCT_DIGITS: + # 3 octal digits, so octal escape sequence. + encoding = info.flags & _ALL_ENCODINGS + if encoding == ASCII or encoding == LOCALE: + octal_mask = 0xFF + else: + octal_mask = 0x1FF + + value = int(digits + ch, 8) & octal_mask + return make_character(info, value) + + # Group reference. + source.pos = saved_pos + if info.is_open_group(digits): + raise error("cannot refer to an open group", source.string, source.pos) + + return make_ref_group(info, digits, source.pos) + +def parse_octal_escape(source, info, digits, in_set): + "Parses an octal escape sequence." + saved_pos = source.pos + ch = source.get() + while len(digits) < 3 and ch in OCT_DIGITS: + digits.append(ch) + saved_pos = source.pos + ch = source.get() + + source.pos = saved_pos + try: + value = int("".join(digits), 8) + return make_character(info, value, in_set) + except ValueError: + if digits[0] in OCT_DIGITS: + raise error("incomplete escape \\%s" % ''.join(digits), + source.string, source.pos) + else: + raise error("bad escape \\%s" % digits[0], source.string, + source.pos) + +def parse_hex_escape(source, info, esc, expected_len, in_set, type): + "Parses a hex escape sequence." + saved_pos = source.pos + digits = [] + for i in range(expected_len): + ch = source.get() + if ch not in HEX_DIGITS: + raise error("incomplete escape \\%s%s" % (type, ''.join(digits)), + source.string, saved_pos) + digits.append(ch) + + try: + value = int("".join(digits), 16) + except ValueError: + pass + else: + if value < 0x110000: + return make_character(info, value, in_set) + + # Bad hex escape. 
+ raise error("bad hex escape \\%s%s" % (esc, ''.join(digits)), + source.string, saved_pos) + +def parse_group_ref(source, info): + "Parses a group reference." + source.expect("<") + saved_pos = source.pos + name = parse_name(source, True) + source.expect(">") + if info.is_open_group(name): + raise error("cannot refer to an open group", source.string, source.pos) + + return make_ref_group(info, name, saved_pos) + +def parse_string_set(source, info): + "Parses a string set reference." + source.expect("<") + name = parse_name(source, True) + source.expect(">") + if name is None or name not in info.kwargs: + raise error("undefined named list", source.string, source.pos) + + return make_string_set(info, name) + +def parse_named_char(source, info, in_set): + "Parses a named character." + saved_pos = source.pos + if source.match("{"): + name = source.get_while(NAMED_CHAR_PART, keep_spaces=True) + if source.match("}"): + try: + value = unicodedata.lookup(name) + return make_character(info, ord(value), in_set) + except KeyError: + raise error("undefined character name", source.string, + source.pos) + + source.pos = saved_pos + return make_character(info, ord("N"), in_set) + +def parse_property(source, info, positive, in_set): + "Parses a Unicode property." + saved_pos = source.pos + ch = source.get() + if ch == "{": + negate = source.match("^") + prop_name, name = parse_property_name(source) + if source.match("}"): + # It's correctly delimited. + prop = lookup_property(prop_name, name, positive != negate, source) + return make_property(info, prop, in_set) + elif ch and ch in "CLMNPSZ": + # An abbreviated property, eg \pL. + prop = lookup_property(None, ch, positive, source) + return make_property(info, prop, in_set) + + # Not a property, so treat as a literal "p" or "P". + source.pos = saved_pos + ch = "p" if positive else "P" + return make_character(info, ord(ch), in_set) + +def parse_property_name(source): + "Parses a property name, which may be qualified." 
+ name = source.get_while(PROPERTY_NAME_PART) + saved_pos = source.pos + + ch = source.get() + if ch and ch in ":=": + prop_name = name + name = source.get_while(ALNUM | set(" &_-./")).strip() + + if name: + # Name after the ":" or "=", so it's a qualified name. + saved_pos = source.pos + else: + # No name after the ":" or "=", so assume it's an unqualified name. + prop_name, name = None, prop_name + else: + prop_name = None + + source.pos = saved_pos + return prop_name, name + +def parse_set(source, info): + "Parses a character set." + version = (info.flags & _ALL_VERSIONS) or DEFAULT_VERSION + + saved_ignore = source.ignore_space + source.ignore_space = False + # Negative set? + negate = source.match("^") + try: + if version == VERSION0: + item = parse_set_imp_union(source, info) + else: + item = parse_set_union(source, info) + + if not source.match("]"): + raise error("missing ]", source.string, source.pos) + finally: + source.ignore_space = saved_ignore + + if negate: + item = item.with_flags(positive=not item.positive) + + item = item.with_flags(case_flags=make_case_flags(info)) + + return item + +def parse_set_union(source, info): + "Parses a set union ([x||y])." + items = [parse_set_symm_diff(source, info)] + while source.match("||"): + items.append(parse_set_symm_diff(source, info)) + + if len(items) == 1: + return items[0] + return SetUnion(info, items) + +def parse_set_symm_diff(source, info): + "Parses a set symmetric difference ([x~~y])." + items = [parse_set_inter(source, info)] + while source.match("~~"): + items.append(parse_set_inter(source, info)) + + if len(items) == 1: + return items[0] + return SetSymDiff(info, items) + +def parse_set_inter(source, info): + "Parses a set intersection ([x&&y])." 
+ items = [parse_set_diff(source, info)] + while source.match("&&"): + items.append(parse_set_diff(source, info)) + + if len(items) == 1: + return items[0] + return SetInter(info, items) + +def parse_set_diff(source, info): + "Parses a set difference ([x--y])." + items = [parse_set_imp_union(source, info)] + while source.match("--"): + items.append(parse_set_imp_union(source, info)) + + if len(items) == 1: + return items[0] + return SetDiff(info, items) + +def parse_set_imp_union(source, info): + "Parses a set implicit union ([xy])." + version = (info.flags & _ALL_VERSIONS) or DEFAULT_VERSION + + items = [parse_set_member(source, info)] + while True: + saved_pos = source.pos + if source.match("]"): + # End of the set. + source.pos = saved_pos + break + + if version == VERSION1 and any(source.match(op) for op in SET_OPS): + # The new behaviour has set operators. + source.pos = saved_pos + break + + items.append(parse_set_member(source, info)) + + if len(items) == 1: + return items[0] + return SetUnion(info, items) + +def parse_set_member(source, info): + "Parses a member in a character set." + # Parse a set item. + start = parse_set_item(source, info) + saved_pos1 = source.pos + if (not isinstance(start, Character) or not start.positive or not + source.match("-")): + # It's not the start of a range. + return start + + version = (info.flags & _ALL_VERSIONS) or DEFAULT_VERSION + + # It looks like the start of a range of characters. + saved_pos2 = source.pos + if version == VERSION1 and source.match("-"): + # It's actually the set difference operator '--', so return the + # character. + source.pos = saved_pos1 + return start + + if source.match("]"): + # We've reached the end of the set, so return both the character and + # hyphen. + source.pos = saved_pos2 + return SetUnion(info, [start, Character(ord("-"))]) + + # Parse a set item. 
+ end = parse_set_item(source, info) + if not isinstance(end, Character) or not end.positive: + # It's not a range, so return the character, hyphen and property. + return SetUnion(info, [start, Character(ord("-")), end]) + + # It _is_ a range. + if start.value > end.value: + raise error("bad character range", source.string, source.pos) + + if start.value == end.value: + return start + + return Range(start.value, end.value) + +def parse_set_item(source, info): + "Parses an item in a character set." + version = (info.flags & _ALL_VERSIONS) or DEFAULT_VERSION + + if source.match("\\"): + # An escape sequence in a set. + return parse_escape(source, info, True) + + saved_pos = source.pos + if source.match("[:"): + # Looks like a POSIX character class. + try: + return parse_posix_class(source, info) + except ParseError: + # Not a POSIX character class. + source.pos = saved_pos + + if version == VERSION1 and source.match("["): + # It's the start of a nested set. + + # Negative set? + negate = source.match("^") + item = parse_set_union(source, info) + + if not source.match("]"): + raise error("missing ]", source.string, source.pos) + + if negate: + item = item.with_flags(positive=not item.positive) + + return item + + ch = source.get() + if not ch: + raise error("unterminated character set", source.string, source.pos) + + return Character(ord(ch)) + +def parse_posix_class(source, info): + "Parses a POSIX character class." + negate = source.match("^") + prop_name, name = parse_property_name(source) + if not source.match(":]"): + raise ParseError() + + return lookup_property(prop_name, name, not negate, source, posix=True) + +def float_to_rational(flt): + "Converts a float to a rational pair." + int_part = int(flt) + error = flt - int_part + if abs(error) < 0.0001: + return int_part, 1 + + den, num = float_to_rational(1.0 / error) + + return int_part * den + num, den + +def numeric_to_rational(numeric): + "Converts a numeric string to a rational string, if possible." 
+ if numeric[ : 1] == "-": + sign, numeric = numeric[0], numeric[1 : ] + else: + sign = "" + + parts = numeric.split("/") + if len(parts) == 2: + num, den = float_to_rational(float(parts[0]) / float(parts[1])) + elif len(parts) == 1: + num, den = float_to_rational(float(parts[0])) + else: + raise ValueError() + + result = "{}{}/{}".format(sign, num, den) + if result.endswith("/1"): + return result[ : -2] + + return result + +def standardise_name(name): + "Standardises a property or value name." + try: + return numeric_to_rational("".join(name)) + except (ValueError, ZeroDivisionError): + return "".join(ch for ch in name if ch not in "_- ").upper() + +_POSIX_CLASSES = set('ALNUM DIGIT PUNCT XDIGIT'.split()) + +_BINARY_VALUES = set('YES Y NO N TRUE T FALSE F'.split()) + +def lookup_property(property, value, positive, source=None, posix=False): + "Looks up a property." + # Normalise the names (which may still be lists). + property = standardise_name(property) if property else None + value = standardise_name(value) + + if (property, value) == ("GENERALCATEGORY", "ASSIGNED"): + property, value, positive = "GENERALCATEGORY", "UNASSIGNED", not positive + + if posix and not property and value.upper() in _POSIX_CLASSES: + value = 'POSIX' + value + + if property: + # Both the property and the value are provided. + prop = PROPERTIES.get(property) + if not prop: + if not source: + raise error("unknown property") + + raise error("unknown property", source.string, source.pos) + + prop_id, value_dict = prop + val_id = value_dict.get(value) + if val_id is None: + if not source: + raise error("unknown property value") + + raise error("unknown property value", source.string, source.pos) + + return Property((prop_id << 16) | val_id, positive) + + # Only the value is provided. + # It might be the name of a GC, script or block value. 
+ for property in ("GC", "SCRIPT", "BLOCK"): + prop_id, value_dict = PROPERTIES.get(property) + val_id = value_dict.get(value) + if val_id is not None: + return Property((prop_id << 16) | val_id, positive) + + # It might be the name of a binary property. + prop = PROPERTIES.get(value) + if prop: + prop_id, value_dict = prop + if set(value_dict) == _BINARY_VALUES: + return Property((prop_id << 16) | 1, positive) + + return Property(prop_id << 16, not positive) + + # It might be the name of a binary property starting with a prefix. + if value.startswith("IS"): + prop = PROPERTIES.get(value[2 : ]) + if prop: + prop_id, value_dict = prop + if "YES" in value_dict: + return Property((prop_id << 16) | 1, positive) + + # It might be the name of a script or block starting with a prefix. + for prefix, property in (("IS", "SCRIPT"), ("IN", "BLOCK")): + if value.startswith(prefix): + prop_id, value_dict = PROPERTIES.get(property) + val_id = value_dict.get(value[2 : ]) + if val_id is not None: + return Property((prop_id << 16) | val_id, positive) + + # Unknown property. + if not source: + raise error("unknown property") + + raise error("unknown property", source.string, source.pos) + +def _compile_replacement(source, pattern, is_unicode): + "Compiles a replacement template escape sequence." + ch = source.get() + if ch in ALPHA: + # An alphabetic escape sequence. + value = CHARACTER_ESCAPES.get(ch) + if value: + return False, [ord(value)] + + if ch in HEX_ESCAPES and (ch == "x" or is_unicode): + # A hexadecimal escape sequence. + return False, [parse_repl_hex_escape(source, HEX_ESCAPES[ch], ch)] + + if ch == "g": + # A group preference. + return True, [compile_repl_group(source, pattern)] + + if ch == "N" and is_unicode: + # A named character. 
+ value = parse_repl_named_char(source) + if value is not None: + return False, [value] + + raise error("bad escape \\%s" % ch, source.string, source.pos) + + if isinstance(source.sep, bytes): + octal_mask = 0xFF + else: + octal_mask = 0x1FF + + if ch == "0": + # An octal escape sequence. + digits = ch + while len(digits) < 3: + saved_pos = source.pos + ch = source.get() + if ch not in OCT_DIGITS: + source.pos = saved_pos + break + digits += ch + + return False, [int(digits, 8) & octal_mask] + + if ch in DIGITS: + # Either an octal escape sequence (3 digits) or a group reference (max + # 2 digits). + digits = ch + saved_pos = source.pos + ch = source.get() + if ch in DIGITS: + digits += ch + saved_pos = source.pos + ch = source.get() + if ch and is_octal(digits + ch): + # An octal escape sequence. + return False, [int(digits + ch, 8) & octal_mask] + + # A group reference. + source.pos = saved_pos + return True, [int(digits)] + + if ch == "\\": + # An escaped backslash is a backslash. + return False, [ord("\\")] + + if not ch: + # A trailing backslash. + raise error("bad escape (end of pattern)", source.string, source.pos) + + # An escaped non-backslash is a backslash followed by the literal. + return False, [ord("\\"), ord(ch)] + +def parse_repl_hex_escape(source, expected_len, type): + "Parses a hex escape sequence in a replacement string." + digits = [] + for i in range(expected_len): + ch = source.get() + if ch not in HEX_DIGITS: + raise error("incomplete escape \\%s%s" % (type, ''.join(digits)), + source.string, source.pos) + digits.append(ch) + + return int("".join(digits), 16) + +def parse_repl_named_char(source): + "Parses a named character in a replacement string." 
+ saved_pos = source.pos + if source.match("{"): + name = source.get_while(ALPHA | set(" ")) + + if source.match("}"): + try: + value = unicodedata.lookup(name) + return ord(value) + except KeyError: + raise error("undefined character name", source.string, + source.pos) + + source.pos = saved_pos + return None + +def compile_repl_group(source, pattern): + "Compiles a replacement template group reference." + source.expect("<") + name = parse_name(source, True, True) + + source.expect(">") + if name.isdigit(): + index = int(name) + if not 0 <= index <= pattern.groups: + raise error("invalid group reference", source.string, source.pos) + + return index + + try: + return pattern.groupindex[name] + except KeyError: + raise IndexError("unknown group") + +# The regular expression is parsed into a syntax tree. The different types of +# node are defined below. + +INDENT = " " +POSITIVE_OP = 0x1 +ZEROWIDTH_OP = 0x2 +FUZZY_OP = 0x4 +REVERSE_OP = 0x8 +REQUIRED_OP = 0x10 + +POS_TEXT = {False: "NON-MATCH", True: "MATCH"} +CASE_TEXT = {NOCASE: "", IGNORECASE: " SIMPLE_IGNORE_CASE", FULLCASE: "", + FULLIGNORECASE: " FULL_IGNORE_CASE"} + +def make_sequence(items): + if len(items) == 1: + return items[0] + return Sequence(items) + +# Common base class for all nodes. 
+class RegexBase: + def __init__(self): + self._key = self.__class__ + + def with_flags(self, positive=None, case_flags=None, zerowidth=None): + if positive is None: + positive = self.positive + else: + positive = bool(positive) + if case_flags is None: + case_flags = self.case_flags + else: + case_flags = CASE_FLAGS_COMBINATIONS[case_flags & CASE_FLAGS] + if zerowidth is None: + zerowidth = self.zerowidth + else: + zerowidth = bool(zerowidth) + + if (positive == self.positive and case_flags == self.case_flags and + zerowidth == self.zerowidth): + return self + + return self.rebuild(positive, case_flags, zerowidth) + + def fix_groups(self, pattern, reverse, fuzzy): + pass + + def optimise(self, info, reverse): + return self + + def pack_characters(self, info): + return self + + def remove_captures(self): + return self + + def is_atomic(self): + return True + + def can_be_affix(self): + return True + + def contains_group(self): + return False + + def get_firstset(self, reverse): + raise _FirstSetError() + + def has_simple_start(self): + return False + + def compile(self, reverse=False, fuzzy=False): + return self._compile(reverse, fuzzy) + + def is_empty(self): + return False + + def __hash__(self): + return hash(self._key) + + def __eq__(self, other): + return type(self) is type(other) and self._key == other._key + + def __ne__(self, other): + return not self.__eq__(other) + + def get_required_string(self, reverse): + return self.max_width(), None + +# Base class for zero-width nodes. 
+class ZeroWidthBase(RegexBase): + def __init__(self, positive=True): + RegexBase.__init__(self) + self.positive = bool(positive) + + self._key = self.__class__, self.positive + + def get_firstset(self, reverse): + return set([None]) + + def _compile(self, reverse, fuzzy): + flags = 0 + if self.positive: + flags |= POSITIVE_OP + if fuzzy: + flags |= FUZZY_OP + if reverse: + flags |= REVERSE_OP + return [(self._opcode, flags)] + + def dump(self, indent, reverse): + print("{}{} {}".format(INDENT * indent, self._op_name, + POS_TEXT[self.positive])) + + def max_width(self): + return 0 + +class Any(RegexBase): + _opcode = {False: OP.ANY, True: OP.ANY_REV} + _op_name = "ANY" + + def has_simple_start(self): + return True + + def _compile(self, reverse, fuzzy): + flags = 0 + if fuzzy: + flags |= FUZZY_OP + return [(self._opcode[reverse], flags)] + + def dump(self, indent, reverse): + print("{}{}".format(INDENT * indent, self._op_name)) + + def max_width(self): + return 1 + +class AnyAll(Any): + _opcode = {False: OP.ANY_ALL, True: OP.ANY_ALL_REV} + _op_name = "ANY_ALL" + +class AnyU(Any): + _opcode = {False: OP.ANY_U, True: OP.ANY_U_REV} + _op_name = "ANY_U" + +class Atomic(RegexBase): + def __init__(self, subpattern): + RegexBase.__init__(self) + self.subpattern = subpattern + + def fix_groups(self, pattern, reverse, fuzzy): + self.subpattern.fix_groups(pattern, reverse, fuzzy) + + def optimise(self, info, reverse): + self.subpattern = self.subpattern.optimise(info, reverse) + + if self.subpattern.is_empty(): + return self.subpattern + return self + + def pack_characters(self, info): + self.subpattern = self.subpattern.pack_characters(info) + return self + + def remove_captures(self): + self.subpattern = self.subpattern.remove_captures() + return self + + def can_be_affix(self): + return self.subpattern.can_be_affix() + + def contains_group(self): + return self.subpattern.contains_group() + + def get_firstset(self, reverse): + return self.subpattern.get_firstset(reverse) + 
+ def has_simple_start(self): + return self.subpattern.has_simple_start() + + def _compile(self, reverse, fuzzy): + return ([(OP.ATOMIC, )] + self.subpattern.compile(reverse, fuzzy) + + [(OP.END, )]) + + def dump(self, indent, reverse): + print("{}ATOMIC".format(INDENT * indent)) + self.subpattern.dump(indent + 1, reverse) + + def is_empty(self): + return self.subpattern.is_empty() + + def __eq__(self, other): + return (type(self) is type(other) and self.subpattern == + other.subpattern) + + def max_width(self): + return self.subpattern.max_width() + + def get_required_string(self, reverse): + return self.subpattern.get_required_string(reverse) + +class Boundary(ZeroWidthBase): + _opcode = OP.BOUNDARY + _op_name = "BOUNDARY" + +class Branch(RegexBase): + def __init__(self, branches): + RegexBase.__init__(self) + self.branches = branches + + def fix_groups(self, pattern, reverse, fuzzy): + for b in self.branches: + b.fix_groups(pattern, reverse, fuzzy) + + def optimise(self, info, reverse): + if not self.branches: + return Sequence([]) + + # Flatten branches within branches. + branches = Branch._flatten_branches(info, reverse, self.branches) + + # Move any common prefix or suffix out of the branches. + if reverse: + suffix, branches = Branch._split_common_suffix(info, branches) + prefix = [] + else: + prefix, branches = Branch._split_common_prefix(info, branches) + suffix = [] + + # Try to reduce adjacent single-character branches to sets. + branches = Branch._reduce_to_set(info, reverse, branches) + + if len(branches) > 1: + sequence = [Branch(branches)] + + if not prefix or not suffix: + # We might be able to add a quick precheck before the branches. 
+ firstset = self._add_precheck(info, reverse, branches) + + if firstset: + if reverse: + sequence.append(firstset) + else: + sequence.insert(0, firstset) + else: + sequence = branches + + return make_sequence(prefix + sequence + suffix) + + def _add_precheck(self, info, reverse, branches): + charset = set() + pos = -1 if reverse else 0 + + for branch in branches: + if type(branch) is Literal and branch.case_flags == NOCASE: + charset.add(branch.characters[pos]) + else: + return + + if not charset: + return None + + return _check_firstset(info, reverse, [Character(c) for c in charset]) + + def pack_characters(self, info): + self.branches = [b.pack_characters(info) for b in self.branches] + return self + + def remove_captures(self): + self.branches = [b.remove_captures() for b in self.branches] + return self + + def is_atomic(self): + return all(b.is_atomic() for b in self.branches) + + def can_be_affix(self): + return all(b.can_be_affix() for b in self.branches) + + def contains_group(self): + return any(b.contains_group() for b in self.branches) + + def get_firstset(self, reverse): + fs = set() + for b in self.branches: + fs |= b.get_firstset(reverse) + + return fs or set([None]) + + def _compile(self, reverse, fuzzy): + if not self.branches: + return [] + + code = [(OP.BRANCH, )] + for b in self.branches: + code.extend(b.compile(reverse, fuzzy)) + code.append((OP.NEXT, )) + + code[-1] = (OP.END, ) + + return code + + def dump(self, indent, reverse): + print("{}BRANCH".format(INDENT * indent)) + self.branches[0].dump(indent + 1, reverse) + for b in self.branches[1 : ]: + print("{}OR".format(INDENT * indent)) + b.dump(indent + 1, reverse) + + @staticmethod + def _flatten_branches(info, reverse, branches): + # Flatten the branches so that there aren't branches of branches. 
+ new_branches = [] + for b in branches: + b = b.optimise(info, reverse) + if isinstance(b, Branch): + new_branches.extend(b.branches) + else: + new_branches.append(b) + + return new_branches + + @staticmethod + def _split_common_prefix(info, branches): + # Common leading items can be moved out of the branches. + # Get the items in the branches. + alternatives = [] + for b in branches: + if isinstance(b, Sequence): + alternatives.append(b.items) + else: + alternatives.append([b]) + + # What is the maximum possible length of the prefix? + max_count = min(len(a) for a in alternatives) + + # What is the longest common prefix? + prefix = alternatives[0] + pos = 0 + end_pos = max_count + while pos < end_pos and prefix[pos].can_be_affix() and all(a[pos] == + prefix[pos] for a in alternatives): + pos += 1 + count = pos + + if info.flags & UNICODE: + # We need to check that we're not splitting a sequence of + # characters which could form part of full case-folding. + count = pos + while count > 0 and not all(Branch._can_split(a, count) for a in + alternatives): + count -= 1 + + # No common prefix is possible. + if count == 0: + return [], branches + + # Rebuild the branches. + new_branches = [] + for a in alternatives: + new_branches.append(make_sequence(a[count : ])) + + return prefix[ : count], new_branches + + @staticmethod + def _split_common_suffix(info, branches): + # Common trailing items can be moved out of the branches. + # Get the items in the branches. + alternatives = [] + for b in branches: + if isinstance(b, Sequence): + alternatives.append(b.items) + else: + alternatives.append([b]) + + # What is the maximum possible length of the suffix? + max_count = min(len(a) for a in alternatives) + + # What is the longest common suffix? 
+ suffix = alternatives[0] + pos = -1 + end_pos = -1 - max_count + while pos > end_pos and suffix[pos].can_be_affix() and all(a[pos] == + suffix[pos] for a in alternatives): + pos -= 1 + count = -1 - pos + + if info.flags & UNICODE: + # We need to check that we're not splitting a sequence of + # characters which could form part of full case-folding. + while count > 0 and not all(Branch._can_split_rev(a, count) for a + in alternatives): + count -= 1 + + # No common suffix is possible. + if count == 0: + return [], branches + + # Rebuild the branches. + new_branches = [] + for a in alternatives: + new_branches.append(make_sequence(a[ : -count])) + + return suffix[-count : ], new_branches + + @staticmethod + def _can_split(items, count): + # Check the characters either side of the proposed split. + if not Branch._is_full_case(items, count - 1): + return True + + if not Branch._is_full_case(items, count): + return True + + # Check whether a 1-1 split would be OK. + if Branch._is_folded(items[count - 1 : count + 1]): + return False + + # Check whether a 1-2 split would be OK. + if (Branch._is_full_case(items, count + 2) and + Branch._is_folded(items[count - 1 : count + 2])): + return False + + # Check whether a 2-1 split would be OK. + if (Branch._is_full_case(items, count - 2) and + Branch._is_folded(items[count - 2 : count + 1])): + return False + + return True + + @staticmethod + def _can_split_rev(items, count): + end = len(items) + + # Check the characters either side of the proposed split. + if not Branch._is_full_case(items, end - count): + return True + + if not Branch._is_full_case(items, end - count - 1): + return True + + # Check whether a 1-1 split would be OK. + if Branch._is_folded(items[end - count - 1 : end - count + 1]): + return False + + # Check whether a 1-2 split would be OK. 
+ if (Branch._is_full_case(items, end - count + 2) and + Branch._is_folded(items[end - count - 1 : end - count + 2])): + return False + + # Check whether a 2-1 split would be OK. + if (Branch._is_full_case(items, end - count - 2) and + Branch._is_folded(items[end - count - 2 : end - count + 1])): + return False + + return True + + @staticmethod + def _merge_common_prefixes(info, reverse, branches): + # Branches with the same case-sensitive character prefix can be grouped + # together if they are separated only by other branches with a + # character prefix. + prefixed = defaultdict(list) + order = {} + new_branches = [] + for b in branches: + if Branch._is_simple_character(b): + # Branch starts with a simple character. + prefixed[b.value].append([b]) + order.setdefault(b.value, len(order)) + elif (isinstance(b, Sequence) and b.items and + Branch._is_simple_character(b.items[0])): + # Branch starts with a simple character. + prefixed[b.items[0].value].append(b.items) + order.setdefault(b.items[0].value, len(order)) + else: + Branch._flush_char_prefix(info, reverse, prefixed, order, + new_branches) + + new_branches.append(b) + + Branch._flush_char_prefix(info, prefixed, order, new_branches) + + return new_branches + + @staticmethod + def _is_simple_character(c): + return isinstance(c, Character) and c.positive and not c.case_flags + + @staticmethod + def _reduce_to_set(info, reverse, branches): + # Can the branches be reduced to a set? + new_branches = [] + items = set() + case_flags = NOCASE + for b in branches: + if isinstance(b, (Character, Property, SetBase)): + # Branch starts with a single character. + if b.case_flags != case_flags: + # Different case sensitivity, so flush. 
+ Branch._flush_set_members(info, reverse, items, case_flags, + new_branches) + + case_flags = b.case_flags + + items.add(b.with_flags(case_flags=NOCASE)) + else: + Branch._flush_set_members(info, reverse, items, case_flags, + new_branches) + + new_branches.append(b) + + Branch._flush_set_members(info, reverse, items, case_flags, + new_branches) + + return new_branches + + @staticmethod + def _flush_char_prefix(info, reverse, prefixed, order, new_branches): + # Flush the prefixed branches. + if not prefixed: + return + + for value, branches in sorted(prefixed.items(), key=lambda pair: + order[pair[0]]): + if len(branches) == 1: + new_branches.append(make_sequence(branches[0])) + else: + subbranches = [] + optional = False + for b in branches: + if len(b) > 1: + subbranches.append(make_sequence(b[1 : ])) + elif not optional: + subbranches.append(Sequence()) + optional = True + + sequence = Sequence([Character(value), Branch(subbranches)]) + new_branches.append(sequence.optimise(info, reverse)) + + prefixed.clear() + order.clear() + + @staticmethod + def _flush_set_members(info, reverse, items, case_flags, new_branches): + # Flush the set members. + if not items: + return + + if len(items) == 1: + item = list(items)[0] + else: + item = SetUnion(info, list(items)).optimise(info, reverse) + + new_branches.append(item.with_flags(case_flags=case_flags)) + + items.clear() + + @staticmethod + def _is_full_case(items, i): + if not 0 <= i < len(items): + return False + + item = items[i] + return (isinstance(item, Character) and item.positive and + (item.case_flags & FULLIGNORECASE) == FULLIGNORECASE) + + @staticmethod + def _is_folded(items): + if len(items) < 2: + return False + + for i in items: + if (not isinstance(i, Character) or not i.positive or not + i.case_flags): + return False + + folded = "".join(chr(i.value) for i in items) + folded = _regex.fold_case(FULL_CASE_FOLDING, folded) + + # Get the characters which expand to multiple codepoints on folding. 
+ expanding_chars = _regex.get_expand_on_folding() + + for c in expanding_chars: + if folded == _regex.fold_case(FULL_CASE_FOLDING, c): + return True + + return False + + def is_empty(self): + return all(b.is_empty() for b in self.branches) + + def __eq__(self, other): + return type(self) is type(other) and self.branches == other.branches + + def max_width(self): + return max(b.max_width() for b in self.branches) + +class CallGroup(RegexBase): + def __init__(self, info, group, position): + RegexBase.__init__(self) + self.info = info + self.group = group + self.position = position + + self._key = self.__class__, self.group + + def fix_groups(self, pattern, reverse, fuzzy): + try: + self.group = int(self.group) + except ValueError: + try: + self.group = self.info.group_index[self.group] + except KeyError: + raise error("invalid group reference", pattern, self.position) + + if not 0 <= self.group <= self.info.group_count: + raise error("unknown group", pattern, self.position) + + if self.group > 0 and self.info.open_group_count[self.group] > 1: + raise error("ambiguous group reference", pattern, self.position) + + self.info.group_calls.append((self, reverse, fuzzy)) + + self._key = self.__class__, self.group + + def remove_captures(self): + raise error("group reference not allowed", pattern, self.position) + + def _compile(self, reverse, fuzzy): + return [(OP.GROUP_CALL, self.call_ref)] + + def dump(self, indent, reverse): + print("{}GROUP_CALL {}".format(INDENT * indent, self.group)) + + def __eq__(self, other): + return type(self) is type(other) and self.group == other.group + + def max_width(self): + return UNLIMITED + + def __del__(self): + self.info = None + +class CallRef(RegexBase): + def __init__(self, ref, parsed): + self.ref = ref + self.parsed = parsed + + def _compile(self, reverse, fuzzy): + return ([(OP.CALL_REF, self.ref)] + self.parsed._compile(reverse, + fuzzy) + [(OP.END, )]) + +class Character(RegexBase): + _opcode = {(NOCASE, False): OP.CHARACTER, 
(IGNORECASE, False): + OP.CHARACTER_IGN, (FULLCASE, False): OP.CHARACTER, (FULLIGNORECASE, + False): OP.CHARACTER_IGN, (NOCASE, True): OP.CHARACTER_REV, (IGNORECASE, + True): OP.CHARACTER_IGN_REV, (FULLCASE, True): OP.CHARACTER_REV, + (FULLIGNORECASE, True): OP.CHARACTER_IGN_REV} + + def __init__(self, value, positive=True, case_flags=NOCASE, + zerowidth=False): + RegexBase.__init__(self) + self.value = value + self.positive = bool(positive) + self.case_flags = CASE_FLAGS_COMBINATIONS[case_flags] + self.zerowidth = bool(zerowidth) + + if (self.positive and (self.case_flags & FULLIGNORECASE) == + FULLIGNORECASE): + self.folded = _regex.fold_case(FULL_CASE_FOLDING, chr(self.value)) + else: + self.folded = chr(self.value) + + self._key = (self.__class__, self.value, self.positive, + self.case_flags, self.zerowidth) + + def rebuild(self, positive, case_flags, zerowidth): + return Character(self.value, positive, case_flags, zerowidth) + + def optimise(self, info, reverse, in_set=False): + return self + + def get_firstset(self, reverse): + return set([self]) + + def has_simple_start(self): + return True + + def _compile(self, reverse, fuzzy): + flags = 0 + if self.positive: + flags |= POSITIVE_OP + if self.zerowidth: + flags |= ZEROWIDTH_OP + if fuzzy: + flags |= FUZZY_OP + + code = PrecompiledCode([self._opcode[self.case_flags, reverse], flags, + self.value]) + + if len(self.folded) > 1: + # The character expands on full case-folding. 
+ code = Branch([code, String([ord(c) for c in self.folded], + case_flags=self.case_flags)]) + + return code.compile(reverse, fuzzy) + + def dump(self, indent, reverse): + display = ascii(chr(self.value)).lstrip("bu") + print("{}CHARACTER {} {}{}".format(INDENT * indent, + POS_TEXT[self.positive], display, CASE_TEXT[self.case_flags])) + + def matches(self, ch): + return (ch == self.value) == self.positive + + def max_width(self): + return len(self.folded) + + def get_required_string(self, reverse): + if not self.positive: + return 1, None + + self.folded_characters = tuple(ord(c) for c in self.folded) + + return 0, self + +class Conditional(RegexBase): + def __init__(self, info, group, yes_item, no_item, position): + RegexBase.__init__(self) + self.info = info + self.group = group + self.yes_item = yes_item + self.no_item = no_item + self.position = position + + def fix_groups(self, pattern, reverse, fuzzy): + try: + self.group = int(self.group) + except ValueError: + try: + self.group = self.info.group_index[self.group] + except KeyError: + if self.group == 'DEFINE': + # 'DEFINE' is a special name unless there's a group with + # that name. 
+ self.group = 0 + else: + raise error("unknown group", pattern, self.position) + + if not 0 <= self.group <= self.info.group_count: + raise error("invalid group reference", pattern, self.position) + + self.yes_item.fix_groups(pattern, reverse, fuzzy) + self.no_item.fix_groups(pattern, reverse, fuzzy) + + def optimise(self, info, reverse): + yes_item = self.yes_item.optimise(info, reverse) + no_item = self.no_item.optimise(info, reverse) + + return Conditional(info, self.group, yes_item, no_item, self.position) + + def pack_characters(self, info): + self.yes_item = self.yes_item.pack_characters(info) + self.no_item = self.no_item.pack_characters(info) + return self + + def remove_captures(self): + self.yes_item = self.yes_item.remove_captures() + self.no_item = self.no_item.remove_captures() + + def is_atomic(self): + return self.yes_item.is_atomic() and self.no_item.is_atomic() + + def can_be_affix(self): + return self.yes_item.can_be_affix() and self.no_item.can_be_affix() + + def contains_group(self): + return self.yes_item.contains_group() or self.no_item.contains_group() + + def get_firstset(self, reverse): + return (self.yes_item.get_firstset(reverse) | + self.no_item.get_firstset(reverse)) + + def _compile(self, reverse, fuzzy): + code = [(OP.GROUP_EXISTS, self.group)] + code.extend(self.yes_item.compile(reverse, fuzzy)) + add_code = self.no_item.compile(reverse, fuzzy) + if add_code: + code.append((OP.NEXT, )) + code.extend(add_code) + + code.append((OP.END, )) + + return code + + def dump(self, indent, reverse): + print("{}GROUP_EXISTS {}".format(INDENT * indent, self.group)) + self.yes_item.dump(indent + 1, reverse) + if not self.no_item.is_empty(): + print("{}OR".format(INDENT * indent)) + self.no_item.dump(indent + 1, reverse) + + def is_empty(self): + return self.yes_item.is_empty() and self.no_item.is_empty() + + def __eq__(self, other): + return type(self) is type(other) and (self.group, self.yes_item, + self.no_item) == (other.group, other.yes_item, 
other.no_item) + + def max_width(self): + return max(self.yes_item.max_width(), self.no_item.max_width()) + + def __del__(self): + self.info = None + +class DefaultBoundary(ZeroWidthBase): + _opcode = OP.DEFAULT_BOUNDARY + _op_name = "DEFAULT_BOUNDARY" + +class DefaultEndOfWord(ZeroWidthBase): + _opcode = OP.DEFAULT_END_OF_WORD + _op_name = "DEFAULT_END_OF_WORD" + +class DefaultStartOfWord(ZeroWidthBase): + _opcode = OP.DEFAULT_START_OF_WORD + _op_name = "DEFAULT_START_OF_WORD" + +class EndOfLine(ZeroWidthBase): + _opcode = OP.END_OF_LINE + _op_name = "END_OF_LINE" + +class EndOfLineU(EndOfLine): + _opcode = OP.END_OF_LINE_U + _op_name = "END_OF_LINE_U" + +class EndOfString(ZeroWidthBase): + _opcode = OP.END_OF_STRING + _op_name = "END_OF_STRING" + +class EndOfStringLine(ZeroWidthBase): + _opcode = OP.END_OF_STRING_LINE + _op_name = "END_OF_STRING_LINE" + +class EndOfStringLineU(EndOfStringLine): + _opcode = OP.END_OF_STRING_LINE_U + _op_name = "END_OF_STRING_LINE_U" + +class EndOfWord(ZeroWidthBase): + _opcode = OP.END_OF_WORD + _op_name = "END_OF_WORD" + +class Failure(ZeroWidthBase): + _op_name = "FAILURE" + + def _compile(self, reverse, fuzzy): + return [(OP.FAILURE, )] + +class Fuzzy(RegexBase): + def __init__(self, subpattern, constraints=None): + RegexBase.__init__(self) + if constraints is None: + constraints = {} + self.subpattern = subpattern + self.constraints = constraints + + # If an error type is mentioned in the cost equation, then its maximum + # defaults to unlimited. + if "cost" in constraints: + for e in "dis": + if e in constraints["cost"]: + constraints.setdefault(e, (0, None)) + + # If any error type is mentioned, then all the error maxima default to + # 0, otherwise they default to unlimited. + if set(constraints) & set("dis"): + for e in "dis": + constraints.setdefault(e, (0, 0)) + else: + for e in "dis": + constraints.setdefault(e, (0, None)) + + # The maximum of the generic error type defaults to unlimited. 
+ constraints.setdefault("e", (0, None)) + + # The cost equation defaults to equal costs. Also, the cost of any + # error type not mentioned in the cost equation defaults to 0. + if "cost" in constraints: + for e in "dis": + constraints["cost"].setdefault(e, 0) + else: + constraints["cost"] = {"d": 1, "i": 1, "s": 1, "max": + constraints["e"][1]} + + def fix_groups(self, pattern, reverse, fuzzy): + self.subpattern.fix_groups(pattern, reverse, True) + + def pack_characters(self, info): + self.subpattern = self.subpattern.pack_characters(info) + return self + + def remove_captures(self): + self.subpattern = self.subpattern.remove_captures() + return self + + def is_atomic(self): + return self.subpattern.is_atomic() + + def contains_group(self): + return self.subpattern.contains_group() + + def _compile(self, reverse, fuzzy): + # The individual limits. + arguments = [] + for e in "dise": + v = self.constraints[e] + arguments.append(v[0]) + arguments.append(UNLIMITED if v[1] is None else v[1]) + + # The coeffs of the cost equation. + for e in "dis": + arguments.append(self.constraints["cost"][e]) + + # The maximum of the cost equation. 
+ v = self.constraints["cost"]["max"] + arguments.append(UNLIMITED if v is None else v) + + flags = 0 + if reverse: + flags |= REVERSE_OP + + test = self.constraints.get("test") + + if test: + return ([(OP.FUZZY_EXT, flags) + tuple(arguments)] + + test.compile(reverse, True) + [(OP.NEXT,)] + + self.subpattern.compile(reverse, True) + [(OP.END,)]) + + return ([(OP.FUZZY, flags) + tuple(arguments)] + + self.subpattern.compile(reverse, True) + [(OP.END,)]) + + def dump(self, indent, reverse): + constraints = self._constraints_to_string() + if constraints: + constraints = " " + constraints + print("{}FUZZY{}".format(INDENT * indent, constraints)) + self.subpattern.dump(indent + 1, reverse) + + def is_empty(self): + return self.subpattern.is_empty() + + def __eq__(self, other): + return (type(self) is type(other) and self.subpattern == + other.subpattern and self.constraints == other.constraints) + + def max_width(self): + return UNLIMITED + + def _constraints_to_string(self): + constraints = [] + + for name in "ids": + min, max = self.constraints[name] + if max == 0: + continue + + con = "" + + if min > 0: + con = "{}<=".format(min) + + con += name + + if max is not None: + con += "<={}".format(max) + + constraints.append(con) + + cost = [] + for name in "ids": + coeff = self.constraints["cost"][name] + if coeff > 0: + cost.append("{}{}".format(coeff, name)) + + limit = self.constraints["cost"]["max"] + if limit is not None and limit > 0: + cost = "{}<={}".format("+".join(cost), limit) + constraints.append(cost) + + return ",".join(constraints) + +class Grapheme(RegexBase): + def _compile(self, reverse, fuzzy): + # Match at least 1 character until a grapheme boundary is reached. Note + # that this is the same whether matching forwards or backwards. 
+ grapheme_matcher = Atomic(Sequence([LazyRepeat(AnyAll(), 1, None), + GraphemeBoundary()])) + + return grapheme_matcher.compile(reverse, fuzzy) + + def dump(self, indent, reverse): + print("{}GRAPHEME".format(INDENT * indent)) + + def max_width(self): + return UNLIMITED + +class GraphemeBoundary: + def compile(self, reverse, fuzzy): + return [(OP.GRAPHEME_BOUNDARY, 1)] + +class GreedyRepeat(RegexBase): + _opcode = OP.GREEDY_REPEAT + _op_name = "GREEDY_REPEAT" + + def __init__(self, subpattern, min_count, max_count): + RegexBase.__init__(self) + self.subpattern = subpattern + self.min_count = min_count + self.max_count = max_count + + def fix_groups(self, pattern, reverse, fuzzy): + self.subpattern.fix_groups(pattern, reverse, fuzzy) + + def optimise(self, info, reverse): + subpattern = self.subpattern.optimise(info, reverse) + + return type(self)(subpattern, self.min_count, self.max_count) + + def pack_characters(self, info): + self.subpattern = self.subpattern.pack_characters(info) + return self + + def remove_captures(self): + self.subpattern = self.subpattern.remove_captures() + return self + + def is_atomic(self): + return self.min_count == self.max_count and self.subpattern.is_atomic() + + def can_be_affix(self): + return False + + def contains_group(self): + return self.subpattern.contains_group() + + def get_firstset(self, reverse): + fs = self.subpattern.get_firstset(reverse) + if self.min_count == 0: + fs.add(None) + + return fs + + def _compile(self, reverse, fuzzy): + repeat = [self._opcode, self.min_count] + if self.max_count is None: + repeat.append(UNLIMITED) + else: + repeat.append(self.max_count) + + subpattern = self.subpattern.compile(reverse, fuzzy) + if not subpattern: + return [] + + return ([tuple(repeat)] + subpattern + [(OP.END, )]) + + def dump(self, indent, reverse): + if self.max_count is None: + limit = "INF" + else: + limit = self.max_count + print("{}{} {} {}".format(INDENT * indent, self._op_name, + self.min_count, limit)) + + 
self.subpattern.dump(indent + 1, reverse) + + def is_empty(self): + return self.subpattern.is_empty() + + def __eq__(self, other): + return type(self) is type(other) and (self.subpattern, self.min_count, + self.max_count) == (other.subpattern, other.min_count, + other.max_count) + + def max_width(self): + if self.max_count is None: + return UNLIMITED + + return self.subpattern.max_width() * self.max_count + + def get_required_string(self, reverse): + max_count = UNLIMITED if self.max_count is None else self.max_count + if self.min_count == 0: + w = self.subpattern.max_width() * max_count + return min(w, UNLIMITED), None + + ofs, req = self.subpattern.get_required_string(reverse) + if req: + return ofs, req + + w = self.subpattern.max_width() * max_count + return min(w, UNLIMITED), None + +class PossessiveRepeat(GreedyRepeat): + def is_atomic(self): + return True + + def _compile(self, reverse, fuzzy): + subpattern = self.subpattern.compile(reverse, fuzzy) + if not subpattern: + return [] + + repeat = [self._opcode, self.min_count] + if self.max_count is None: + repeat.append(UNLIMITED) + else: + repeat.append(self.max_count) + + return ([(OP.ATOMIC, ), tuple(repeat)] + subpattern + [(OP.END, ), + (OP.END, )]) + + def dump(self, indent, reverse): + print("{}ATOMIC".format(INDENT * indent)) + + if self.max_count is None: + limit = "INF" + else: + limit = self.max_count + print("{}{} {} {}".format(INDENT * (indent + 1), self._op_name, + self.min_count, limit)) + + self.subpattern.dump(indent + 2, reverse) + +class Group(RegexBase): + def __init__(self, info, group, subpattern): + RegexBase.__init__(self) + self.info = info + self.group = group + self.subpattern = subpattern + + self.call_ref = None + + def fix_groups(self, pattern, reverse, fuzzy): + self.info.defined_groups[self.group] = (self, reverse, fuzzy) + self.subpattern.fix_groups(pattern, reverse, fuzzy) + + def optimise(self, info, reverse): + subpattern = self.subpattern.optimise(info, reverse) + + return 
Group(self.info, self.group, subpattern) + + def pack_characters(self, info): + self.subpattern = self.subpattern.pack_characters(info) + return self + + def remove_captures(self): + return self.subpattern.remove_captures() + + def is_atomic(self): + return self.subpattern.is_atomic() + + def can_be_affix(self): + return False + + def contains_group(self): + return True + + def get_firstset(self, reverse): + return self.subpattern.get_firstset(reverse) + + def has_simple_start(self): + return self.subpattern.has_simple_start() + + def _compile(self, reverse, fuzzy): + code = [] + + public_group = private_group = self.group + if private_group < 0: + public_group = self.info.private_groups[private_group] + private_group = self.info.group_count - private_group + + key = self.group, reverse, fuzzy + ref = self.info.call_refs.get(key) + if ref is not None: + code += [(OP.CALL_REF, ref)] + + code += [(OP.GROUP, int(not reverse), private_group, public_group)] + code += self.subpattern.compile(reverse, fuzzy) + code += [(OP.END, )] + + if ref is not None: + code += [(OP.END, )] + + return code + + def dump(self, indent, reverse): + group = self.group + if group < 0: + group = private_groups[group] + print("{}GROUP {}".format(INDENT * indent, group)) + self.subpattern.dump(indent + 1, reverse) + + def __eq__(self, other): + return (type(self) is type(other) and (self.group, self.subpattern) == + (other.group, other.subpattern)) + + def max_width(self): + return self.subpattern.max_width() + + def get_required_string(self, reverse): + return self.subpattern.get_required_string(reverse) + + def __del__(self): + self.info = None + +class Keep(ZeroWidthBase): + _opcode = OP.KEEP + _op_name = "KEEP" + +class LazyRepeat(GreedyRepeat): + _opcode = OP.LAZY_REPEAT + _op_name = "LAZY_REPEAT" + +class LookAround(RegexBase): + _dir_text = {False: "AHEAD", True: "BEHIND"} + + def __init__(self, behind, positive, subpattern): + RegexBase.__init__(self) + self.behind = bool(behind) + 
self.positive = bool(positive) + self.subpattern = subpattern + + def fix_groups(self, pattern, reverse, fuzzy): + self.subpattern.fix_groups(pattern, self.behind, fuzzy) + + def optimise(self, info, reverse): + subpattern = self.subpattern.optimise(info, self.behind) + if self.positive and subpattern.is_empty(): + return subpattern + + return LookAround(self.behind, self.positive, subpattern) + + def pack_characters(self, info): + self.subpattern = self.subpattern.pack_characters(info) + return self + + def remove_captures(self): + return self.subpattern.remove_captures() + + def is_atomic(self): + return self.subpattern.is_atomic() + + def can_be_affix(self): + return self.subpattern.can_be_affix() + + def contains_group(self): + return self.subpattern.contains_group() + + def get_firstset(self, reverse): + if self.positive and self.behind == reverse: + return self.subpattern.get_firstset(reverse) + + return set([None]) + + def _compile(self, reverse, fuzzy): + flags = 0 + if self.positive: + flags |= POSITIVE_OP + if fuzzy: + flags |= FUZZY_OP + if reverse: + flags |= REVERSE_OP + + return ([(OP.LOOKAROUND, flags, int(not self.behind))] + + self.subpattern.compile(self.behind) + [(OP.END, )]) + + def dump(self, indent, reverse): + print("{}LOOK{} {}".format(INDENT * indent, + self._dir_text[self.behind], POS_TEXT[self.positive])) + self.subpattern.dump(indent + 1, self.behind) + + def is_empty(self): + return self.positive and self.subpattern.is_empty() + + def __eq__(self, other): + return type(self) is type(other) and (self.behind, self.positive, + self.subpattern) == (other.behind, other.positive, other.subpattern) + + def max_width(self): + return 0 + +class LookAroundConditional(RegexBase): + _dir_text = {False: "AHEAD", True: "BEHIND"} + + def __init__(self, behind, positive, subpattern, yes_item, no_item): + RegexBase.__init__(self) + self.behind = bool(behind) + self.positive = bool(positive) + self.subpattern = subpattern + self.yes_item = yes_item + 
self.no_item = no_item + + def fix_groups(self, pattern, reverse, fuzzy): + self.subpattern.fix_groups(pattern, reverse, fuzzy) + self.yes_item.fix_groups(pattern, reverse, fuzzy) + self.no_item.fix_groups(pattern, reverse, fuzzy) + + def optimise(self, info, reverse): + subpattern = self.subpattern.optimise(info, self.behind) + yes_item = self.yes_item.optimise(info, self.behind) + no_item = self.no_item.optimise(info, self.behind) + + return LookAroundConditional(self.behind, self.positive, subpattern, + yes_item, no_item) + + def pack_characters(self, info): + self.subpattern = self.subpattern.pack_characters(info) + self.yes_item = self.yes_item.pack_characters(info) + self.no_item = self.no_item.pack_characters(info) + return self + + def remove_captures(self): + self.subpattern = self.subpattern.remove_captures() + self.yes_item = self.yes_item.remove_captures() + self.no_item = self.no_item.remove_captures() + + def is_atomic(self): + return (self.subpattern.is_atomic() and self.yes_item.is_atomic() and + self.no_item.is_atomic()) + + def can_be_affix(self): + return (self.subpattern.can_be_affix() and self.yes_item.can_be_affix() + and self.no_item.can_be_affix()) + + def contains_group(self): + return (self.subpattern.contains_group() or + self.yes_item.contains_group() or self.no_item.contains_group()) + + def _compile(self, reverse, fuzzy): + code = [(OP.CONDITIONAL, int(self.positive), int(not self.behind))] + code.extend(self.subpattern.compile(self.behind, fuzzy)) + code.append((OP.NEXT, )) + code.extend(self.yes_item.compile(reverse, fuzzy)) + add_code = self.no_item.compile(reverse, fuzzy) + if add_code: + code.append((OP.NEXT, )) + code.extend(add_code) + + code.append((OP.END, )) + + return code + + def dump(self, indent, reverse): + print("{}CONDITIONAL {} {}".format(INDENT * indent, + self._dir_text[self.behind], POS_TEXT[self.positive])) + self.subpattern.dump(indent + 1, self.behind) + print("{}EITHER".format(INDENT * indent)) + 
self.yes_item.dump(indent + 1, reverse) + if not self.no_item.is_empty(): + print("{}OR".format(INDENT * indent)) + self.no_item.dump(indent + 1, reverse) + + def is_empty(self): + return (self.subpattern.is_empty() and self.yes_item.is_empty() or + self.no_item.is_empty()) + + def __eq__(self, other): + return type(self) is type(other) and (self.subpattern, self.yes_item, + self.no_item) == (other.subpattern, other.yes_item, other.no_item) + + def max_width(self): + return max(self.yes_item.max_width(), self.no_item.max_width()) + + def get_required_string(self, reverse): + return self.max_width(), None + +class PrecompiledCode(RegexBase): + def __init__(self, code): + self.code = code + + def _compile(self, reverse, fuzzy): + return [tuple(self.code)] + +class Property(RegexBase): + _opcode = {(NOCASE, False): OP.PROPERTY, (IGNORECASE, False): + OP.PROPERTY_IGN, (FULLCASE, False): OP.PROPERTY, (FULLIGNORECASE, False): + OP.PROPERTY_IGN, (NOCASE, True): OP.PROPERTY_REV, (IGNORECASE, True): + OP.PROPERTY_IGN_REV, (FULLCASE, True): OP.PROPERTY_REV, (FULLIGNORECASE, + True): OP.PROPERTY_IGN_REV} + + def __init__(self, value, positive=True, case_flags=NOCASE, + zerowidth=False): + RegexBase.__init__(self) + self.value = value + self.positive = bool(positive) + self.case_flags = CASE_FLAGS_COMBINATIONS[case_flags] + self.zerowidth = bool(zerowidth) + + self._key = (self.__class__, self.value, self.positive, + self.case_flags, self.zerowidth) + + def rebuild(self, positive, case_flags, zerowidth): + return Property(self.value, positive, case_flags, zerowidth) + + def optimise(self, info, reverse, in_set=False): + return self + + def get_firstset(self, reverse): + return set([self]) + + def has_simple_start(self): + return True + + def _compile(self, reverse, fuzzy): + flags = 0 + if self.positive: + flags |= POSITIVE_OP + if self.zerowidth: + flags |= ZEROWIDTH_OP + if fuzzy: + flags |= FUZZY_OP + return [(self._opcode[self.case_flags, reverse], flags, self.value)] + + 
def dump(self, indent, reverse): + prop = PROPERTY_NAMES[self.value >> 16] + name, value = prop[0], prop[1][self.value & 0xFFFF] + print("{}PROPERTY {} {}:{}{}".format(INDENT * indent, + POS_TEXT[self.positive], name, value, CASE_TEXT[self.case_flags])) + + def matches(self, ch): + return _regex.has_property_value(self.value, ch) == self.positive + + def max_width(self): + return 1 + +class Prune(ZeroWidthBase): + _op_name = "PRUNE" + + def _compile(self, reverse, fuzzy): + return [(OP.PRUNE, )] + +class Range(RegexBase): + _opcode = {(NOCASE, False): OP.RANGE, (IGNORECASE, False): OP.RANGE_IGN, + (FULLCASE, False): OP.RANGE, (FULLIGNORECASE, False): OP.RANGE_IGN, + (NOCASE, True): OP.RANGE_REV, (IGNORECASE, True): OP.RANGE_IGN_REV, + (FULLCASE, True): OP.RANGE_REV, (FULLIGNORECASE, True): OP.RANGE_IGN_REV} + _op_name = "RANGE" + + def __init__(self, lower, upper, positive=True, case_flags=NOCASE, + zerowidth=False): + RegexBase.__init__(self) + self.lower = lower + self.upper = upper + self.positive = bool(positive) + self.case_flags = CASE_FLAGS_COMBINATIONS[case_flags] + self.zerowidth = bool(zerowidth) + + self._key = (self.__class__, self.lower, self.upper, self.positive, + self.case_flags, self.zerowidth) + + def rebuild(self, positive, case_flags, zerowidth): + return Range(self.lower, self.upper, positive, case_flags, zerowidth) + + def optimise(self, info, reverse, in_set=False): + # Is the range case-sensitive? + if not self.positive or not (self.case_flags & IGNORECASE) or in_set: + return self + + # Is full case-folding possible? + if (not (info.flags & UNICODE) or (self.case_flags & FULLIGNORECASE) != + FULLIGNORECASE): + return self + + # Get the characters which expand to multiple codepoints on folding. + expanding_chars = _regex.get_expand_on_folding() + + # Get the folded characters in the range. 
+ items = [] + for ch in expanding_chars: + if self.lower <= ord(ch) <= self.upper: + folded = _regex.fold_case(FULL_CASE_FOLDING, ch) + items.append(String([ord(c) for c in folded], + case_flags=self.case_flags)) + + if not items: + # We can fall back to simple case-folding. + return self + + if len(items) < self.upper - self.lower + 1: + # Not all the characters are covered by the full case-folding. + items.insert(0, self) + + return Branch(items) + + def _compile(self, reverse, fuzzy): + flags = 0 + if self.positive: + flags |= POSITIVE_OP + if self.zerowidth: + flags |= ZEROWIDTH_OP + if fuzzy: + flags |= FUZZY_OP + return [(self._opcode[self.case_flags, reverse], flags, self.lower, + self.upper)] + + def dump(self, indent, reverse): + display_lower = ascii(chr(self.lower)).lstrip("bu") + display_upper = ascii(chr(self.upper)).lstrip("bu") + print("{}RANGE {} {} {}{}".format(INDENT * indent, + POS_TEXT[self.positive], display_lower, display_upper, + CASE_TEXT[self.case_flags])) + + def matches(self, ch): + return (self.lower <= ch <= self.upper) == self.positive + + def max_width(self): + return 1 + +class RefGroup(RegexBase): + _opcode = {(NOCASE, False): OP.REF_GROUP, (IGNORECASE, False): + OP.REF_GROUP_IGN, (FULLCASE, False): OP.REF_GROUP, (FULLIGNORECASE, + False): OP.REF_GROUP_FLD, (NOCASE, True): OP.REF_GROUP_REV, (IGNORECASE, + True): OP.REF_GROUP_IGN_REV, (FULLCASE, True): OP.REF_GROUP_REV, + (FULLIGNORECASE, True): OP.REF_GROUP_FLD_REV} + + def __init__(self, info, group, position, case_flags=NOCASE): + RegexBase.__init__(self) + self.info = info + self.group = group + self.position = position + self.case_flags = CASE_FLAGS_COMBINATIONS[case_flags] + + self._key = self.__class__, self.group, self.case_flags + + def fix_groups(self, pattern, reverse, fuzzy): + try: + self.group = int(self.group) + except ValueError: + try: + self.group = self.info.group_index[self.group] + except KeyError: + raise error("unknown group", pattern, self.position) + + if 
not 1 <= self.group <= self.info.group_count: + raise error("invalid group reference", pattern, self.position) + + self._key = self.__class__, self.group, self.case_flags + + def remove_captures(self): + raise error("group reference not allowed", pattern, self.position) + + def _compile(self, reverse, fuzzy): + flags = 0 + if fuzzy: + flags |= FUZZY_OP + return [(self._opcode[self.case_flags, reverse], flags, self.group)] + + def dump(self, indent, reverse): + print("{}REF_GROUP {}{}".format(INDENT * indent, self.group, + CASE_TEXT[self.case_flags])) + + def max_width(self): + return UNLIMITED + + def __del__(self): + self.info = None + +class SearchAnchor(ZeroWidthBase): + _opcode = OP.SEARCH_ANCHOR + _op_name = "SEARCH_ANCHOR" + +class Sequence(RegexBase): + def __init__(self, items=None): + RegexBase.__init__(self) + if items is None: + items = [] + + self.items = items + + def fix_groups(self, pattern, reverse, fuzzy): + for s in self.items: + s.fix_groups(pattern, reverse, fuzzy) + + def optimise(self, info, reverse): + # Flatten the sequences. + items = [] + for s in self.items: + s = s.optimise(info, reverse) + if isinstance(s, Sequence): + items.extend(s.items) + else: + items.append(s) + + return make_sequence(items) + + def pack_characters(self, info): + "Packs sequences of characters into strings." + items = [] + characters = [] + case_flags = NOCASE + for s in self.items: + if type(s) is Character and s.positive and not s.zerowidth: + if s.case_flags != case_flags: + # Different case sensitivity, so flush, unless neither the + # previous nor the new character are cased. + if s.case_flags or is_cased_i(info, s.value): + Sequence._flush_characters(info, characters, + case_flags, items) + + case_flags = s.case_flags + + characters.append(s.value) + elif type(s) is String or type(s) is Literal: + if s.case_flags != case_flags: + # Different case sensitivity, so flush, unless the neither + # the previous nor the new string are cased. 
+ if s.case_flags or any(is_cased_i(info, c) for c in + characters): + Sequence._flush_characters(info, characters, + case_flags, items) + + case_flags = s.case_flags + + characters.extend(s.characters) + else: + Sequence._flush_characters(info, characters, case_flags, items) + + items.append(s.pack_characters(info)) + + Sequence._flush_characters(info, characters, case_flags, items) + + return make_sequence(items) + + def remove_captures(self): + self.items = [s.remove_captures() for s in self.items] + return self + + def is_atomic(self): + return all(s.is_atomic() for s in self.items) + + def can_be_affix(self): + return False + + def contains_group(self): + return any(s.contains_group() for s in self.items) + + def get_firstset(self, reverse): + fs = set() + items = self.items + if reverse: + items.reverse() + for s in items: + fs |= s.get_firstset(reverse) + if None not in fs: + return fs + fs.discard(None) + + return fs | set([None]) + + def has_simple_start(self): + return bool(self.items) and self.items[0].has_simple_start() + + def _compile(self, reverse, fuzzy): + seq = self.items + if reverse: + seq = seq[::-1] + + code = [] + for s in seq: + code.extend(s.compile(reverse, fuzzy)) + + return code + + def dump(self, indent, reverse): + for s in self.items: + s.dump(indent, reverse) + + @staticmethod + def _flush_characters(info, characters, case_flags, items): + if not characters: + return + + # Disregard case_flags if all of the characters are case-less. 
+ if case_flags & IGNORECASE: + if not any(is_cased_i(info, c) for c in characters): + case_flags = NOCASE + + if (case_flags & FULLIGNORECASE) == FULLIGNORECASE: + literals = Sequence._fix_full_casefold(characters) + + for item in literals: + chars = item.characters + + if len(chars) == 1: + items.append(Character(chars[0], case_flags=item.case_flags)) + else: + items.append(String(chars, case_flags=item.case_flags)) + else: + if len(characters) == 1: + items.append(Character(characters[0], case_flags=case_flags)) + else: + items.append(String(characters, case_flags=case_flags)) + + characters[:] = [] + + @staticmethod + def _fix_full_casefold(characters): + # Split a literal needing full case-folding into chunks that need it + # and chunks that can use simple case-folding, which is faster. + expanded = [_regex.fold_case(FULL_CASE_FOLDING, c) for c in + _regex.get_expand_on_folding()] + string = _regex.fold_case(FULL_CASE_FOLDING, ''.join(chr(c) + for c in characters)).lower() + chunks = [] + + for e in expanded: + found = string.find(e) + + while found >= 0: + chunks.append((found, found + len(e))) + found = string.find(e, found + 1) + + pos = 0 + literals = [] + + for start, end in Sequence._merge_chunks(chunks): + if pos < start: + literals.append(Literal(characters[pos : start], + case_flags=IGNORECASE)) + + literals.append(Literal(characters[start : end], + case_flags=FULLIGNORECASE)) + pos = end + + if pos < len(characters): + literals.append(Literal(characters[pos : ], case_flags=IGNORECASE)) + + return literals + + @staticmethod + def _merge_chunks(chunks): + if len(chunks) < 2: + return chunks + + chunks.sort() + + start, end = chunks[0] + new_chunks = [] + + for s, e in chunks[1 : ]: + if s <= end: + end = max(end, e) + else: + new_chunks.append((start, end)) + start, end = s, e + + new_chunks.append((start, end)) + + return new_chunks + + def is_empty(self): + return all(i.is_empty() for i in self.items) + + def __eq__(self, other): + return type(self) 
is type(other) and self.items == other.items + + def max_width(self): + return sum(s.max_width() for s in self.items) + + def get_required_string(self, reverse): + seq = self.items + if reverse: + seq = seq[::-1] + + offset = 0 + + for s in seq: + ofs, req = s.get_required_string(reverse) + offset += ofs + if req: + return offset, req + + return offset, None + +class SetBase(RegexBase): + def __init__(self, info, items, positive=True, case_flags=NOCASE, + zerowidth=False): + RegexBase.__init__(self) + self.info = info + self.items = tuple(items) + self.positive = bool(positive) + self.case_flags = CASE_FLAGS_COMBINATIONS[case_flags] + self.zerowidth = bool(zerowidth) + + self.char_width = 1 + + self._key = (self.__class__, self.items, self.positive, + self.case_flags, self.zerowidth) + + def rebuild(self, positive, case_flags, zerowidth): + return type(self)(self.info, self.items, positive, case_flags, + zerowidth).optimise(self.info, False) + + def get_firstset(self, reverse): + return set([self]) + + def has_simple_start(self): + return True + + def _compile(self, reverse, fuzzy): + flags = 0 + if self.positive: + flags |= POSITIVE_OP + if self.zerowidth: + flags |= ZEROWIDTH_OP + if fuzzy: + flags |= FUZZY_OP + code = [(self._opcode[self.case_flags, reverse], flags)] + for m in self.items: + code.extend(m.compile()) + + code.append((OP.END, )) + + return code + + def dump(self, indent, reverse): + print("{}{} {}{}".format(INDENT * indent, self._op_name, + POS_TEXT[self.positive], CASE_TEXT[self.case_flags])) + for i in self.items: + i.dump(indent + 1, reverse) + + def _handle_case_folding(self, info, in_set): + # Is the set case-sensitive? + if not self.positive or not (self.case_flags & IGNORECASE) or in_set: + return self + + # Is full case-folding possible? + if (not (self.info.flags & UNICODE) or (self.case_flags & + FULLIGNORECASE) != FULLIGNORECASE): + return self + + # Get the characters which expand to multiple codepoints on folding. 
+ expanding_chars = _regex.get_expand_on_folding() + + # Get the folded characters in the set. + items = [] + seen = set() + for ch in expanding_chars: + if self.matches(ord(ch)): + folded = _regex.fold_case(FULL_CASE_FOLDING, ch) + if folded not in seen: + items.append(String([ord(c) for c in folded], + case_flags=self.case_flags)) + seen.add(folded) + + if not items: + # We can fall back to simple case-folding. + return self + + return Branch([self] + items) + + def max_width(self): + # Is the set case-sensitive? + if not self.positive or not (self.case_flags & IGNORECASE): + return 1 + + # Is full case-folding possible? + if (not (self.info.flags & UNICODE) or (self.case_flags & + FULLIGNORECASE) != FULLIGNORECASE): + return 1 + + # Get the characters which expand to multiple codepoints on folding. + expanding_chars = _regex.get_expand_on_folding() + + # Get the folded characters in the set. + seen = set() + for ch in expanding_chars: + if self.matches(ord(ch)): + folded = _regex.fold_case(FULL_CASE_FOLDING, ch) + seen.add(folded) + + if not seen: + return 1 + + return max(len(folded) for folded in seen) + + def __del__(self): + self.info = None + +class SetDiff(SetBase): + _opcode = {(NOCASE, False): OP.SET_DIFF, (IGNORECASE, False): + OP.SET_DIFF_IGN, (FULLCASE, False): OP.SET_DIFF, (FULLIGNORECASE, False): + OP.SET_DIFF_IGN, (NOCASE, True): OP.SET_DIFF_REV, (IGNORECASE, True): + OP.SET_DIFF_IGN_REV, (FULLCASE, True): OP.SET_DIFF_REV, (FULLIGNORECASE, + True): OP.SET_DIFF_IGN_REV} + _op_name = "SET_DIFF" + + def optimise(self, info, reverse, in_set=False): + items = self.items + if len(items) > 2: + items = [items[0], SetUnion(info, items[1 : ])] + + if len(items) == 1: + return items[0].with_flags(case_flags=self.case_flags, + zerowidth=self.zerowidth).optimise(info, reverse, in_set) + + self.items = tuple(m.optimise(info, reverse, in_set=True) for m in + items) + + return self._handle_case_folding(info, in_set) + + def matches(self, ch): + m = 
self.items[0].matches(ch) and not self.items[1].matches(ch) + return m == self.positive + +class SetInter(SetBase): + _opcode = {(NOCASE, False): OP.SET_INTER, (IGNORECASE, False): + OP.SET_INTER_IGN, (FULLCASE, False): OP.SET_INTER, (FULLIGNORECASE, + False): OP.SET_INTER_IGN, (NOCASE, True): OP.SET_INTER_REV, (IGNORECASE, + True): OP.SET_INTER_IGN_REV, (FULLCASE, True): OP.SET_INTER_REV, + (FULLIGNORECASE, True): OP.SET_INTER_IGN_REV} + _op_name = "SET_INTER" + + def optimise(self, info, reverse, in_set=False): + items = [] + for m in self.items: + m = m.optimise(info, reverse, in_set=True) + if isinstance(m, SetInter) and m.positive: + # Intersection in intersection. + items.extend(m.items) + else: + items.append(m) + + if len(items) == 1: + return items[0].with_flags(case_flags=self.case_flags, + zerowidth=self.zerowidth).optimise(info, reverse, in_set) + + self.items = tuple(items) + + return self._handle_case_folding(info, in_set) + + def matches(self, ch): + m = all(i.matches(ch) for i in self.items) + return m == self.positive + +class SetSymDiff(SetBase): + _opcode = {(NOCASE, False): OP.SET_SYM_DIFF, (IGNORECASE, False): + OP.SET_SYM_DIFF_IGN, (FULLCASE, False): OP.SET_SYM_DIFF, (FULLIGNORECASE, + False): OP.SET_SYM_DIFF_IGN, (NOCASE, True): OP.SET_SYM_DIFF_REV, + (IGNORECASE, True): OP.SET_SYM_DIFF_IGN_REV, (FULLCASE, True): + OP.SET_SYM_DIFF_REV, (FULLIGNORECASE, True): OP.SET_SYM_DIFF_IGN_REV} + _op_name = "SET_SYM_DIFF" + + def optimise(self, info, reverse, in_set=False): + items = [] + for m in self.items: + m = m.optimise(info, reverse, in_set=True) + if isinstance(m, SetSymDiff) and m.positive: + # Symmetric difference in symmetric difference. 
+ items.extend(m.items) + else: + items.append(m) + + if len(items) == 1: + return items[0].with_flags(case_flags=self.case_flags, + zerowidth=self.zerowidth).optimise(info, reverse, in_set) + + self.items = tuple(items) + + return self._handle_case_folding(info, in_set) + + def matches(self, ch): + m = False + for i in self.items: + m = m != i.matches(ch) + + return m == self.positive + +class SetUnion(SetBase): + _opcode = {(NOCASE, False): OP.SET_UNION, (IGNORECASE, False): + OP.SET_UNION_IGN, (FULLCASE, False): OP.SET_UNION, (FULLIGNORECASE, + False): OP.SET_UNION_IGN, (NOCASE, True): OP.SET_UNION_REV, (IGNORECASE, + True): OP.SET_UNION_IGN_REV, (FULLCASE, True): OP.SET_UNION_REV, + (FULLIGNORECASE, True): OP.SET_UNION_IGN_REV} + _op_name = "SET_UNION" + + def optimise(self, info, reverse, in_set=False): + items = [] + for m in self.items: + m = m.optimise(info, reverse, in_set=True) + if isinstance(m, SetUnion) and m.positive: + # Union in union. + items.extend(m.items) + else: + items.append(m) + + if len(items) == 1: + i = items[0] + return i.with_flags(positive=i.positive == self.positive, + case_flags=self.case_flags, + zerowidth=self.zerowidth).optimise(info, reverse, in_set) + + self.items = tuple(items) + + return self._handle_case_folding(info, in_set) + + def _compile(self, reverse, fuzzy): + flags = 0 + if self.positive: + flags |= POSITIVE_OP + if self.zerowidth: + flags |= ZEROWIDTH_OP + if fuzzy: + flags |= FUZZY_OP + + characters, others = defaultdict(list), [] + for m in self.items: + if isinstance(m, Character): + characters[m.positive].append(m.value) + else: + others.append(m) + + code = [(self._opcode[self.case_flags, reverse], flags)] + + for positive, values in characters.items(): + flags = 0 + if positive: + flags |= POSITIVE_OP + if len(values) == 1: + code.append((OP.CHARACTER, flags, values[0])) + else: + code.append((OP.STRING, flags, len(values)) + tuple(values)) + + for m in others: + code.extend(m.compile()) + + 
code.append((OP.END, )) + + return code + + def matches(self, ch): + m = any(i.matches(ch) for i in self.items) + return m == self.positive + +class Skip(ZeroWidthBase): + _op_name = "SKIP" + _opcode = OP.SKIP + +class StartOfLine(ZeroWidthBase): + _opcode = OP.START_OF_LINE + _op_name = "START_OF_LINE" + +class StartOfLineU(StartOfLine): + _opcode = OP.START_OF_LINE_U + _op_name = "START_OF_LINE_U" + +class StartOfString(ZeroWidthBase): + _opcode = OP.START_OF_STRING + _op_name = "START_OF_STRING" + +class StartOfWord(ZeroWidthBase): + _opcode = OP.START_OF_WORD + _op_name = "START_OF_WORD" + +class String(RegexBase): + _opcode = {(NOCASE, False): OP.STRING, (IGNORECASE, False): OP.STRING_IGN, + (FULLCASE, False): OP.STRING, (FULLIGNORECASE, False): OP.STRING_FLD, + (NOCASE, True): OP.STRING_REV, (IGNORECASE, True): OP.STRING_IGN_REV, + (FULLCASE, True): OP.STRING_REV, (FULLIGNORECASE, True): + OP.STRING_FLD_REV} + + def __init__(self, characters, case_flags=NOCASE): + self.characters = tuple(characters) + self.case_flags = CASE_FLAGS_COMBINATIONS[case_flags] + + if (self.case_flags & FULLIGNORECASE) == FULLIGNORECASE: + folded_characters = [] + for char in self.characters: + folded = _regex.fold_case(FULL_CASE_FOLDING, chr(char)) + folded_characters.extend(ord(c) for c in folded) + else: + folded_characters = self.characters + + self.folded_characters = tuple(folded_characters) + self.required = False + + self._key = self.__class__, self.characters, self.case_flags + + def get_firstset(self, reverse): + if reverse: + pos = -1 + else: + pos = 0 + return set([Character(self.characters[pos], + case_flags=self.case_flags)]) + + def has_simple_start(self): + return True + + def _compile(self, reverse, fuzzy): + flags = 0 + if fuzzy: + flags |= FUZZY_OP + if self.required: + flags |= REQUIRED_OP + return [(self._opcode[self.case_flags, reverse], flags, + len(self.folded_characters)) + self.folded_characters] + + def dump(self, indent, reverse): + display = 
ascii("".join(chr(c) for c in self.characters)).lstrip("bu") + print("{}STRING {}{}".format(INDENT * indent, display, + CASE_TEXT[self.case_flags])) + + def max_width(self): + return len(self.folded_characters) + + def get_required_string(self, reverse): + return 0, self + +class Literal(String): + def dump(self, indent, reverse): + literal = ''.join(chr(c) for c in self.characters) + display = ascii(literal).lstrip("bu") + print("{}LITERAL MATCH {}{}".format(INDENT * indent, display, + CASE_TEXT[self.case_flags])) + +class StringSet(Branch): + def __init__(self, info, name, case_flags=NOCASE): + self.info = info + self.name = name + self.case_flags = CASE_FLAGS_COMBINATIONS[case_flags] + + self._key = self.__class__, self.name, self.case_flags + + self.set_key = (name, self.case_flags) + if self.set_key not in info.named_lists_used: + info.named_lists_used[self.set_key] = len(info.named_lists_used) + + index = self.info.named_lists_used[self.set_key] + items = self.info.kwargs[self.name] + + case_flags = self.case_flags + + encoding = self.info.flags & _ALL_ENCODINGS + fold_flags = encoding | case_flags + + choices = [] + + for string in items: + if isinstance(string, str): + string = [ord(c) for c in string] + + choices.append([Character(c, case_flags=case_flags) for c in + string]) + + # Sort from longest to shortest. + choices.sort(key=len, reverse=True) + + self.branches = [Sequence(choice) for choice in choices] + + def dump(self, indent, reverse): + print("{}STRING_SET {}{}".format(INDENT * indent, self.name, + CASE_TEXT[self.case_flags])) + + def __del__(self): + self.info = None + +class Source: + "Scanner for the regular expression source string." 
+ def __init__(self, string): + if isinstance(string, str): + self.string = string + self.char_type = chr + else: + self.string = string.decode("latin-1") + self.char_type = lambda c: bytes([c]) + + self.pos = 0 + self.ignore_space = False + self.sep = string[ : 0] + + def get(self, override_ignore=False): + string = self.string + pos = self.pos + + try: + if self.ignore_space and not override_ignore: + while True: + if string[pos].isspace(): + # Skip over the whitespace. + pos += 1 + elif string[pos] == "#": + # Skip over the comment to the end of the line. + pos = string.index("\n", pos) + else: + break + + ch = string[pos] + self.pos = pos + 1 + return ch + except IndexError: + # We've reached the end of the string. + self.pos = pos + return string[ : 0] + except ValueError: + # The comment extended to the end of the string. + self.pos = len(string) + return string[ : 0] + + def get_many(self, count=1): + string = self.string + pos = self.pos + + try: + if self.ignore_space: + substring = [] + + while len(substring) < count: + while True: + if string[pos].isspace(): + # Skip over the whitespace. + pos += 1 + elif string[pos] == "#": + # Skip over the comment to the end of the line. + pos = string.index("\n", pos) + else: + break + + substring.append(string[pos]) + pos += 1 + + substring = "".join(substring) + else: + substring = string[pos : pos + count] + pos += len(substring) + + self.pos = pos + return substring + except IndexError: + # We've reached the end of the string. + self.pos = len(string) + return "".join(substring) + except ValueError: + # The comment extended to the end of the string. + self.pos = len(string) + return "".join(substring) + + def get_while(self, test_set, include=True, keep_spaces=False): + string = self.string + pos = self.pos + + if self.ignore_space and not keep_spaces: + try: + substring = [] + + while True: + if string[pos].isspace(): + # Skip over the whitespace. 
+ pos += 1 + elif string[pos] == "#": + # Skip over the comment to the end of the line. + pos = string.index("\n", pos) + elif (string[pos] in test_set) == include: + substring.append(string[pos]) + pos += 1 + else: + break + + self.pos = pos + except IndexError: + # We've reached the end of the string. + self.pos = len(string) + except ValueError: + # The comment extended to the end of the string. + self.pos = len(string) + + return "".join(substring) + else: + try: + while (string[pos] in test_set) == include: + pos += 1 + + substring = string[self.pos : pos] + + self.pos = pos + + return substring + except IndexError: + # We've reached the end of the string. + substring = string[self.pos : pos] + + self.pos = pos + + return substring + + def skip_while(self, test_set, include=True): + string = self.string + pos = self.pos + + try: + if self.ignore_space: + while True: + if string[pos].isspace(): + # Skip over the whitespace. + pos += 1 + elif string[pos] == "#": + # Skip over the comment to the end of the line. + pos = string.index("\n", pos) + elif (string[pos] in test_set) == include: + pos += 1 + else: + break + else: + while (string[pos] in test_set) == include: + pos += 1 + + self.pos = pos + except IndexError: + # We've reached the end of the string. + self.pos = len(string) + except ValueError: + # The comment extended to the end of the string. + self.pos = len(string) + + def match(self, substring): + string = self.string + pos = self.pos + + if self.ignore_space: + try: + for c in substring: + while True: + if string[pos].isspace(): + # Skip over the whitespace. + pos += 1 + elif string[pos] == "#": + # Skip over the comment to the end of the line. + pos = string.index("\n", pos) + else: + break + + if string[pos] != c: + return False + + pos += 1 + + self.pos = pos + + return True + except IndexError: + # We've reached the end of the string. + return False + except ValueError: + # The comment extended to the end of the string. 
+ return False + else: + if not string.startswith(substring, pos): + return False + + self.pos = pos + len(substring) + + return True + + def expect(self, substring): + if not self.match(substring): + raise error("missing {}".format(substring), self.string, self.pos) + + def at_end(self): + string = self.string + pos = self.pos + + try: + if self.ignore_space: + while True: + if string[pos].isspace(): + pos += 1 + elif string[pos] == "#": + pos = string.index("\n", pos) + else: + break + + return pos >= len(string) + except IndexError: + # We've reached the end of the string. + return True + except ValueError: + # The comment extended to the end of the string. + return True + +class Info: + "Info about the regular expression." + + def __init__(self, flags=0, char_type=None, kwargs={}): + flags |= DEFAULT_FLAGS[(flags & _ALL_VERSIONS) or DEFAULT_VERSION] + self.flags = flags + self.global_flags = flags + self.inline_locale = False + + self.kwargs = kwargs + + self.group_count = 0 + self.group_index = {} + self.group_name = {} + self.char_type = char_type + self.named_lists_used = {} + self.open_groups = [] + self.open_group_count = {} + self.defined_groups = {} + self.group_calls = [] + self.private_groups = {} + + def open_group(self, name=None): + group = self.group_index.get(name) + if group is None: + while True: + self.group_count += 1 + if name is None or self.group_count not in self.group_name: + break + + group = self.group_count + if name: + self.group_index[name] = group + self.group_name[group] = name + + if group in self.open_groups: + # We have a nested named group. We'll assign it a private group + # number, initially negative until we can assign a proper + # (positive) number. 
+ group_alias = -(len(self.private_groups) + 1) + self.private_groups[group_alias] = group + group = group_alias + + self.open_groups.append(group) + self.open_group_count[group] = self.open_group_count.get(group, 0) + 1 + + return group + + def close_group(self): + self.open_groups.pop() + + def is_open_group(self, name): + # In version 1, a group reference can refer to an open group. We'll + # just pretend the group isn't open. + version = (self.flags & _ALL_VERSIONS) or DEFAULT_VERSION + if version == VERSION1: + return False + + if name.isdigit(): + group = int(name) + else: + group = self.group_index.get(name) + + return group in self.open_groups + +def _check_group_features(info, parsed): + """Checks whether the reverse and fuzzy features of the group calls match + the groups which they call. + """ + call_refs = {} + additional_groups = [] + for call, reverse, fuzzy in info.group_calls: + # Look up the reference of this group call. + key = (call.group, reverse, fuzzy) + ref = call_refs.get(key) + if ref is None: + # This group doesn't have a reference yet, so look up its features. + if call.group == 0: + # Calling the pattern as a whole. + rev = bool(info.flags & REVERSE) + fuz = isinstance(parsed, Fuzzy) + if (rev, fuz) != (reverse, fuzzy): + # The pattern as a whole doesn't have the features we want, + # so we'll need to make a copy of it with the desired + # features. + additional_groups.append((CallRef(len(call_refs), parsed), + reverse, fuzzy)) + else: + # Calling a capture group. + def_info = info.defined_groups[call.group] + group = def_info[0] + if def_info[1 : ] != (reverse, fuzzy): + # The group doesn't have the features we want, so we'll + # need to make a copy of it with the desired features. 
+ additional_groups.append((group, reverse, fuzzy)) + + ref = len(call_refs) + call_refs[key] = ref + + call.call_ref = ref + + info.call_refs = call_refs + info.additional_groups = additional_groups + +def _get_required_string(parsed, flags): + "Gets the required string and related info of a parsed pattern." + + req_offset, required = parsed.get_required_string(bool(flags & REVERSE)) + if required: + required.required = True + if req_offset >= UNLIMITED: + req_offset = -1 + + req_flags = required.case_flags + if not (flags & UNICODE): + req_flags &= ~UNICODE + + req_chars = required.folded_characters + else: + req_offset = 0 + req_chars = () + req_flags = 0 + + return req_offset, req_chars, req_flags + +class Scanner: + def __init__(self, lexicon, flags=0): + self.lexicon = lexicon + + # Combine phrases into a compound pattern. + patterns = [] + for phrase, action in lexicon: + # Parse the regular expression. + source = Source(phrase) + info = Info(flags, source.char_type) + source.ignore_space = bool(info.flags & VERBOSE) + parsed = _parse_pattern(source, info) + if not source.at_end(): + raise error("unbalanced parenthesis", source.string, + source.pos) + + # We want to forbid capture groups within each phrase. + patterns.append(parsed.remove_captures()) + + # Combine all the subpatterns into one pattern. + info = Info(flags) + patterns = [Group(info, g + 1, p) for g, p in enumerate(patterns)] + parsed = Branch(patterns) + + # Optimise the compound pattern. + reverse = bool(info.flags & REVERSE) + parsed = parsed.optimise(info, reverse) + parsed = parsed.pack_characters(info) + + # Get the required string. + req_offset, req_chars, req_flags = _get_required_string(parsed, + info.flags) + + # Check the features of the groups. + _check_group_features(info, parsed) + + # Complain if there are any group calls. They are not supported by the + # Scanner class. 
+ if info.call_refs: + raise error("recursive regex not supported by Scanner", + source.string, source.pos) + + reverse = bool(info.flags & REVERSE) + + # Compile the compound pattern. The result is a list of tuples. + code = parsed.compile(reverse) + [(OP.SUCCESS, )] + + # Flatten the code into a list of ints. + code = _flatten_code(code) + + if not parsed.has_simple_start(): + # Get the first set, if possible. + try: + fs_code = _compile_firstset(info, parsed.get_firstset(reverse)) + fs_code = _flatten_code(fs_code) + code = fs_code + code + except _FirstSetError: + pass + + # Check the global flags for conflicts. + version = (info.flags & _ALL_VERSIONS) or DEFAULT_VERSION + if version not in (0, VERSION0, VERSION1): + raise ValueError("VERSION0 and VERSION1 flags are mutually incompatible") + + # Create the PatternObject. + # + # Local flags like IGNORECASE affect the code generation, but aren't + # needed by the PatternObject itself. Conversely, global flags like + # LOCALE _don't_ affect the code generation but _are_ needed by the + # PatternObject. + self.scanner = _regex.compile(None, (flags & GLOBAL_FLAGS) | version, + code, {}, {}, {}, [], req_offset, req_chars, req_flags, + len(patterns)) + + def scan(self, string): + result = [] + append = result.append + match = self.scanner.scanner(string).match + i = 0 + while True: + m = match() + if not m: + break + j = m.end() + if i == j: + break + action = self.lexicon[m.lastindex - 1][1] + if hasattr(action, '__call__'): + self.match = m + action = action(self, m.group()) + if action is not None: + append(action) + i = j + + return result, string[i : ] + +# Get the known properties dict. +PROPERTIES = _regex.get_properties() + +# Build the inverse of the properties dict. 
+PROPERTY_NAMES = {} +for prop_name, (prop_id, values) in PROPERTIES.items(): + name, prop_values = PROPERTY_NAMES.get(prop_id, ("", {})) + name = max(name, prop_name, key=len) + PROPERTY_NAMES[prop_id] = name, prop_values + + for val_name, val_id in values.items(): + prop_values[val_id] = max(prop_values.get(val_id, ""), val_name, + key=len) + +# Character escape sequences. +CHARACTER_ESCAPES = { + "a": "\a", + "b": "\b", + "f": "\f", + "n": "\n", + "r": "\r", + "t": "\t", + "v": "\v", +} + +# Predefined character set escape sequences. +CHARSET_ESCAPES = { + "d": lookup_property(None, "Digit", True), + "D": lookup_property(None, "Digit", False), + "h": lookup_property(None, "Blank", True), + "s": lookup_property(None, "Space", True), + "S": lookup_property(None, "Space", False), + "w": lookup_property(None, "Word", True), + "W": lookup_property(None, "Word", False), +} + +# Positional escape sequences. +POSITION_ESCAPES = { + "A": StartOfString(), + "b": Boundary(), + "B": Boundary(False), + "K": Keep(), + "m": StartOfWord(), + "M": EndOfWord(), + "Z": EndOfString(), +} + +# Positional escape sequences when WORD flag set. +WORD_POSITION_ESCAPES = dict(POSITION_ESCAPES) +WORD_POSITION_ESCAPES.update({ + "b": DefaultBoundary(), + "B": DefaultBoundary(False), + "m": DefaultStartOfWord(), + "M": DefaultEndOfWord(), +}) + +# Regex control verbs. +VERBS = { + "FAIL": Failure(), + "F": Failure(), + "PRUNE": Prune(), + "SKIP": Skip(), +} diff --git a/vlmpy310/lib/python3.10/site-packages/regex/regex.py b/vlmpy310/lib/python3.10/site-packages/regex/regex.py new file mode 100644 index 0000000000000000000000000000000000000000..0fdb4da983c495d15db51e593ec880c8513ed55f --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/regex/regex.py @@ -0,0 +1,746 @@ +# +# Secret Labs' Regular Expression Engine +# +# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved. +# +# This version of the SRE library can be redistributed under CNRI's +# Python 1.6 license. 
For any other use, please contact Secret Labs +# AB (info@pythonware.com). +# +# Portions of this engine have been developed in cooperation with +# CNRI. Hewlett-Packard provided funding for 1.6 integration and +# other compatibility work. +# +# 2010-01-16 mrab Python front-end re-written and extended + +r"""Support for regular expressions (RE). + +This module provides regular expression matching operations similar to those +found in Perl. It supports both 8-bit and Unicode strings; both the pattern and +the strings being processed can contain null bytes and characters outside the +US ASCII range. + +Regular expressions can contain both special and ordinary characters. Most +ordinary characters, like "A", "a", or "0", are the simplest regular +expressions; they simply match themselves. You can concatenate ordinary +characters, so last matches the string 'last'. + +There are a few differences between the old (legacy) behaviour and the new +(enhanced) behaviour, which are indicated by VERSION0 or VERSION1. + +The special characters are: + "." Matches any character except a newline. + "^" Matches the start of the string. + "$" Matches the end of the string or just before the + newline at the end of the string. + "*" Matches 0 or more (greedy) repetitions of the preceding + RE. Greedy means that it will match as many repetitions + as possible. + "+" Matches 1 or more (greedy) repetitions of the preceding + RE. + "?" Matches 0 or 1 (greedy) of the preceding RE. + *?,+?,?? Non-greedy versions of the previous three special + characters. + *+,++,?+ Possessive versions of the previous three special + characters. + {m,n} Matches from m to n repetitions of the preceding RE. + {m,n}? Non-greedy version of the above. + {m,n}+ Possessive version of the above. + {...} Fuzzy matching constraints. + "\\" Either escapes special characters or signals a special + sequence. + [...] Indicates a set of characters. A "^" as the first + character indicates a complementing set. 
+ "|" A|B, creates an RE that will match either A or B. + (...) Matches the RE inside the parentheses. The contents are + captured and can be retrieved or matched later in the + string. + (?flags-flags) VERSION1: Sets/clears the flags for the remainder of + the group or pattern; VERSION0: Sets the flags for the + entire pattern. + (?:...) Non-capturing version of regular parentheses. + (?>...) Atomic non-capturing version of regular parentheses. + (?flags-flags:...) Non-capturing version of regular parentheses with local + flags. + (?P...) The substring matched by the group is accessible by + name. + (?...) The substring matched by the group is accessible by + name. + (?P=name) Matches the text matched earlier by the group named + name. + (?#...) A comment; ignored. + (?=...) Matches if ... matches next, but doesn't consume the + string. + (?!...) Matches if ... doesn't match next. + (?<=...) Matches if preceded by .... + (? Matches the text matched by the group named name. + \G Matches the empty string, but only at the position where + the search started. + \h Matches horizontal whitespace. + \K Keeps only what follows for the entire match. + \L Named list. The list is provided as a keyword argument. + \m Matches the empty string, but only at the start of a word. + \M Matches the empty string, but only at the end of a word. + \n Matches the newline character. + \N{name} Matches the named character. + \p{name=value} Matches the character if its property has the specified + value. + \P{name=value} Matches the character if its property hasn't the specified + value. + \r Matches the carriage-return character. + \s Matches any whitespace character; equivalent to + [ \t\n\r\f\v]. + \S Matches any non-whitespace character; equivalent to [^\s]. + \t Matches the tab character. + \uXXXX Matches the Unicode codepoint with 4-digit hex code XXXX. + \UXXXXXXXX Matches the Unicode codepoint with 8-digit hex code + XXXXXXXX. + \v Matches the vertical tab character. 
+ \w Matches any alphanumeric character; equivalent to + [a-zA-Z0-9_] when matching a bytestring or a Unicode string + with the ASCII flag, or the whole range of Unicode + alphanumeric characters (letters plus digits plus + underscore) when matching a Unicode string. With LOCALE, it + will match the set [0-9_] plus characters defined as + letters for the current locale. + \W Matches the complement of \w; equivalent to [^\w]. + \xXX Matches the character with 2-digit hex code XX. + \X Matches a grapheme. + \Z Matches only at the end of the string. + \\ Matches a literal backslash. + +This module exports the following functions: + match Match a regular expression pattern at the beginning of a string. + fullmatch Match a regular expression pattern against all of a string. + search Search a string for the presence of a pattern. + sub Substitute occurrences of a pattern found in a string using a + template string. + subf Substitute occurrences of a pattern found in a string using a + format string. + subn Same as sub, but also return the number of substitutions made. + subfn Same as subf, but also return the number of substitutions made. + split Split a string by the occurrences of a pattern. VERSION1: will + split at zero-width match; VERSION0: won't split at zero-width + match. + splititer Return an iterator yielding the parts of a split string. + findall Find all occurrences of a pattern in a string. + finditer Return an iterator yielding a match object for each match. + compile Compile a pattern into a Pattern object. + purge Clear the regular expression cache. + escape Backslash all non-alphanumerics or special characters in a + string. + +Most of the functions support a concurrent parameter: if True, the GIL will be +released during matching, allowing other Python threads to run concurrently. If +the string changes during matching, the behaviour is undefined. This parameter +is not needed when working on the builtin (immutable) string classes. 
+ +Some of the functions in this module take flags as optional parameters. Most of +these flags can also be set within an RE: + A a ASCII Make \w, \W, \b, \B, \d, and \D match the + corresponding ASCII character categories. Default + when matching a bytestring. + B b BESTMATCH Find the best fuzzy match (default is first). + D DEBUG Print the parsed pattern. + E e ENHANCEMATCH Attempt to improve the fit after finding the first + fuzzy match. + F f FULLCASE Use full case-folding when performing + case-insensitive matching in Unicode. + I i IGNORECASE Perform case-insensitive matching. + L L LOCALE Make \w, \W, \b, \B, \d, and \D dependent on the + current locale. (One byte per character only.) + M m MULTILINE "^" matches the beginning of lines (after a newline) + as well as the string. "$" matches the end of lines + (before a newline) as well as the end of the string. + P p POSIX Perform POSIX-standard matching (leftmost longest). + R r REVERSE Searches backwards. + S s DOTALL "." matches any character at all, including the + newline. + U u UNICODE Make \w, \W, \b, \B, \d, and \D dependent on the + Unicode locale. Default when matching a Unicode + string. + V0 V0 VERSION0 Turn on the old legacy behaviour. + V1 V1 VERSION1 Turn on the new enhanced behaviour. This flag + includes the FULLCASE flag. + W w WORD Make \b and \B work with default Unicode word breaks + and make ".", "^" and "$" work with Unicode line + breaks. + X x VERBOSE Ignore whitespace and comments for nicer looking REs. + +This module also defines an exception 'error'. + +""" + +# Public symbols. 
__all__ = ["cache_all", "compile", "DEFAULT_VERSION", "escape", "findall",
  "finditer", "fullmatch", "match", "purge", "search", "split", "splititer",
  "sub", "subf", "subfn", "subn", "template", "Scanner", "A", "ASCII", "B",
  "BESTMATCH", "D", "DEBUG", "E", "ENHANCEMATCH", "S", "DOTALL", "F",
  "FULLCASE", "I", "IGNORECASE", "L", "LOCALE", "M", "MULTILINE", "P", "POSIX",
  "R", "REVERSE", "T", "TEMPLATE", "U", "UNICODE", "V0", "VERSION0", "V1",
  "VERSION1", "X", "VERBOSE", "W", "WORD", "error", "Regex", "__version__",
  "__doc__", "RegexFlag"]

__version__ = "2.5.148"

# --------------------------------------------------------------------
# Public interface.
#
# Every function below is a thin convenience wrapper: it obtains a compiled
# pattern object from _compile (which consults the module-level cache) and
# then delegates to the matching method of that object.

def match(pattern, string, flags=0, pos=None, endpos=None, partial=False,
  concurrent=None, timeout=None, ignore_unused=False, **kwargs):
    """Apply the pattern at the start of the string; return a match object,
    or None if the pattern does not match there."""
    return _compile(pattern, flags, ignore_unused, kwargs, True).match(string,
      pos, endpos, concurrent, partial, timeout)

def fullmatch(pattern, string, flags=0, pos=None, endpos=None, partial=False,
  concurrent=None, timeout=None, ignore_unused=False, **kwargs):
    """Apply the pattern against the whole of the string; return a match
    object, or None if the pattern does not match all of it."""
    return _compile(pattern, flags, ignore_unused, kwargs,
      True).fullmatch(string, pos, endpos, concurrent, partial, timeout)

def search(pattern, string, flags=0, pos=None, endpos=None, partial=False,
  concurrent=None, timeout=None, ignore_unused=False, **kwargs):
    """Scan the string for the first place the pattern matches; return a
    match object, or None if no match was found."""
    return _compile(pattern, flags, ignore_unused, kwargs, True).search(string,
      pos, endpos, concurrent, partial, timeout)

def sub(pattern, repl, string, count=0, flags=0, pos=None, endpos=None,
  concurrent=None, timeout=None, ignore_unused=False, **kwargs):
    """Return the string obtained by replacing the leftmost (or, for a reverse
    pattern, rightmost) non-overlapping occurrences of the pattern in string
    with repl. repl may be a string, in which case backslash escapes in it are
    processed, or a callable, which receives the match object and must return
    the replacement string."""
    return _compile(pattern, flags, ignore_unused, kwargs, True).sub(repl,
      string, count, pos, endpos, concurrent, timeout)

def subf(pattern, format, string, count=0, flags=0, pos=None, endpos=None,
  concurrent=None, timeout=None, ignore_unused=False, **kwargs):
    """Return the string obtained by replacing the leftmost (or, for a reverse
    pattern, rightmost) non-overlapping occurrences of the pattern in string
    with format. format may be a string, in which case it is treated as a
    format string, or a callable, which receives the match object and must
    return the replacement string."""
    return _compile(pattern, flags, ignore_unused, kwargs, True).subf(format,
      string, count, pos, endpos, concurrent, timeout)

def subn(pattern, repl, string, count=0, flags=0, pos=None, endpos=None,
  concurrent=None, timeout=None, ignore_unused=False, **kwargs):
    """Return the 2-tuple (new_string, number), where new_string is obtained
    by replacing the leftmost (or, for a reverse pattern, rightmost)
    non-overlapping occurrences of the pattern in string with repl, and number
    is how many substitutions were made. repl may be a string (backslash
    escapes processed) or a callable (receives the match object, returns the
    replacement string)."""
    return _compile(pattern, flags, ignore_unused, kwargs, True).subn(repl,
      string, count, pos, endpos, concurrent, timeout)

def subfn(pattern, format, string, count=0, flags=0, pos=None, endpos=None,
  concurrent=None, timeout=None, ignore_unused=False, **kwargs):
    """Return the 2-tuple (new_string, number), where new_string is obtained
    by replacing the leftmost (or, for a reverse pattern, rightmost)
    non-overlapping occurrences of the pattern in string with format, and
    number is how many substitutions were made. format may be a string
    (treated as a format string) or a callable (receives the match object,
    returns the replacement string)."""
    return _compile(pattern, flags, ignore_unused, kwargs, True).subfn(format,
      string, count, pos, endpos, concurrent, timeout)

def split(pattern, string, maxsplit=0, flags=0, concurrent=None, timeout=None,
  ignore_unused=False, **kwargs):
    """Split the source string on occurrences of the pattern and return the
    list of resulting substrings. If the pattern contains capture groups, the
    text of every group is also included in the result. A nonzero maxsplit
    limits the number of splits; the remainder of the string becomes the final
    list element."""
    return _compile(pattern, flags, ignore_unused, kwargs, True).split(string,
      maxsplit, concurrent, timeout)

def splititer(pattern, string, maxsplit=0, flags=0, concurrent=None,
  timeout=None, ignore_unused=False, **kwargs):
    "Return an iterator yielding the parts of a split string."
    return _compile(pattern, flags, ignore_unused, kwargs,
      True).splititer(string, maxsplit, concurrent, timeout)

def findall(pattern, string, flags=0, pos=None, endpos=None, overlapped=False,
  concurrent=None, timeout=None, ignore_unused=False, **kwargs):
    """Return a list of all matches in the string (overlapping ones too when
    overlapped is True). With one or more groups in the pattern the list
    contains the group texts instead -- tuples of them when there is more than
    one group. Empty matches are included."""
    return _compile(pattern, flags, ignore_unused, kwargs,
      True).findall(string, pos, endpos, overlapped, concurrent, timeout)

def finditer(pattern, string, flags=0, pos=None, endpos=None, overlapped=False,
  partial=False, concurrent=None, timeout=None, ignore_unused=False, **kwargs):
    """Return an iterator producing a match object for every match in the
    string (overlapping ones too when overlapped is True). Empty matches are
    included."""
    return _compile(pattern, flags, ignore_unused, kwargs,
      True).finditer(string, pos, endpos, overlapped, concurrent, partial,
      timeout)

def compile(pattern, flags=0, ignore_unused=False, cache_pattern=None, **kwargs):
    "Compile a regular expression pattern into a pattern object."
    # A cache_pattern of None means "use the module-wide cache_all setting".
    should_cache = _cache_all if cache_pattern is None else cache_pattern
    return _compile(pattern, flags, ignore_unused, kwargs, should_cache)

def purge():
    "Clear the regular expression cache"
    _locale_sensitive.clear()
    _cache.clear()

# Whether to cache all patterns.
_cache_all = True

def cache_all(value=True):
    """Set whether every pattern is cached, even those compiled explicitly
    with compile(). Passing None leaves the setting unchanged and returns the
    current value."""
    global _cache_all

    if value is None:
        return _cache_all

    _cache_all = value

def template(pattern, flags=0):
    "Compile a template pattern, returning a pattern object."
    return _compile(pattern, flags | TEMPLATE, False, {}, False)

def escape(pattern, special_only=True, literal_spaces=False):
    """Escape a string for use as a literal in a pattern. If special_only is
    True, escape only special characters, else escape all non-alphanumeric
    characters. If literal_spaces is True, don't escape spaces."""
    # Work on a str; bytes round-trips losslessly through Latin-1.
    if isinstance(pattern, bytes):
        text = pattern.decode("latin-1")
    else:
        text = pattern

    # Decide, per character, whether a backslash is required.
    if special_only:
        def needs_backslash(ch):
            return ch in _METACHARS or ch.isspace()
    else:
        def needs_backslash(ch):
            return ch not in _ALNUM

    pieces = []
    for ch in text:
        if ch == " " and literal_spaces:
            pieces.append(ch)
        elif needs_backslash(ch):
            pieces.append("\\" + ch)
        else:
            pieces.append(ch)

    escaped = "".join(pieces)
    # Convert back to bytes if that is what was passed in.
    if isinstance(pattern, bytes):
        escaped = escaped.encode("latin-1")

    return escaped

# --------------------------------------------------------------------
# Internals.

import regex._regex_core as _regex_core
import regex._regex as _regex
from threading import RLock as _RLock
from locale import getpreferredencoding as _getpreferredencoding
from regex._regex_core import *
from regex._regex_core import (_ALL_VERSIONS, _ALL_ENCODINGS, _FirstSetError,
  _UnscopedFlagSet, _check_group_features, _compile_firstset,
  _compile_replacement, _flatten_code, _fold_case, _get_required_string,
  _parse_pattern, _shrink_cache)
from regex._regex_core import (ALNUM as _ALNUM, Info as _Info, OP as _OP, Source
  as _Source, Fuzzy as _Fuzzy)

# Version 0 is the old behaviour, compatible with the original 're' module.
# Version 1 is the new behaviour, which differs slightly.

DEFAULT_VERSION = VERSION0

# Characters that escape() must backslash when special_only is True.
_METACHARS = frozenset("()[]{}?*+|^$\\.-#&~")

_regex_core.DEFAULT_VERSION = DEFAULT_VERSION

# Caches for the patterns and replacements.
_cache = {}
_cache_lock = _RLock()
_named_args = {}
_replacement_cache = {}
_locale_sensitive = {}

# Maximum size of the cache.
_MAXCACHE = 500
_MAXREPCACHE = 500

def _compile(pattern, flags, ignore_unused, kwargs, cache_it):
    """Compiles a regular expression to a PatternObject.

    pattern may be a str, bytes, or an already-compiled Pattern (returned
    unchanged, provided flags is zero). kwargs supplies any named lists
    (\\L<name>) the pattern refers to. When cache_it is true the compiled
    result is stored in, and looked up from, the module-level _cache.
    """

    global DEFAULT_VERSION
    # Re-read DEFAULT_VERSION from the package in case the user changed it
    # after import; ignore failure (e.g. during interpreter shutdown --
    # TODO confirm the intended failure mode).
    try:
        from regex import DEFAULT_VERSION
    except ImportError:
        pass

    # We won't bother to cache the pattern if we're debugging.
    if (flags & DEBUG) != 0:
        cache_it = False

    # What locale is this pattern using? Unknown patterns default to True
    # (possibly locale-sensitive) until parsing records the real answer below.
    locale_key = (type(pattern), pattern)
    if _locale_sensitive.get(locale_key, True) or (flags & LOCALE) != 0:
        # This pattern is, or might be, locale-sensitive, so the cache key
        # must include the current locale's preferred encoding.
        pattern_locale = _getpreferredencoding()
    else:
        # This pattern is definitely not locale-sensitive.
        pattern_locale = None

    def complain_unused_args():
        # Closure: reads the enclosing args_needed, which is set either from
        # the _named_args cache (fast path) or while building named lists.
        if ignore_unused:
            return

        # Complain about any unused keyword arguments, possibly resulting
        # from a typo.
        unused_kwargs = set(kwargs) - {k for k, v in args_needed}
        if unused_kwargs:
            any_one = next(iter(unused_kwargs))
            raise ValueError('unused keyword argument {!a}'.format(any_one))

    if cache_it:
        try:
            # Do we know what keyword arguments are needed?
            args_key = pattern, type(pattern), flags
            args_needed = _named_args[args_key]

            # Are we being provided with its required keyword arguments?
            args_supplied = set()
            if args_needed:
                for k, v in args_needed:
                    try:
                        args_supplied.add((k, frozenset(kwargs[k])))
                    except KeyError:
                        raise error("missing named list: {!r}".format(k))

            complain_unused_args()

            args_supplied = frozenset(args_supplied)

            # Have we already seen this regular expression and named list?
            pattern_key = (pattern, type(pattern), flags, args_supplied,
              DEFAULT_VERSION, pattern_locale)
            return _cache[pattern_key]
        except KeyError:
            # It's a new pattern, or new named list for a known pattern.
            pass

    # Guess the encoding from the class of the pattern string.
    if isinstance(pattern, str):
        guess_encoding = UNICODE
    elif isinstance(pattern, bytes):
        guess_encoding = ASCII
    elif isinstance(pattern, Pattern):
        # Already compiled: flags cannot be re-applied, so reject them.
        if flags:
            raise ValueError("cannot process flags argument with a compiled pattern")

        return pattern
    else:
        raise TypeError("first argument must be a string or compiled pattern")

    # Set the default version in the core code in case it has been changed.
    _regex_core.DEFAULT_VERSION = DEFAULT_VERSION

    global_flags = flags

    # Parse, retrying if an inline global flag (e.g. "(?V1)") surfaces after
    # parsing has already started; _UnscopedFlagSet carries the new flags.
    while True:
        caught_exception = None
        try:
            source = _Source(pattern)
            info = _Info(global_flags, source.char_type, kwargs)
            info.guess_encoding = guess_encoding
            source.ignore_space = bool(info.flags & VERBOSE)
            parsed = _parse_pattern(source, info)
            break
        except _UnscopedFlagSet:
            # Remember the global flags for the next attempt.
            global_flags = info.global_flags
        except error as e:
            caught_exception = e

        # Re-raise outside the except block so the original traceback isn't
        # chained as the active exception context.
        if caught_exception:
            raise error(caught_exception.msg, caught_exception.pattern,
              caught_exception.pos)

    if not source.at_end():
        raise error("unbalanced parenthesis", pattern, source.pos)

    # Check the global flags for conflicts.
    version = (info.flags & _ALL_VERSIONS) or DEFAULT_VERSION
    if version not in (0, VERSION0, VERSION1):
        raise ValueError("VERSION0 and VERSION1 flags are mutually incompatible")

    if (info.flags & _ALL_ENCODINGS) not in (0, ASCII, LOCALE, UNICODE):
        raise ValueError("ASCII, LOCALE and UNICODE flags are mutually incompatible")

    if isinstance(pattern, bytes) and (info.flags & UNICODE):
        raise ValueError("cannot use UNICODE flag with a bytes pattern")

    # No explicit encoding flag: default by pattern type (str -> UNICODE,
    # bytes -> ASCII).
    if not (info.flags & _ALL_ENCODINGS):
        if isinstance(pattern, str):
            info.flags |= UNICODE
        else:
            info.flags |= ASCII

    reverse = bool(info.flags & REVERSE)
    fuzzy = isinstance(parsed, _Fuzzy)

    # Remember whether this pattern has an inline locale flag, for the
    # locale_key lookup on the next compile of the same pattern.
    _locale_sensitive[locale_key] = info.inline_locale

    # Fix the group references.
    caught_exception = None
    try:
        parsed.fix_groups(pattern, reverse, False)
    except error as e:
        caught_exception = e

    if caught_exception:
        raise error(caught_exception.msg, caught_exception.pattern,
          caught_exception.pos)

    # Should we print the parsed pattern?
    if flags & DEBUG:
        parsed.dump(indent=0, reverse=reverse)

    # Optimise the parsed pattern.
    parsed = parsed.optimise(info, reverse)
    parsed = parsed.pack_characters(info)

    # Get the required string.
    req_offset, req_chars, req_flags = _get_required_string(parsed, info.flags)

    # Build the named lists (\L<name> references, supplied via kwargs).
    named_lists = {}
    named_list_indexes = [None] * len(info.named_lists_used)
    args_needed = set()
    for key, index in info.named_lists_used.items():
        name, case_flags = key
        values = frozenset(kwargs[name])
        if case_flags:
            # Case-insensitive list: store case-folded items for matching.
            items = frozenset(_fold_case(info, v) for v in values)
        else:
            items = values
        named_lists[name] = values
        named_list_indexes[index] = items
        args_needed.add((name, values))

    complain_unused_args()

    # Check the features of the groups.
    _check_group_features(info, parsed)

    # Compile the parsed pattern. The result is a list of tuples.
    code = parsed.compile(reverse)

    # Is there a group call to the pattern as a whole?
    key = (0, reverse, fuzzy)
    ref = info.call_refs.get(key)
    if ref is not None:
        code = [(_OP.CALL_REF, ref)] + code + [(_OP.END, )]

    # Add the final 'success' opcode.
    code += [(_OP.SUCCESS, )]

    # Compile the additional copies of the groups that we need.
    for group, rev, fuz in info.additional_groups:
        code += group.compile(rev, fuz)

    # Flatten the code into a list of ints.
    code = _flatten_code(code)

    if not parsed.has_simple_start():
        # Get the first set, if possible.
        try:
            fs_code = _compile_firstset(info, parsed.get_firstset(reverse))
            fs_code = _flatten_code(fs_code)
            code = fs_code + code
        except _FirstSetError:
            pass

    # The named capture groups: invert group_index (name -> number) into
    # number -> name.
    index_group = dict((v, n) for n, v in info.group_index.items())

    # Create the PatternObject.
    #
    # Local flags like IGNORECASE affect the code generation, but aren't needed
    # by the PatternObject itself. Conversely, global flags like LOCALE _don't_
    # affect the code generation but _are_ needed by the PatternObject.
    compiled_pattern = _regex.compile(pattern, info.flags | version, code,
      info.group_index, index_group, named_lists, named_list_indexes,
      req_offset, req_chars, req_flags, info.group_count)

    # Do we need to reduce the size of the cache?
    if len(_cache) >= _MAXCACHE:
        with _cache_lock:
            _shrink_cache(_cache, _named_args, _locale_sensitive, _MAXCACHE)

    if cache_it:
        # An explicit LOCALE flag was already accounted for above; without it
        # the pattern key must not vary with the current locale.
        if (info.flags & LOCALE) == 0:
            pattern_locale = None

        args_needed = frozenset(args_needed)

        # Store this regular expression and named list.
        pattern_key = (pattern, type(pattern), flags, args_needed,
          DEFAULT_VERSION, pattern_locale)
        _cache[pattern_key] = compiled_pattern

        # Store what keyword arguments are needed.
        _named_args[args_key] = args_needed

    return compiled_pattern

def _compile_replacement_helper(pattern, template):
    """Compiles a replacement template.

    Returns a list whose items are either literal strings or int group
    references, cached per (pattern, flags, template).
    """
    # This function is called by the _regex module.

    # Have we seen this before?
    key = pattern.pattern, pattern.flags, template
    compiled = _replacement_cache.get(key)
    if compiled is not None:
        return compiled

    # Crude eviction: drop the whole cache once it grows too large.
    if len(_replacement_cache) >= _MAXREPCACHE:
        _replacement_cache.clear()

    is_unicode = isinstance(template, str)
    source = _Source(template)
    # make_string turns a list of ordinals back into the template's type.
    if is_unicode:
        def make_string(char_codes):
            return "".join(chr(c) for c in char_codes)
    else:
        def make_string(char_codes):
            return bytes(char_codes)

    compiled = []
    literal = []
    while True:
        ch = source.get()
        if not ch:
            break
        if ch == "\\":
            # '_compile_replacement' will return either an int group reference
            # or a string literal. It returns items (plural) in order to handle
            # a 2-character literal (an invalid escape sequence).
            is_group, items = _compile_replacement(source, pattern, is_unicode)
            if is_group:
                # It's a group, so first flush the literal.
                if literal:
                    compiled.append(make_string(literal))
                    literal = []
                compiled.extend(items)
            else:
                literal.extend(items)
        else:
            literal.append(ord(ch))

    # Flush the literal.
    if literal:
        compiled.append(make_string(literal))

    _replacement_cache[key] = compiled

    return compiled

# We define Pattern here after all the support objects have been defined.
_pat = _compile('', 0, False, {}, False)
Pattern = type(_pat)
Match = type(_pat.match(''))
del _pat

# Make Pattern public for typing annotations.
__all__.append("Pattern")
__all__.append("Match")

# We'll define an alias for the 'compile' function so that the repr of a
# pattern object is eval-able.
Regex = compile

# Register myself for pickling.
+import copyreg as _copy_reg + +def _pickle(pattern): + return _regex.compile, pattern._pickled_data + +_copy_reg.pickle(Pattern, _pickle) diff --git a/vlmpy310/lib/python3.10/site-packages/regex/test_regex.py b/vlmpy310/lib/python3.10/site-packages/regex/test_regex.py new file mode 100644 index 0000000000000000000000000000000000000000..bce5a871164919ca910c2b02b9cfd3c58e5912b8 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/regex/test_regex.py @@ -0,0 +1,4488 @@ +from weakref import proxy +import copy +import pickle +import regex +import string +import sys +import unittest + +# String subclasses for issue 18468. +class StrSubclass(str): + def __getitem__(self, index): + return StrSubclass(super().__getitem__(index)) + +class BytesSubclass(bytes): + def __getitem__(self, index): + return BytesSubclass(super().__getitem__(index)) + +class RegexTests(unittest.TestCase): + PATTERN_CLASS = "" + FLAGS_WITH_COMPILED_PAT = "cannot process flags argument with a compiled pattern" + INVALID_GROUP_REF = "invalid group reference" + MISSING_GT = "missing >" + BAD_GROUP_NAME = "bad character in group name" + MISSING_GROUP_NAME = "missing group name" + MISSING_LT = "missing <" + UNKNOWN_GROUP_I = "unknown group" + UNKNOWN_GROUP = "unknown group" + BAD_ESCAPE = r"bad escape \(end of pattern\)" + BAD_OCTAL_ESCAPE = r"bad escape \\" + BAD_SET = "unterminated character set" + STR_PAT_ON_BYTES = "cannot use a string pattern on a bytes-like object" + BYTES_PAT_ON_STR = "cannot use a bytes pattern on a string-like object" + STR_PAT_BYTES_TEMPL = "expected str instance, bytes found" + BYTES_PAT_STR_TEMPL = "expected a bytes-like object, str found" + BYTES_PAT_UNI_FLAG = "cannot use UNICODE flag with a bytes pattern" + MIXED_FLAGS = "ASCII, LOCALE and UNICODE flags are mutually incompatible" + MISSING_RPAREN = "missing \\)" + TRAILING_CHARS = "unbalanced parenthesis" + BAD_CHAR_RANGE = "bad character range" + NOTHING_TO_REPEAT = "nothing to repeat" + MULTIPLE_REPEAT = "multiple 
repeat" + OPEN_GROUP = "cannot refer to an open group" + DUPLICATE_GROUP = "duplicate group" + CANT_TURN_OFF = "bad inline flags: cannot turn flags off" + UNDEF_CHAR_NAME = "undefined character name" + + def assertTypedEqual(self, actual, expect, msg=None): + self.assertEqual(actual, expect, msg) + + def recurse(actual, expect): + if isinstance(expect, (tuple, list)): + for x, y in zip(actual, expect): + recurse(x, y) + else: + self.assertIs(type(actual), type(expect), msg) + + recurse(actual, expect) + + def test_weakref(self): + s = 'QabbbcR' + x = regex.compile('ab+c') + y = proxy(x) + if x.findall('QabbbcR') != y.findall('QabbbcR'): + self.fail() + + def test_search_star_plus(self): + self.assertEqual(regex.search('a*', 'xxx').span(0), (0, 0)) + self.assertEqual(regex.search('x*', 'axx').span(), (0, 0)) + self.assertEqual(regex.search('x+', 'axx').span(0), (1, 3)) + self.assertEqual(regex.search('x+', 'axx').span(), (1, 3)) + self.assertEqual(regex.search('x', 'aaa'), None) + self.assertEqual(regex.match('a*', 'xxx').span(0), (0, 0)) + self.assertEqual(regex.match('a*', 'xxx').span(), (0, 0)) + self.assertEqual(regex.match('x*', 'xxxa').span(0), (0, 3)) + self.assertEqual(regex.match('x*', 'xxxa').span(), (0, 3)) + self.assertEqual(regex.match('a+', 'xxx'), None) + + def bump_num(self, matchobj): + int_value = int(matchobj[0]) + return str(int_value + 1) + + def test_basic_regex_sub(self): + self.assertEqual(regex.sub("(?i)b+", "x", "bbbb BBBB"), 'x x') + self.assertEqual(regex.sub(r'\d+', self.bump_num, '08.2 -2 23x99y'), + '9.3 -3 24x100y') + self.assertEqual(regex.sub(r'\d+', self.bump_num, '08.2 -2 23x99y', 3), + '9.3 -3 23x99y') + + self.assertEqual(regex.sub('.', lambda m: r"\n", 'x'), "\\n") + self.assertEqual(regex.sub('.', r"\n", 'x'), "\n") + + self.assertEqual(regex.sub('(?Px)', r'\g\g', 'xx'), 'xxxx') + self.assertEqual(regex.sub('(?Px)', r'\g\g<1>', 'xx'), 'xxxx') + self.assertEqual(regex.sub('(?Px)', r'\g\g', 'xx'), + 'xxxx') + 
self.assertEqual(regex.sub('(?Px)', r'\g<1>\g<1>', 'xx'), 'xxxx') + + self.assertEqual(regex.sub('a', r'\t\n\v\r\f\a\b', 'a'), "\t\n\v\r\f\a\b") + self.assertEqual(regex.sub('a', '\t\n\v\r\f\a', 'a'), "\t\n\v\r\f\a") + self.assertEqual(regex.sub('a', '\t\n\v\r\f\a', 'a'), chr(9) + chr(10) + + chr(11) + chr(13) + chr(12) + chr(7)) + + self.assertEqual(regex.sub(r'^\s*', 'X', 'test'), 'Xtest') + + self.assertEqual(regex.sub(r"x", r"\x0A", "x"), "\n") + self.assertEqual(regex.sub(r"x", r"\u000A", "x"), "\n") + self.assertEqual(regex.sub(r"x", r"\U0000000A", "x"), "\n") + self.assertEqual(regex.sub(r"x", r"\N{LATIN CAPITAL LETTER A}", + "x"), "A") + + self.assertEqual(regex.sub(br"x", br"\x0A", b"x"), b"\n") + + def test_bug_449964(self): + # Fails for group followed by other escape. + self.assertEqual(regex.sub(r'(?Px)', r'\g<1>\g<1>\b', 'xx'), + "xx\bxx\b") + + def test_bug_449000(self): + # Test for sub() on escaped characters. + self.assertEqual(regex.sub(r'\r\n', r'\n', 'abc\r\ndef\r\n'), + "abc\ndef\n") + self.assertEqual(regex.sub('\r\n', r'\n', 'abc\r\ndef\r\n'), + "abc\ndef\n") + self.assertEqual(regex.sub(r'\r\n', '\n', 'abc\r\ndef\r\n'), + "abc\ndef\n") + self.assertEqual(regex.sub('\r\n', '\n', 'abc\r\ndef\r\n'), + "abc\ndef\n") + + def test_bug_1661(self): + # Verify that flags do not get silently ignored with compiled patterns + pattern = regex.compile('.') + self.assertRaisesRegex(ValueError, self.FLAGS_WITH_COMPILED_PAT, + lambda: regex.match(pattern, 'A', regex.I)) + self.assertRaisesRegex(ValueError, self.FLAGS_WITH_COMPILED_PAT, + lambda: regex.search(pattern, 'A', regex.I)) + self.assertRaisesRegex(ValueError, self.FLAGS_WITH_COMPILED_PAT, + lambda: regex.findall(pattern, 'A', regex.I)) + self.assertRaisesRegex(ValueError, self.FLAGS_WITH_COMPILED_PAT, + lambda: regex.compile(pattern, regex.I)) + + def test_bug_3629(self): + # A regex that triggered a bug in the sre-code validator + self.assertEqual(repr(type(regex.compile("(?P)(?(quote))"))), + 
self.PATTERN_CLASS) + + def test_sub_template_numeric_escape(self): + # Bug 776311 and friends. + self.assertEqual(regex.sub('x', r'\0', 'x'), "\0") + self.assertEqual(regex.sub('x', r'\000', 'x'), "\000") + self.assertEqual(regex.sub('x', r'\001', 'x'), "\001") + self.assertEqual(regex.sub('x', r'\008', 'x'), "\0" + "8") + self.assertEqual(regex.sub('x', r'\009', 'x'), "\0" + "9") + self.assertEqual(regex.sub('x', r'\111', 'x'), "\111") + self.assertEqual(regex.sub('x', r'\117', 'x'), "\117") + + self.assertEqual(regex.sub('x', r'\1111', 'x'), "\1111") + self.assertEqual(regex.sub('x', r'\1111', 'x'), "\111" + "1") + + self.assertEqual(regex.sub('x', r'\00', 'x'), '\x00') + self.assertEqual(regex.sub('x', r'\07', 'x'), '\x07') + self.assertEqual(regex.sub('x', r'\08', 'x'), "\0" + "8") + self.assertEqual(regex.sub('x', r'\09', 'x'), "\0" + "9") + self.assertEqual(regex.sub('x', r'\0a', 'x'), "\0" + "a") + + self.assertEqual(regex.sub('x', r'\400', 'x'), "\u0100") + self.assertEqual(regex.sub('x', r'\777', 'x'), "\u01FF") + self.assertEqual(regex.sub(b'x', br'\400', b'x'), b"\x00") + self.assertEqual(regex.sub(b'x', br'\777', b'x'), b"\xFF") + + self.assertRaisesRegex(regex.error, self.INVALID_GROUP_REF, lambda: + regex.sub('x', r'\1', 'x')) + self.assertRaisesRegex(regex.error, self.INVALID_GROUP_REF, lambda: + regex.sub('x', r'\8', 'x')) + self.assertRaisesRegex(regex.error, self.INVALID_GROUP_REF, lambda: + regex.sub('x', r'\9', 'x')) + self.assertRaisesRegex(regex.error, self.INVALID_GROUP_REF, lambda: + regex.sub('x', r'\11', 'x')) + self.assertRaisesRegex(regex.error, self.INVALID_GROUP_REF, lambda: + regex.sub('x', r'\18', 'x')) + self.assertRaisesRegex(regex.error, self.INVALID_GROUP_REF, lambda: + regex.sub('x', r'\1a', 'x')) + self.assertRaisesRegex(regex.error, self.INVALID_GROUP_REF, lambda: + regex.sub('x', r'\90', 'x')) + self.assertRaisesRegex(regex.error, self.INVALID_GROUP_REF, lambda: + regex.sub('x', r'\99', 'x')) + 
self.assertRaisesRegex(regex.error, self.INVALID_GROUP_REF, lambda: + regex.sub('x', r'\118', 'x')) # r'\11' + '8' + self.assertRaisesRegex(regex.error, self.INVALID_GROUP_REF, lambda: + regex.sub('x', r'\11a', 'x')) + self.assertRaisesRegex(regex.error, self.INVALID_GROUP_REF, lambda: + regex.sub('x', r'\181', 'x')) # r'\18' + '1' + self.assertRaisesRegex(regex.error, self.INVALID_GROUP_REF, lambda: + regex.sub('x', r'\800', 'x')) # r'\80' + '0' + + # In Python 2.3 (etc), these loop endlessly in sre_parser.py. + self.assertEqual(regex.sub('(((((((((((x)))))))))))', r'\11', 'x'), + 'x') + self.assertEqual(regex.sub('((((((((((y))))))))))(.)', r'\118', 'xyz'), + 'xz8') + self.assertEqual(regex.sub('((((((((((y))))))))))(.)', r'\11a', 'xyz'), + 'xza') + + def test_qualified_re_sub(self): + self.assertEqual(regex.sub('a', 'b', 'aaaaa'), 'bbbbb') + self.assertEqual(regex.sub('a', 'b', 'aaaaa', 1), 'baaaa') + + def test_bug_114660(self): + self.assertEqual(regex.sub(r'(\S)\s+(\S)', r'\1 \2', 'hello there'), + 'hello there') + + def test_bug_462270(self): + # Test for empty sub() behaviour, see SF bug #462270 + if sys.version_info >= (3, 7, 0): + self.assertEqual(regex.sub('(?V0)x*', '-', 'abxd'), '-a-b--d-') + else: + self.assertEqual(regex.sub('(?V0)x*', '-', 'abxd'), '-a-b-d-') + self.assertEqual(regex.sub('(?V1)x*', '-', 'abxd'), '-a-b--d-') + self.assertEqual(regex.sub('x+', '-', 'abxd'), 'ab-d') + + def test_bug_14462(self): + # chr(255) is a valid identifier in Python 3. 
+ group_name = '\xFF' + self.assertEqual(regex.search(r'(?P<' + group_name + '>a)', + 'abc').group(group_name), 'a') + + def test_symbolic_refs(self): + self.assertRaisesRegex(regex.error, self.MISSING_GT, lambda: + regex.sub('(?Px)', r'\gx)', r'\g<', 'xx')) + self.assertRaisesRegex(regex.error, self.MISSING_LT, lambda: + regex.sub('(?Px)', r'\g', 'xx')) + self.assertRaisesRegex(regex.error, self.BAD_GROUP_NAME, lambda: + regex.sub('(?Px)', r'\g', 'xx')) + self.assertRaisesRegex(regex.error, self.BAD_GROUP_NAME, lambda: + regex.sub('(?Px)', r'\g<1a1>', 'xx')) + self.assertRaisesRegex(IndexError, self.UNKNOWN_GROUP_I, lambda: + regex.sub('(?Px)', r'\g', 'xx')) + + # The new behaviour of unmatched but valid groups is to treat them like + # empty matches in the replacement template, like in Perl. + self.assertEqual(regex.sub('(?Px)|(?Py)', r'\g', 'xx'), '') + self.assertEqual(regex.sub('(?Px)|(?Py)', r'\2', 'xx'), '') + + # The old behaviour was to raise it as an IndexError. + self.assertRaisesRegex(regex.error, self.BAD_GROUP_NAME, lambda: + regex.sub('(?Px)', r'\g<-1>', 'xx')) + + def test_re_subn(self): + self.assertEqual(regex.subn("(?i)b+", "x", "bbbb BBBB"), ('x x', 2)) + self.assertEqual(regex.subn("b+", "x", "bbbb BBBB"), ('x BBBB', 1)) + self.assertEqual(regex.subn("b+", "x", "xyz"), ('xyz', 0)) + self.assertEqual(regex.subn("b*", "x", "xyz"), ('xxxyxzx', 4)) + self.assertEqual(regex.subn("b*", "x", "xyz", 2), ('xxxyz', 2)) + + def test_re_split(self): + self.assertEqual(regex.split(":", ":a:b::c"), ['', 'a', 'b', '', 'c']) + if sys.version_info >= (3, 7, 0): + self.assertEqual(regex.split(":*", ":a:b::c"), ['', '', 'a', '', + 'b', '', 'c', '']) + self.assertEqual(regex.split("(:*)", ":a:b::c"), ['', ':', '', '', + 'a', ':', '', '', 'b', '::', '', '', 'c', '', '']) + self.assertEqual(regex.split("(?::*)", ":a:b::c"), ['', '', 'a', + '', 'b', '', 'c', '']) + self.assertEqual(regex.split("(:)*", ":a:b::c"), ['', ':', '', + None, 'a', ':', '', None, 'b', ':', 
'', None, 'c', None, '']) + else: + self.assertEqual(regex.split(":*", ":a:b::c"), ['', 'a', 'b', 'c']) + self.assertEqual(regex.split("(:*)", ":a:b::c"), ['', ':', 'a', + ':', 'b', '::', 'c']) + self.assertEqual(regex.split("(?::*)", ":a:b::c"), ['', 'a', 'b', + 'c']) + self.assertEqual(regex.split("(:)*", ":a:b::c"), ['', ':', 'a', + ':', 'b', ':', 'c']) + self.assertEqual(regex.split("([b:]+)", ":a:b::c"), ['', ':', 'a', + ':b::', 'c']) + self.assertEqual(regex.split("(b)|(:+)", ":a:b::c"), ['', None, ':', + 'a', None, ':', '', 'b', None, '', None, '::', 'c']) + self.assertEqual(regex.split("(?:b)|(?::+)", ":a:b::c"), ['', 'a', '', + '', 'c']) + + self.assertEqual(regex.split("x", "xaxbxc"), ['', 'a', 'b', 'c']) + self.assertEqual([m for m in regex.splititer("x", "xaxbxc")], ['', 'a', + 'b', 'c']) + + self.assertEqual(regex.split("(?r)x", "xaxbxc"), ['c', 'b', 'a', '']) + self.assertEqual([m for m in regex.splititer("(?r)x", "xaxbxc")], ['c', + 'b', 'a', '']) + + self.assertEqual(regex.split("(x)|(y)", "xaxbxc"), ['', 'x', None, 'a', + 'x', None, 'b', 'x', None, 'c']) + self.assertEqual([m for m in regex.splititer("(x)|(y)", "xaxbxc")], + ['', 'x', None, 'a', 'x', None, 'b', 'x', None, 'c']) + + self.assertEqual(regex.split("(?r)(x)|(y)", "xaxbxc"), ['c', 'x', None, + 'b', 'x', None, 'a', 'x', None, '']) + self.assertEqual([m for m in regex.splititer("(?r)(x)|(y)", "xaxbxc")], + ['c', 'x', None, 'b', 'x', None, 'a', 'x', None, '']) + + self.assertEqual(regex.split(r"(?V1)\b", "a b c"), ['', 'a', ' ', 'b', + ' ', 'c', '']) + self.assertEqual(regex.split(r"(?V1)\m", "a b c"), ['', 'a ', 'b ', + 'c']) + self.assertEqual(regex.split(r"(?V1)\M", "a b c"), ['a', ' b', ' c', + '']) + + def test_qualified_re_split(self): + self.assertEqual(regex.split(":", ":a:b::c", 2), ['', 'a', 'b::c']) + self.assertEqual(regex.split(':', 'a:b:c:d', 2), ['a', 'b', 'c:d']) + self.assertEqual(regex.split("(:)", ":a:b::c", 2), ['', ':', 'a', ':', + 'b::c']) + + if sys.version_info >= 
(3, 7, 0): + self.assertEqual(regex.split("(:*)", ":a:b::c", 2), ['', ':', '', + '', 'a:b::c']) + else: + self.assertEqual(regex.split("(:*)", ":a:b::c", 2), ['', ':', 'a', + ':', 'b::c']) + + def test_re_findall(self): + self.assertEqual(regex.findall(":+", "abc"), []) + self.assertEqual(regex.findall(":+", "a:b::c:::d"), [':', '::', ':::']) + self.assertEqual(regex.findall("(:+)", "a:b::c:::d"), [':', '::', + ':::']) + self.assertEqual(regex.findall("(:)(:*)", "a:b::c:::d"), [(':', ''), + (':', ':'), (':', '::')]) + + self.assertEqual(regex.findall(r"\((?P.{0,5}?TEST)\)", + "(MY TEST)"), ["MY TEST"]) + self.assertEqual(regex.findall(r"\((?P.{0,3}?TEST)\)", + "(MY TEST)"), ["MY TEST"]) + self.assertEqual(regex.findall(r"\((?P.{0,3}?T)\)", "(MY T)"), + ["MY T"]) + + self.assertEqual(regex.findall(r"[^a]{2}[A-Z]", "\n S"), [' S']) + self.assertEqual(regex.findall(r"[^a]{2,3}[A-Z]", "\n S"), ['\n S']) + self.assertEqual(regex.findall(r"[^a]{2,3}[A-Z]", "\n S"), [' S']) + + self.assertEqual(regex.findall(r"X(Y[^Y]+?){1,2}( |Q)+DEF", + "XYABCYPPQ\nQ DEF"), [('YPPQ\n', ' ')]) + + self.assertEqual(regex.findall(r"(\nTest(\n+.+?){0,2}?)?\n+End", + "\nTest\nxyz\nxyz\nEnd"), [('\nTest\nxyz\nxyz', '\nxyz')]) + + def test_bug_117612(self): + self.assertEqual(regex.findall(r"(a|(b))", "aba"), [('a', ''), ('b', + 'b'), ('a', '')]) + + def test_re_match(self): + self.assertEqual(regex.match('a', 'a')[:], ('a',)) + self.assertEqual(regex.match('(a)', 'a')[:], ('a', 'a')) + self.assertEqual(regex.match(r'(a)', 'a')[0], 'a') + self.assertEqual(regex.match(r'(a)', 'a')[1], 'a') + self.assertEqual(regex.match(r'(a)', 'a').group(1, 1), ('a', 'a')) + + pat = regex.compile('((a)|(b))(c)?') + self.assertEqual(pat.match('a')[:], ('a', 'a', 'a', None, None)) + self.assertEqual(pat.match('b')[:], ('b', 'b', None, 'b', None)) + self.assertEqual(pat.match('ac')[:], ('ac', 'a', 'a', None, 'c')) + self.assertEqual(pat.match('bc')[:], ('bc', 'b', None, 'b', 'c')) + 
self.assertEqual(pat.match('bc')[:], ('bc', 'b', None, 'b', 'c')) + + # A single group. + m = regex.match('(a)', 'a') + self.assertEqual(m.group(), 'a') + self.assertEqual(m.group(0), 'a') + self.assertEqual(m.group(1), 'a') + self.assertEqual(m.group(1, 1), ('a', 'a')) + + pat = regex.compile('(?:(?Pa)|(?Pb))(?Pc)?') + self.assertEqual(pat.match('a').group(1, 2, 3), ('a', None, None)) + self.assertEqual(pat.match('b').group('a1', 'b2', 'c3'), (None, 'b', + None)) + self.assertEqual(pat.match('ac').group(1, 'b2', 3), ('a', None, 'c')) + + def test_re_groupref_exists(self): + self.assertEqual(regex.match(r'^(\()?([^()]+)(?(1)\))$', '(a)')[:], + ('(a)', '(', 'a')) + self.assertEqual(regex.match(r'^(\()?([^()]+)(?(1)\))$', 'a')[:], ('a', + None, 'a')) + self.assertEqual(regex.match(r'^(\()?([^()]+)(?(1)\))$', 'a)'), None) + self.assertEqual(regex.match(r'^(\()?([^()]+)(?(1)\))$', '(a'), None) + self.assertEqual(regex.match('^(?:(a)|c)((?(1)b|d))$', 'ab')[:], ('ab', + 'a', 'b')) + self.assertEqual(regex.match('^(?:(a)|c)((?(1)b|d))$', 'cd')[:], ('cd', + None, 'd')) + self.assertEqual(regex.match('^(?:(a)|c)((?(1)|d))$', 'cd')[:], ('cd', + None, 'd')) + self.assertEqual(regex.match('^(?:(a)|c)((?(1)|d))$', 'a')[:], ('a', + 'a', '')) + + # Tests for bug #1177831: exercise groups other than the first group. 
+ p = regex.compile('(?Pa)(?Pb)?((?(g2)c|d))') + self.assertEqual(p.match('abc')[:], ('abc', 'a', 'b', 'c')) + self.assertEqual(p.match('ad')[:], ('ad', 'a', None, 'd')) + self.assertEqual(p.match('abd'), None) + self.assertEqual(p.match('ac'), None) + + def test_re_groupref(self): + self.assertEqual(regex.match(r'^(\|)?([^()]+)\1$', '|a|')[:], ('|a|', + '|', 'a')) + self.assertEqual(regex.match(r'^(\|)?([^()]+)\1?$', 'a')[:], ('a', + None, 'a')) + self.assertEqual(regex.match(r'^(\|)?([^()]+)\1$', 'a|'), None) + self.assertEqual(regex.match(r'^(\|)?([^()]+)\1$', '|a'), None) + self.assertEqual(regex.match(r'^(?:(a)|c)(\1)$', 'aa')[:], ('aa', 'a', + 'a')) + self.assertEqual(regex.match(r'^(?:(a)|c)(\1)?$', 'c')[:], ('c', None, + None)) + + self.assertEqual(regex.findall(r"(?i)(.{1,40}?),(.{1,40}?)(?:;)+(.{1,80}).{1,40}?\3(\ |;)+(.{1,80}?)\1", + "TEST, BEST; LEST ; Lest 123 Test, Best"), [('TEST', ' BEST', + ' LEST', ' ', '123 ')]) + + def test_groupdict(self): + self.assertEqual(regex.match('(?Pfirst) (?Psecond)', + 'first second').groupdict(), {'first': 'first', 'second': 'second'}) + + def test_expand(self): + self.assertEqual(regex.match("(?Pfirst) (?Psecond)", + "first second").expand(r"\2 \1 \g \g"), + 'second first second first') + + def test_repeat_minmax(self): + self.assertEqual(regex.match(r"^(\w){1}$", "abc"), None) + self.assertEqual(regex.match(r"^(\w){1}?$", "abc"), None) + self.assertEqual(regex.match(r"^(\w){1,2}$", "abc"), None) + self.assertEqual(regex.match(r"^(\w){1,2}?$", "abc"), None) + + self.assertEqual(regex.match(r"^(\w){3}$", "abc")[1], 'c') + self.assertEqual(regex.match(r"^(\w){1,3}$", "abc")[1], 'c') + self.assertEqual(regex.match(r"^(\w){1,4}$", "abc")[1], 'c') + self.assertEqual(regex.match(r"^(\w){3,4}?$", "abc")[1], 'c') + self.assertEqual(regex.match(r"^(\w){3}?$", "abc")[1], 'c') + self.assertEqual(regex.match(r"^(\w){1,3}?$", "abc")[1], 'c') + self.assertEqual(regex.match(r"^(\w){1,4}?$", "abc")[1], 'c') + 
self.assertEqual(regex.match(r"^(\w){3,4}?$", "abc")[1], 'c') + + self.assertEqual(regex.match("^x{1}$", "xxx"), None) + self.assertEqual(regex.match("^x{1}?$", "xxx"), None) + self.assertEqual(regex.match("^x{1,2}$", "xxx"), None) + self.assertEqual(regex.match("^x{1,2}?$", "xxx"), None) + + self.assertEqual(regex.match("^x{1}", "xxx")[0], 'x') + self.assertEqual(regex.match("^x{1}?", "xxx")[0], 'x') + self.assertEqual(regex.match("^x{0,1}", "xxx")[0], 'x') + self.assertEqual(regex.match("^x{0,1}?", "xxx")[0], '') + + self.assertEqual(bool(regex.match("^x{3}$", "xxx")), True) + self.assertEqual(bool(regex.match("^x{1,3}$", "xxx")), True) + self.assertEqual(bool(regex.match("^x{1,4}$", "xxx")), True) + self.assertEqual(bool(regex.match("^x{3,4}?$", "xxx")), True) + self.assertEqual(bool(regex.match("^x{3}?$", "xxx")), True) + self.assertEqual(bool(regex.match("^x{1,3}?$", "xxx")), True) + self.assertEqual(bool(regex.match("^x{1,4}?$", "xxx")), True) + self.assertEqual(bool(regex.match("^x{3,4}?$", "xxx")), True) + + self.assertEqual(regex.match("^x{}$", "xxx"), None) + self.assertEqual(bool(regex.match("^x{}$", "x{}")), True) + + def test_getattr(self): + self.assertEqual(regex.compile("(?i)(a)(b)").pattern, '(?i)(a)(b)') + self.assertEqual(regex.compile("(?i)(a)(b)").flags, regex.I | regex.U | + regex.DEFAULT_VERSION) + self.assertEqual(regex.compile(b"(?i)(a)(b)").flags, regex.A | regex.I + | regex.DEFAULT_VERSION) + self.assertEqual(regex.compile("(?i)(a)(b)").groups, 2) + self.assertEqual(regex.compile("(?i)(a)(b)").groupindex, {}) + + self.assertEqual(regex.compile("(?i)(?Pa)(?Pb)").groupindex, + {'first': 1, 'other': 2}) + + self.assertEqual(regex.match("(a)", "a").pos, 0) + self.assertEqual(regex.match("(a)", "a").endpos, 1) + + self.assertEqual(regex.search("b(c)", "abcdef").pos, 0) + self.assertEqual(regex.search("b(c)", "abcdef").endpos, 6) + self.assertEqual(regex.search("b(c)", "abcdef").span(), (1, 3)) + self.assertEqual(regex.search("b(c)", 
"abcdef").span(1), (2, 3)) + + self.assertEqual(regex.match("(a)", "a").string, 'a') + self.assertEqual(regex.match("(a)", "a").regs, ((0, 1), (0, 1))) + self.assertEqual(repr(type(regex.match("(a)", "a").re)), + self.PATTERN_CLASS) + + # Issue 14260. + p = regex.compile(r'abc(?Pdef)') + p.groupindex["n"] = 0 + self.assertEqual(p.groupindex["n"], 1) + + def test_special_escapes(self): + self.assertEqual(regex.search(r"\b(b.)\b", "abcd abc bcd bx")[1], 'bx') + self.assertEqual(regex.search(r"\B(b.)\B", "abc bcd bc abxd")[1], 'bx') + self.assertEqual(regex.search(br"\b(b.)\b", b"abcd abc bcd bx", + regex.LOCALE)[1], b'bx') + self.assertEqual(regex.search(br"\B(b.)\B", b"abc bcd bc abxd", + regex.LOCALE)[1], b'bx') + self.assertEqual(regex.search(r"\b(b.)\b", "abcd abc bcd bx", + regex.UNICODE)[1], 'bx') + self.assertEqual(regex.search(r"\B(b.)\B", "abc bcd bc abxd", + regex.UNICODE)[1], 'bx') + + self.assertEqual(regex.search(r"^abc$", "\nabc\n", regex.M)[0], 'abc') + self.assertEqual(regex.search(r"^\Aabc\Z$", "abc", regex.M)[0], 'abc') + self.assertEqual(regex.search(r"^\Aabc\Z$", "\nabc\n", regex.M), None) + + self.assertEqual(regex.search(br"\b(b.)\b", b"abcd abc bcd bx")[1], + b'bx') + self.assertEqual(regex.search(br"\B(b.)\B", b"abc bcd bc abxd")[1], + b'bx') + self.assertEqual(regex.search(br"^abc$", b"\nabc\n", regex.M)[0], + b'abc') + self.assertEqual(regex.search(br"^\Aabc\Z$", b"abc", regex.M)[0], + b'abc') + self.assertEqual(regex.search(br"^\Aabc\Z$", b"\nabc\n", regex.M), + None) + + self.assertEqual(regex.search(r"\d\D\w\W\s\S", "1aa! a")[0], '1aa! a') + self.assertEqual(regex.search(br"\d\D\w\W\s\S", b"1aa! a", + regex.LOCALE)[0], b'1aa! a') + self.assertEqual(regex.search(r"\d\D\w\W\s\S", "1aa! a", + regex.UNICODE)[0], '1aa! 
a') + + def test_bigcharset(self): + self.assertEqual(regex.match(r"([\u2222\u2223])", "\u2222")[1], + '\u2222') + self.assertEqual(regex.match(r"([\u2222\u2223])", "\u2222", + regex.UNICODE)[1], '\u2222') + self.assertEqual("".join(regex.findall(".", + "e\xe8\xe9\xea\xeb\u0113\u011b\u0117", flags=regex.UNICODE)), + 'e\xe8\xe9\xea\xeb\u0113\u011b\u0117') + self.assertEqual("".join(regex.findall(r"[e\xe8\xe9\xea\xeb\u0113\u011b\u0117]", + "e\xe8\xe9\xea\xeb\u0113\u011b\u0117", flags=regex.UNICODE)), + 'e\xe8\xe9\xea\xeb\u0113\u011b\u0117') + self.assertEqual("".join(regex.findall(r"e|\xe8|\xe9|\xea|\xeb|\u0113|\u011b|\u0117", + "e\xe8\xe9\xea\xeb\u0113\u011b\u0117", flags=regex.UNICODE)), + 'e\xe8\xe9\xea\xeb\u0113\u011b\u0117') + + def test_anyall(self): + self.assertEqual(regex.match("a.b", "a\nb", regex.DOTALL)[0], "a\nb") + self.assertEqual(regex.match("a.*b", "a\n\nb", regex.DOTALL)[0], + "a\n\nb") + + def test_non_consuming(self): + self.assertEqual(regex.match(r"(a(?=\s[^a]))", "a b")[1], 'a') + self.assertEqual(regex.match(r"(a(?=\s[^a]*))", "a b")[1], 'a') + self.assertEqual(regex.match(r"(a(?=\s[abc]))", "a b")[1], 'a') + self.assertEqual(regex.match(r"(a(?=\s[abc]*))", "a bc")[1], 'a') + self.assertEqual(regex.match(r"(a)(?=\s\1)", "a a")[1], 'a') + self.assertEqual(regex.match(r"(a)(?=\s\1*)", "a aa")[1], 'a') + self.assertEqual(regex.match(r"(a)(?=\s(abc|a))", "a a")[1], 'a') + + self.assertEqual(regex.match(r"(a(?!\s[^a]))", "a a")[1], 'a') + self.assertEqual(regex.match(r"(a(?!\s[abc]))", "a d")[1], 'a') + self.assertEqual(regex.match(r"(a)(?!\s\1)", "a b")[1], 'a') + self.assertEqual(regex.match(r"(a)(?!\s(abc|a))", "a b")[1], 'a') + + def test_ignore_case(self): + self.assertEqual(regex.match("abc", "ABC", regex.I)[0], 'ABC') + self.assertEqual(regex.match(b"abc", b"ABC", regex.I)[0], b'ABC') + + self.assertEqual(regex.match(r"(a\s[^a]*)", "a bb", regex.I)[1], + 'a bb') + self.assertEqual(regex.match(r"(a\s[abc])", "a b", regex.I)[1], 'a b') + 
self.assertEqual(regex.match(r"(a\s[abc]*)", "a bb", regex.I)[1], + 'a bb') + self.assertEqual(regex.match(r"((a)\s\2)", "a a", regex.I)[1], 'a a') + self.assertEqual(regex.match(r"((a)\s\2*)", "a aa", regex.I)[1], + 'a aa') + self.assertEqual(regex.match(r"((a)\s(abc|a))", "a a", regex.I)[1], + 'a a') + self.assertEqual(regex.match(r"((a)\s(abc|a)*)", "a aa", regex.I)[1], + 'a aa') + + # Issue 3511. + self.assertEqual(regex.match(r"[Z-a]", "_").span(), (0, 1)) + self.assertEqual(regex.match(r"(?i)[Z-a]", "_").span(), (0, 1)) + + self.assertEqual(bool(regex.match(r"(?i)nao", "nAo")), True) + self.assertEqual(bool(regex.match(r"(?i)n\xE3o", "n\xC3o")), True) + self.assertEqual(bool(regex.match(r"(?i)n\xE3o", "N\xC3O")), True) + self.assertEqual(bool(regex.match(r"(?i)s", "\u017F")), True) + + def test_case_folding(self): + self.assertEqual(regex.search(r"(?fi)ss", "SS").span(), (0, 2)) + self.assertEqual(regex.search(r"(?fi)SS", "ss").span(), (0, 2)) + self.assertEqual(regex.search(r"(?fi)SS", + "\N{LATIN SMALL LETTER SHARP S}").span(), (0, 1)) + self.assertEqual(regex.search(r"(?fi)\N{LATIN SMALL LETTER SHARP S}", + "SS").span(), (0, 2)) + + self.assertEqual(regex.search(r"(?fi)\N{LATIN SMALL LIGATURE ST}", + "ST").span(), (0, 2)) + self.assertEqual(regex.search(r"(?fi)ST", + "\N{LATIN SMALL LIGATURE ST}").span(), (0, 1)) + self.assertEqual(regex.search(r"(?fi)ST", + "\N{LATIN SMALL LIGATURE LONG S T}").span(), (0, 1)) + + self.assertEqual(regex.search(r"(?fi)SST", + "\N{LATIN SMALL LETTER SHARP S}t").span(), (0, 2)) + self.assertEqual(regex.search(r"(?fi)SST", + "s\N{LATIN SMALL LIGATURE LONG S T}").span(), (0, 2)) + self.assertEqual(regex.search(r"(?fi)SST", + "s\N{LATIN SMALL LIGATURE ST}").span(), (0, 2)) + self.assertEqual(regex.search(r"(?fi)\N{LATIN SMALL LIGATURE ST}", + "SST").span(), (1, 3)) + self.assertEqual(regex.search(r"(?fi)SST", + "s\N{LATIN SMALL LIGATURE ST}").span(), (0, 2)) + + self.assertEqual(regex.search(r"(?fi)FFI", + "\N{LATIN SMALL 
LIGATURE FFI}").span(), (0, 1)) + self.assertEqual(regex.search(r"(?fi)FFI", + "\N{LATIN SMALL LIGATURE FF}i").span(), (0, 2)) + self.assertEqual(regex.search(r"(?fi)FFI", + "f\N{LATIN SMALL LIGATURE FI}").span(), (0, 2)) + self.assertEqual(regex.search(r"(?fi)\N{LATIN SMALL LIGATURE FFI}", + "FFI").span(), (0, 3)) + self.assertEqual(regex.search(r"(?fi)\N{LATIN SMALL LIGATURE FF}i", + "FFI").span(), (0, 3)) + self.assertEqual(regex.search(r"(?fi)f\N{LATIN SMALL LIGATURE FI}", + "FFI").span(), (0, 3)) + + sigma = "\u03A3\u03C3\u03C2" + for ch1 in sigma: + for ch2 in sigma: + if not regex.match(r"(?fi)" + ch1, ch2): + self.fail() + + self.assertEqual(bool(regex.search(r"(?iV1)ff", "\uFB00\uFB01")), + True) + self.assertEqual(bool(regex.search(r"(?iV1)ff", "\uFB01\uFB00")), + True) + self.assertEqual(bool(regex.search(r"(?iV1)fi", "\uFB00\uFB01")), + True) + self.assertEqual(bool(regex.search(r"(?iV1)fi", "\uFB01\uFB00")), + True) + self.assertEqual(bool(regex.search(r"(?iV1)fffi", "\uFB00\uFB01")), + True) + self.assertEqual(bool(regex.search(r"(?iV1)f\uFB03", + "\uFB00\uFB01")), True) + self.assertEqual(bool(regex.search(r"(?iV1)ff", "\uFB00\uFB01")), + True) + self.assertEqual(bool(regex.search(r"(?iV1)fi", "\uFB00\uFB01")), + True) + self.assertEqual(bool(regex.search(r"(?iV1)fffi", "\uFB00\uFB01")), + True) + self.assertEqual(bool(regex.search(r"(?iV1)f\uFB03", + "\uFB00\uFB01")), True) + self.assertEqual(bool(regex.search(r"(?iV1)f\uFB01", "\uFB00i")), + True) + self.assertEqual(bool(regex.search(r"(?iV1)f\uFB01", "\uFB00i")), + True) + + self.assertEqual(regex.findall(r"(?iV0)\m(?:word){e<=3}\M(?ne", "affine", + options=["\N{LATIN SMALL LIGATURE FFI}"]).span(), (0, 6)) + self.assertEqual(regex.search(r"(?fi)a\Lne", + "a\N{LATIN SMALL LIGATURE FFI}ne", options=["ffi"]).span(), (0, 4)) + + def test_category(self): + self.assertEqual(regex.match(r"(\s)", " ")[1], ' ') + + def test_not_literal(self): + self.assertEqual(regex.search(r"\s([^a])", " b")[1], 'b') + 
self.assertEqual(regex.search(r"\s([^a]*)", " bb")[1], 'bb') + + def test_search_coverage(self): + self.assertEqual(regex.search(r"\s(b)", " b")[1], 'b') + self.assertEqual(regex.search(r"a\s", "a ")[0], 'a ') + + def test_re_escape(self): + p = "" + self.assertEqual(regex.escape(p), p) + for i in range(0, 256): + p += chr(i) + self.assertEqual(bool(regex.match(regex.escape(chr(i)), chr(i))), + True) + self.assertEqual(regex.match(regex.escape(chr(i)), chr(i)).span(), + (0, 1)) + + pat = regex.compile(regex.escape(p)) + self.assertEqual(pat.match(p).span(), (0, 256)) + + def test_re_escape_byte(self): + p = b"" + self.assertEqual(regex.escape(p), p) + for i in range(0, 256): + b = bytes([i]) + p += b + self.assertEqual(bool(regex.match(regex.escape(b), b)), True) + self.assertEqual(regex.match(regex.escape(b), b).span(), (0, 1)) + + pat = regex.compile(regex.escape(p)) + self.assertEqual(pat.match(p).span(), (0, 256)) + + def test_constants(self): + if regex.I != regex.IGNORECASE: + self.fail() + if regex.L != regex.LOCALE: + self.fail() + if regex.M != regex.MULTILINE: + self.fail() + if regex.S != regex.DOTALL: + self.fail() + if regex.X != regex.VERBOSE: + self.fail() + + def test_flags(self): + for flag in [regex.I, regex.M, regex.X, regex.S, regex.L]: + self.assertEqual(repr(type(regex.compile('^pattern$', flag))), + self.PATTERN_CLASS) + + def test_sre_character_literals(self): + for i in [0, 8, 16, 32, 64, 127, 128, 255]: + self.assertEqual(bool(regex.match(r"\%03o" % i, chr(i))), True) + self.assertEqual(bool(regex.match(r"\%03o0" % i, chr(i) + "0")), + True) + self.assertEqual(bool(regex.match(r"\%03o8" % i, chr(i) + "8")), + True) + self.assertEqual(bool(regex.match(r"\x%02x" % i, chr(i))), True) + self.assertEqual(bool(regex.match(r"\x%02x0" % i, chr(i) + "0")), + True) + self.assertEqual(bool(regex.match(r"\x%02xz" % i, chr(i) + "z")), + True) + + self.assertRaisesRegex(regex.error, self.INVALID_GROUP_REF, lambda: + regex.match(r"\911", "")) + + def 
test_sre_character_class_literals(self): + for i in [0, 8, 16, 32, 64, 127, 128, 255]: + self.assertEqual(bool(regex.match(r"[\%03o]" % i, chr(i))), True) + self.assertEqual(bool(regex.match(r"[\%03o0]" % i, chr(i))), True) + self.assertEqual(bool(regex.match(r"[\%03o8]" % i, chr(i))), True) + self.assertEqual(bool(regex.match(r"[\x%02x]" % i, chr(i))), True) + self.assertEqual(bool(regex.match(r"[\x%02x0]" % i, chr(i))), True) + self.assertEqual(bool(regex.match(r"[\x%02xz]" % i, chr(i))), True) + + self.assertRaisesRegex(regex.error, self.BAD_OCTAL_ESCAPE, lambda: + regex.match(r"[\911]", "")) + + def test_bug_113254(self): + self.assertEqual(regex.match(r'(a)|(b)', 'b').start(1), -1) + self.assertEqual(regex.match(r'(a)|(b)', 'b').end(1), -1) + self.assertEqual(regex.match(r'(a)|(b)', 'b').span(1), (-1, -1)) + + def test_bug_527371(self): + # Bug described in patches 527371/672491. + self.assertEqual(regex.match(r'(a)?a','a').lastindex, None) + self.assertEqual(regex.match(r'(a)(b)?b','ab').lastindex, 1) + self.assertEqual(regex.match(r'(?Pa)(?Pb)?b','ab').lastgroup, + 'a') + self.assertEqual(regex.match("(?Pa(b))", "ab").lastgroup, 'a') + self.assertEqual(regex.match("((a))", "a").lastindex, 1) + + def test_bug_545855(self): + # Bug 545855 -- This pattern failed to cause a compile error as it + # should, instead provoking a TypeError. + self.assertRaisesRegex(regex.error, self.BAD_SET, lambda: + regex.compile('foo[a-')) + + def test_bug_418626(self): + # Bugs 418626 at al. -- Testing Greg Chapman's addition of op code + # SRE_OP_MIN_REPEAT_ONE for eliminating recursion on simple uses of + # pattern '*?' on a long string. + self.assertEqual(regex.match('.*?c', 10000 * 'ab' + 'cd').end(0), + 20001) + self.assertEqual(regex.match('.*?cd', 5000 * 'ab' + 'c' + 5000 * 'ab' + + 'cde').end(0), 20003) + self.assertEqual(regex.match('.*?cd', 20000 * 'abc' + 'de').end(0), + 60001) + # Non-simple '*?' 
still used to hit the recursion limit, before the + # non-recursive scheme was implemented. + self.assertEqual(regex.search('(a|b)*?c', 10000 * 'ab' + 'cd').end(0), + 20001) + + def test_bug_612074(self): + pat = "[" + regex.escape("\u2039") + "]" + self.assertEqual(regex.compile(pat) and 1, 1) + + def test_stack_overflow(self): + # Nasty cases that used to overflow the straightforward recursive + # implementation of repeated groups. + self.assertEqual(regex.match('(x)*', 50000 * 'x')[1], 'x') + self.assertEqual(regex.match('(x)*y', 50000 * 'x' + 'y')[1], 'x') + self.assertEqual(regex.match('(x)*?y', 50000 * 'x' + 'y')[1], 'x') + + def test_scanner(self): + def s_ident(scanner, token): return token + def s_operator(scanner, token): return "op%s" % token + def s_float(scanner, token): return float(token) + def s_int(scanner, token): return int(token) + + scanner = regex.Scanner([(r"[a-zA-Z_]\w*", s_ident), (r"\d+\.\d*", + s_float), (r"\d+", s_int), (r"=|\+|-|\*|/", s_operator), (r"\s+", + None), ]) + + self.assertEqual(repr(type(scanner.scanner.scanner("").pattern)), + self.PATTERN_CLASS) + + self.assertEqual(scanner.scan("sum = 3*foo + 312.50 + bar"), (['sum', + 'op=', 3, 'op*', 'foo', 'op+', 312.5, 'op+', 'bar'], '')) + + def test_bug_448951(self): + # Bug 448951 (similar to 429357, but with single char match). + # (Also test greedy matches.) + for op in '', '?', '*': + self.assertEqual(regex.match(r'((.%s):)?z' % op, 'z')[:], ('z', + None, None)) + self.assertEqual(regex.match(r'((.%s):)?z' % op, 'a:z')[:], ('a:z', + 'a:', 'a')) + + def test_bug_725106(self): + # Capturing groups in alternatives in repeats. 
+ self.assertEqual(regex.match('^((a)|b)*', 'abc')[:], ('ab', 'b', 'a')) + self.assertEqual(regex.match('^(([ab])|c)*', 'abc')[:], ('abc', 'c', + 'b')) + self.assertEqual(regex.match('^((d)|[ab])*', 'abc')[:], ('ab', 'b', + None)) + self.assertEqual(regex.match('^((a)c|[ab])*', 'abc')[:], ('ab', 'b', + None)) + self.assertEqual(regex.match('^((a)|b)*?c', 'abc')[:], ('abc', 'b', + 'a')) + self.assertEqual(regex.match('^(([ab])|c)*?d', 'abcd')[:], ('abcd', + 'c', 'b')) + self.assertEqual(regex.match('^((d)|[ab])*?c', 'abc')[:], ('abc', 'b', + None)) + self.assertEqual(regex.match('^((a)c|[ab])*?c', 'abc')[:], ('abc', 'b', + None)) + + def test_bug_725149(self): + # Mark_stack_base restoring before restoring marks. + self.assertEqual(regex.match('(a)(?:(?=(b)*)c)*', 'abb')[:], ('a', 'a', + None)) + self.assertEqual(regex.match('(a)((?!(b)*))*', 'abb')[:], ('a', 'a', + None, None)) + + def test_bug_764548(self): + # Bug 764548, regex.compile() barfs on str/unicode subclasses. + class my_unicode(str): pass + pat = regex.compile(my_unicode("abc")) + self.assertEqual(pat.match("xyz"), None) + + def test_finditer(self): + it = regex.finditer(r":+", "a:b::c:::d") + self.assertEqual([item[0] for item in it], [':', '::', ':::']) + + def test_bug_926075(self): + if regex.compile('bug_926075') is regex.compile(b'bug_926075'): + self.fail() + + def test_bug_931848(self): + pattern = "[\u002E\u3002\uFF0E\uFF61]" + self.assertEqual(regex.compile(pattern).split("a.b.c"), ['a', 'b', + 'c']) + + def test_bug_581080(self): + it = regex.finditer(r"\s", "a b") + self.assertEqual(next(it).span(), (1, 2)) + self.assertRaises(StopIteration, lambda: next(it)) + + scanner = regex.compile(r"\s").scanner("a b") + self.assertEqual(scanner.search().span(), (1, 2)) + self.assertEqual(scanner.search(), None) + + def test_bug_817234(self): + it = regex.finditer(r".*", "asdf") + self.assertEqual(next(it).span(), (0, 4)) + self.assertEqual(next(it).span(), (4, 4)) + self.assertRaises(StopIteration, 
lambda: next(it)) + + def test_empty_array(self): + # SF buf 1647541. + import array + for typecode in 'bBuhHiIlLfd': + a = array.array(typecode) + self.assertEqual(regex.compile(b"bla").match(a), None) + self.assertEqual(regex.compile(b"").match(a)[1 : ], ()) + + def test_inline_flags(self): + # Bug #1700. + upper_char = chr(0x1ea0) # Latin Capital Letter A with Dot Below + lower_char = chr(0x1ea1) # Latin Small Letter A with Dot Below + + p = regex.compile(upper_char, regex.I | regex.U) + self.assertEqual(bool(p.match(lower_char)), True) + + p = regex.compile(lower_char, regex.I | regex.U) + self.assertEqual(bool(p.match(upper_char)), True) + + p = regex.compile('(?i)' + upper_char, regex.U) + self.assertEqual(bool(p.match(lower_char)), True) + + p = regex.compile('(?i)' + lower_char, regex.U) + self.assertEqual(bool(p.match(upper_char)), True) + + p = regex.compile('(?iu)' + upper_char) + self.assertEqual(bool(p.match(lower_char)), True) + + p = regex.compile('(?iu)' + lower_char) + self.assertEqual(bool(p.match(upper_char)), True) + + # Changed to positional flags in regex 2023.12.23. + self.assertEqual(bool(regex.match(r"(?i)a", "A")), True) + self.assertEqual(regex.match(r"a(?i)", "A"), None) + + def test_dollar_matches_twice(self): + # $ matches the end of string, and just before the terminating \n. + pattern = regex.compile('$') + self.assertEqual(pattern.sub('#', 'a\nb\n'), 'a\nb#\n#') + self.assertEqual(pattern.sub('#', 'a\nb\nc'), 'a\nb\nc#') + self.assertEqual(pattern.sub('#', '\n'), '#\n#') + + pattern = regex.compile('$', regex.MULTILINE) + self.assertEqual(pattern.sub('#', 'a\nb\n' ), 'a#\nb#\n#') + self.assertEqual(pattern.sub('#', 'a\nb\nc'), 'a#\nb#\nc#') + self.assertEqual(pattern.sub('#', '\n'), '#\n#') + + def test_bytes_str_mixing(self): + # Mixing str and bytes is disallowed. 
+ pat = regex.compile('.') + bpat = regex.compile(b'.') + self.assertRaisesRegex(TypeError, self.STR_PAT_ON_BYTES, lambda: + pat.match(b'b')) + self.assertRaisesRegex(TypeError, self.BYTES_PAT_ON_STR, lambda: + bpat.match('b')) + self.assertRaisesRegex(TypeError, self.STR_PAT_BYTES_TEMPL, lambda: + pat.sub(b'b', 'c')) + self.assertRaisesRegex(TypeError, self.STR_PAT_ON_BYTES, lambda: + pat.sub('b', b'c')) + self.assertRaisesRegex(TypeError, self.STR_PAT_ON_BYTES, lambda: + pat.sub(b'b', b'c')) + self.assertRaisesRegex(TypeError, self.BYTES_PAT_ON_STR, lambda: + bpat.sub(b'b', 'c')) + self.assertRaisesRegex(TypeError, self.BYTES_PAT_STR_TEMPL, lambda: + bpat.sub('b', b'c')) + self.assertRaisesRegex(TypeError, self.BYTES_PAT_ON_STR, lambda: + bpat.sub('b', 'c')) + + self.assertRaisesRegex(ValueError, self.BYTES_PAT_UNI_FLAG, lambda: + regex.compile(br'\w', regex.UNICODE)) + self.assertRaisesRegex(ValueError, self.BYTES_PAT_UNI_FLAG, lambda: + regex.compile(br'(?u)\w')) + self.assertRaisesRegex(ValueError, self.MIXED_FLAGS, lambda: + regex.compile(r'\w', regex.UNICODE | regex.ASCII)) + self.assertRaisesRegex(ValueError, self.MIXED_FLAGS, lambda: + regex.compile(r'(?u)\w', regex.ASCII)) + self.assertRaisesRegex(ValueError, self.MIXED_FLAGS, lambda: + regex.compile(r'(?a)\w', regex.UNICODE)) + self.assertRaisesRegex(ValueError, self.MIXED_FLAGS, lambda: + regex.compile(r'(?au)\w')) + + def test_ascii_and_unicode_flag(self): + # String patterns. 
+ for flags in (0, regex.UNICODE): + pat = regex.compile('\xc0', flags | regex.IGNORECASE) + self.assertEqual(bool(pat.match('\xe0')), True) + pat = regex.compile(r'\w', flags) + self.assertEqual(bool(pat.match('\xe0')), True) + + pat = regex.compile('\xc0', regex.ASCII | regex.IGNORECASE) + self.assertEqual(pat.match('\xe0'), None) + pat = regex.compile('(?a)\xc0', regex.IGNORECASE) + self.assertEqual(pat.match('\xe0'), None) + pat = regex.compile(r'\w', regex.ASCII) + self.assertEqual(pat.match('\xe0'), None) + pat = regex.compile(r'(?a)\w') + self.assertEqual(pat.match('\xe0'), None) + + # Bytes patterns. + for flags in (0, regex.ASCII): + pat = regex.compile(b'\xc0', flags | regex.IGNORECASE) + self.assertEqual(pat.match(b'\xe0'), None) + pat = regex.compile(br'\w') + self.assertEqual(pat.match(b'\xe0'), None) + + self.assertRaisesRegex(ValueError, self.MIXED_FLAGS, lambda: + regex.compile(r'(?au)\w')) + + def test_subscripting_match(self): + m = regex.match(r'(?\w)', 'xy') + if not m: + self.fail("Failed: expected match but returned None") + elif not m or m[0] != m.group(0) or m[1] != m.group(1): + self.fail("Failed") + if not m: + self.fail("Failed: expected match but returned None") + elif m[:] != ('x', 'x'): + self.fail("Failed: expected \"('x', 'x')\" but got {} instead".format(ascii(m[:]))) + + def test_new_named_groups(self): + m0 = regex.match(r'(?P\w)', 'x') + m1 = regex.match(r'(?\w)', 'x') + if not (m0 and m1 and m0[:] == m1[:]): + self.fail("Failed") + + def test_properties(self): + self.assertEqual(regex.match(b'(?ai)\xC0', b'\xE0'), None) + self.assertEqual(regex.match(br'(?ai)\xC0', b'\xE0'), None) + self.assertEqual(regex.match(br'(?a)\w', b'\xE0'), None) + self.assertEqual(bool(regex.match(r'\w', '\xE0')), True) + + # Dropped the following test. It's not possible to determine what the + # correct result should be in the general case. 
+# self.assertEqual(bool(regex.match(br'(?L)\w', b'\xE0')), +# b'\xE0'.isalnum()) + + self.assertEqual(bool(regex.match(br'(?L)\d', b'0')), True) + self.assertEqual(bool(regex.match(br'(?L)\s', b' ')), True) + self.assertEqual(bool(regex.match(br'(?L)\w', b'a')), True) + self.assertEqual(regex.match(br'(?L)\d', b'?'), None) + self.assertEqual(regex.match(br'(?L)\s', b'?'), None) + self.assertEqual(regex.match(br'(?L)\w', b'?'), None) + + self.assertEqual(regex.match(br'(?L)\D', b'0'), None) + self.assertEqual(regex.match(br'(?L)\S', b' '), None) + self.assertEqual(regex.match(br'(?L)\W', b'a'), None) + self.assertEqual(bool(regex.match(br'(?L)\D', b'?')), True) + self.assertEqual(bool(regex.match(br'(?L)\S', b'?')), True) + self.assertEqual(bool(regex.match(br'(?L)\W', b'?')), True) + + self.assertEqual(bool(regex.match(r'\p{Cyrillic}', + '\N{CYRILLIC CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'(?i)\p{Cyrillic}', + '\N{CYRILLIC CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'\p{IsCyrillic}', + '\N{CYRILLIC CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'\p{Script=Cyrillic}', + '\N{CYRILLIC CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'\p{InCyrillic}', + '\N{CYRILLIC CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'\p{Block=Cyrillic}', + '\N{CYRILLIC CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'[[:Cyrillic:]]', + '\N{CYRILLIC CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'[[:IsCyrillic:]]', + '\N{CYRILLIC CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'[[:Script=Cyrillic:]]', + '\N{CYRILLIC CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'[[:InCyrillic:]]', + '\N{CYRILLIC CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'[[:Block=Cyrillic:]]', + '\N{CYRILLIC CAPITAL LETTER A}')), True) + + self.assertEqual(bool(regex.match(r'\P{Cyrillic}', + '\N{LATIN CAPITAL LETTER A}')), True) + 
self.assertEqual(bool(regex.match(r'\P{IsCyrillic}', + '\N{LATIN CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'\P{Script=Cyrillic}', + '\N{LATIN CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'\P{InCyrillic}', + '\N{LATIN CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'\P{Block=Cyrillic}', + '\N{LATIN CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'\p{^Cyrillic}', + '\N{LATIN CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'\p{^IsCyrillic}', + '\N{LATIN CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'\p{^Script=Cyrillic}', + '\N{LATIN CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'\p{^InCyrillic}', + '\N{LATIN CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'\p{^Block=Cyrillic}', + '\N{LATIN CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'[[:^Cyrillic:]]', + '\N{LATIN CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'[[:^IsCyrillic:]]', + '\N{LATIN CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'[[:^Script=Cyrillic:]]', + '\N{LATIN CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'[[:^InCyrillic:]]', + '\N{LATIN CAPITAL LETTER A}')), True) + self.assertEqual(bool(regex.match(r'[[:^Block=Cyrillic:]]', + '\N{LATIN CAPITAL LETTER A}')), True) + + self.assertEqual(bool(regex.match(r'\d', '0')), True) + self.assertEqual(bool(regex.match(r'\s', ' ')), True) + self.assertEqual(bool(regex.match(r'\w', 'A')), True) + self.assertEqual(regex.match(r"\d", "?"), None) + self.assertEqual(regex.match(r"\s", "?"), None) + self.assertEqual(regex.match(r"\w", "?"), None) + self.assertEqual(regex.match(r"\D", "0"), None) + self.assertEqual(regex.match(r"\S", " "), None) + self.assertEqual(regex.match(r"\W", "A"), None) + self.assertEqual(bool(regex.match(r'\D', '?')), True) + self.assertEqual(bool(regex.match(r'\S', '?')), True) + self.assertEqual(bool(regex.match(r'\W', 
'?')), True) + + self.assertEqual(bool(regex.match(r'\p{L}', 'A')), True) + self.assertEqual(bool(regex.match(r'\p{L}', 'a')), True) + self.assertEqual(bool(regex.match(r'\p{Lu}', 'A')), True) + self.assertEqual(bool(regex.match(r'\p{Ll}', 'a')), True) + + self.assertEqual(bool(regex.match(r'(?i)a', 'a')), True) + self.assertEqual(bool(regex.match(r'(?i)a', 'A')), True) + + self.assertEqual(bool(regex.match(r'\w', '0')), True) + self.assertEqual(bool(regex.match(r'\w', 'a')), True) + self.assertEqual(bool(regex.match(r'\w', '_')), True) + + self.assertEqual(regex.match(r"\X", "\xE0").span(), (0, 1)) + self.assertEqual(regex.match(r"\X", "a\u0300").span(), (0, 2)) + self.assertEqual(regex.findall(r"\X", + "a\xE0a\u0300e\xE9e\u0301"), ['a', '\xe0', 'a\u0300', 'e', + '\xe9', 'e\u0301']) + self.assertEqual(regex.findall(r"\X{3}", + "a\xE0a\u0300e\xE9e\u0301"), ['a\xe0a\u0300', 'e\xe9e\u0301']) + self.assertEqual(regex.findall(r"\X", "\r\r\n\u0301A\u0301"), + ['\r', '\r\n', '\u0301', 'A\u0301']) + + self.assertEqual(bool(regex.match(r'\p{Ll}', 'a')), True) + + chars_u = "-09AZaz_\u0393\u03b3" + chars_b = b"-09AZaz_" + word_set = set("Ll Lm Lo Lt Lu Mc Me Mn Nd Nl No Pc".split()) + + tests = [ + (r"\w", chars_u, "09AZaz_\u0393\u03b3"), + (r"[[:word:]]", chars_u, "09AZaz_\u0393\u03b3"), + (r"\W", chars_u, "-"), + (r"[[:^word:]]", chars_u, "-"), + (r"\d", chars_u, "09"), + (r"[[:digit:]]", chars_u, "09"), + (r"\D", chars_u, "-AZaz_\u0393\u03b3"), + (r"[[:^digit:]]", chars_u, "-AZaz_\u0393\u03b3"), + (r"[[:alpha:]]", chars_u, "AZaz\u0393\u03b3"), + (r"[[:^alpha:]]", chars_u, "-09_"), + (r"[[:alnum:]]", chars_u, "09AZaz\u0393\u03b3"), + (r"[[:^alnum:]]", chars_u, "-_"), + (r"[[:xdigit:]]", chars_u, "09Aa"), + (r"[[:^xdigit:]]", chars_u, "-Zz_\u0393\u03b3"), + (r"\p{InBasicLatin}", "a\xE1", "a"), + (r"\P{InBasicLatin}", "a\xE1", "\xE1"), + (r"(?i)\p{InBasicLatin}", "a\xE1", "a"), + (r"(?i)\P{InBasicLatin}", "a\xE1", "\xE1"), + + (br"(?L)\w", chars_b, b"09AZaz_"), + 
(br"(?L)[[:word:]]", chars_b, b"09AZaz_"), + (br"(?L)\W", chars_b, b"-"), + (br"(?L)[[:^word:]]", chars_b, b"-"), + (br"(?L)\d", chars_b, b"09"), + (br"(?L)[[:digit:]]", chars_b, b"09"), + (br"(?L)\D", chars_b, b"-AZaz_"), + (br"(?L)[[:^digit:]]", chars_b, b"-AZaz_"), + (br"(?L)[[:alpha:]]", chars_b, b"AZaz"), + (br"(?L)[[:^alpha:]]", chars_b, b"-09_"), + (br"(?L)[[:alnum:]]", chars_b, b"09AZaz"), + (br"(?L)[[:^alnum:]]", chars_b, b"-_"), + (br"(?L)[[:xdigit:]]", chars_b, b"09Aa"), + (br"(?L)[[:^xdigit:]]", chars_b, b"-Zz_"), + + (br"(?a)\w", chars_b, b"09AZaz_"), + (br"(?a)[[:word:]]", chars_b, b"09AZaz_"), + (br"(?a)\W", chars_b, b"-"), + (br"(?a)[[:^word:]]", chars_b, b"-"), + (br"(?a)\d", chars_b, b"09"), + (br"(?a)[[:digit:]]", chars_b, b"09"), + (br"(?a)\D", chars_b, b"-AZaz_"), + (br"(?a)[[:^digit:]]", chars_b, b"-AZaz_"), + (br"(?a)[[:alpha:]]", chars_b, b"AZaz"), + (br"(?a)[[:^alpha:]]", chars_b, b"-09_"), + (br"(?a)[[:alnum:]]", chars_b, b"09AZaz"), + (br"(?a)[[:^alnum:]]", chars_b, b"-_"), + (br"(?a)[[:xdigit:]]", chars_b, b"09Aa"), + (br"(?a)[[:^xdigit:]]", chars_b, b"-Zz_"), + ] + for pattern, chars, expected in tests: + try: + if chars[ : 0].join(regex.findall(pattern, chars)) != expected: + self.fail("Failed: {}".format(pattern)) + except Exception as e: + self.fail("Failed: {} raised {}".format(pattern, ascii(e))) + + self.assertEqual(bool(regex.match(r"\p{NumericValue=0}", "0")), + True) + self.assertEqual(bool(regex.match(r"\p{NumericValue=1/2}", + "\N{VULGAR FRACTION ONE HALF}")), True) + self.assertEqual(bool(regex.match(r"\p{NumericValue=0.5}", + "\N{VULGAR FRACTION ONE HALF}")), True) + + def test_word_class(self): + self.assertEqual(regex.findall(r"\w+", + " \u0939\u093f\u0928\u094d\u0926\u0940,"), + ['\u0939\u093f\u0928\u094d\u0926\u0940']) + self.assertEqual(regex.findall(r"\W+", + " \u0939\u093f\u0928\u094d\u0926\u0940,"), [' ', ',']) + self.assertEqual(regex.split(r"(?V1)\b", + " \u0939\u093f\u0928\u094d\u0926\u0940,"), [' ', + 
'\u0939\u093f\u0928\u094d\u0926\u0940', ',']) + self.assertEqual(regex.split(r"(?V1)\B", + " \u0939\u093f\u0928\u094d\u0926\u0940,"), ['', ' \u0939', + '\u093f', '\u0928', '\u094d', '\u0926', '\u0940,', '']) + + def test_search_anchor(self): + self.assertEqual(regex.findall(r"\G\w{2}", "abcd ef"), ['ab', 'cd']) + + def test_search_reverse(self): + self.assertEqual(regex.findall(r"(?r).", "abc"), ['c', 'b', 'a']) + self.assertEqual(regex.findall(r"(?r).", "abc", overlapped=True), ['c', + 'b', 'a']) + self.assertEqual(regex.findall(r"(?r)..", "abcde"), ['de', 'bc']) + self.assertEqual(regex.findall(r"(?r)..", "abcde", overlapped=True), + ['de', 'cd', 'bc', 'ab']) + self.assertEqual(regex.findall(r"(?r)(.)(-)(.)", "a-b-c", + overlapped=True), [("b", "-", "c"), ("a", "-", "b")]) + + self.assertEqual([m[0] for m in regex.finditer(r"(?r).", "abc")], ['c', + 'b', 'a']) + self.assertEqual([m[0] for m in regex.finditer(r"(?r)..", "abcde", + overlapped=True)], ['de', 'cd', 'bc', 'ab']) + self.assertEqual([m[0] for m in regex.finditer(r"(?r).", "abc")], ['c', + 'b', 'a']) + self.assertEqual([m[0] for m in regex.finditer(r"(?r)..", "abcde", + overlapped=True)], ['de', 'cd', 'bc', 'ab']) + + self.assertEqual(regex.findall(r"^|\w+", "foo bar"), ['', 'foo', + 'bar']) + self.assertEqual(regex.findall(r"(?V1)^|\w+", "foo bar"), ['', 'foo', + 'bar']) + self.assertEqual(regex.findall(r"(?r)^|\w+", "foo bar"), ['bar', 'foo', + '']) + self.assertEqual(regex.findall(r"(?rV1)^|\w+", "foo bar"), ['bar', + 'foo', '']) + + self.assertEqual([m[0] for m in regex.finditer(r"^|\w+", "foo bar")], + ['', 'foo', 'bar']) + self.assertEqual([m[0] for m in regex.finditer(r"(?V1)^|\w+", + "foo bar")], ['', 'foo', 'bar']) + self.assertEqual([m[0] for m in regex.finditer(r"(?r)^|\w+", + "foo bar")], ['bar', 'foo', '']) + self.assertEqual([m[0] for m in regex.finditer(r"(?rV1)^|\w+", + "foo bar")], ['bar', 'foo', '']) + + self.assertEqual(regex.findall(r"\G\w{2}", "abcd ef"), ['ab', 'cd']) + 
self.assertEqual(regex.findall(r".{2}(?<=\G.*)", "abcd"), ['ab', 'cd']) + self.assertEqual(regex.findall(r"(?r)\G\w{2}", "abcd ef"), []) + self.assertEqual(regex.findall(r"(?r)\w{2}\G", "abcd ef"), ['ef']) + + self.assertEqual(regex.findall(r"q*", "qqwe"), ['qq', '', '', '']) + self.assertEqual(regex.findall(r"(?V1)q*", "qqwe"), ['qq', '', '', '']) + self.assertEqual(regex.findall(r"(?r)q*", "qqwe"), ['', '', 'qq', '']) + self.assertEqual(regex.findall(r"(?rV1)q*", "qqwe"), ['', '', 'qq', + '']) + + self.assertEqual(regex.findall(".", "abcd", pos=1, endpos=3), ['b', + 'c']) + self.assertEqual(regex.findall(".", "abcd", pos=1, endpos=-1), ['b', + 'c']) + self.assertEqual([m[0] for m in regex.finditer(".", "abcd", pos=1, + endpos=3)], ['b', 'c']) + self.assertEqual([m[0] for m in regex.finditer(".", "abcd", pos=1, + endpos=-1)], ['b', 'c']) + + self.assertEqual([m[0] for m in regex.finditer("(?r).", "abcd", pos=1, + endpos=3)], ['c', 'b']) + self.assertEqual([m[0] for m in regex.finditer("(?r).", "abcd", pos=1, + endpos=-1)], ['c', 'b']) + self.assertEqual(regex.findall("(?r).", "abcd", pos=1, endpos=3), ['c', + 'b']) + self.assertEqual(regex.findall("(?r).", "abcd", pos=1, endpos=-1), + ['c', 'b']) + + self.assertEqual(regex.findall(r"[ab]", "aB", regex.I), ['a', 'B']) + self.assertEqual(regex.findall(r"(?r)[ab]", "aB", regex.I), ['B', 'a']) + + self.assertEqual(regex.findall(r"(?r).{2}", "abc"), ['bc']) + self.assertEqual(regex.findall(r"(?r).{2}", "abc", overlapped=True), + ['bc', 'ab']) + self.assertEqual(regex.findall(r"(\w+) (\w+)", + "first second third fourth fifth"), [('first', 'second'), ('third', + 'fourth')]) + self.assertEqual(regex.findall(r"(?r)(\w+) (\w+)", + "first second third fourth fifth"), [('fourth', 'fifth'), ('second', + 'third')]) + + self.assertEqual([m[0] for m in regex.finditer(r"(?r).{2}", "abc")], + ['bc']) + self.assertEqual([m[0] for m in regex.finditer(r"(?r).{2}", "abc", + overlapped=True)], ['bc', 'ab']) + self.assertEqual([m[0] for 
m in regex.finditer(r"(\w+) (\w+)", + "first second third fourth fifth")], ['first second', + 'third fourth']) + self.assertEqual([m[0] for m in regex.finditer(r"(?r)(\w+) (\w+)", + "first second third fourth fifth")], ['fourth fifth', + 'second third']) + + self.assertEqual(regex.search("abcdef", "abcdef").span(), (0, 6)) + self.assertEqual(regex.search("(?r)abcdef", "abcdef").span(), (0, 6)) + self.assertEqual(regex.search("(?i)abcdef", "ABCDEF").span(), (0, 6)) + self.assertEqual(regex.search("(?ir)abcdef", "ABCDEF").span(), (0, 6)) + + self.assertEqual(regex.sub(r"(.)", r"\1", "abc"), 'abc') + self.assertEqual(regex.sub(r"(?r)(.)", r"\1", "abc"), 'abc') + + def test_atomic(self): + # Issue 433030. + self.assertEqual(regex.search(r"(?>a*)a", "aa"), None) + + def test_possessive(self): + # Single-character non-possessive. + self.assertEqual(regex.search(r"a?a", "a").span(), (0, 1)) + self.assertEqual(regex.search(r"a*a", "aaa").span(), (0, 3)) + self.assertEqual(regex.search(r"a+a", "aaa").span(), (0, 3)) + self.assertEqual(regex.search(r"a{1,3}a", "aaa").span(), (0, 3)) + + # Multiple-character non-possessive. + self.assertEqual(regex.search(r"(?:ab)?ab", "ab").span(), (0, 2)) + self.assertEqual(regex.search(r"(?:ab)*ab", "ababab").span(), (0, 6)) + self.assertEqual(regex.search(r"(?:ab)+ab", "ababab").span(), (0, 6)) + self.assertEqual(regex.search(r"(?:ab){1,3}ab", "ababab").span(), (0, + 6)) + + # Single-character possessive. + self.assertEqual(regex.search(r"a?+a", "a"), None) + self.assertEqual(regex.search(r"a*+a", "aaa"), None) + self.assertEqual(regex.search(r"a++a", "aaa"), None) + self.assertEqual(regex.search(r"a{1,3}+a", "aaa"), None) + + # Multiple-character possessive. 
+ self.assertEqual(regex.search(r"(?:ab)?+ab", "ab"), None) + self.assertEqual(regex.search(r"(?:ab)*+ab", "ababab"), None) + self.assertEqual(regex.search(r"(?:ab)++ab", "ababab"), None) + self.assertEqual(regex.search(r"(?:ab){1,3}+ab", "ababab"), None) + + def test_zerowidth(self): + # Issue 3262. + if sys.version_info >= (3, 7, 0): + self.assertEqual(regex.split(r"\b", "a b"), ['', 'a', ' ', 'b', + '']) + else: + self.assertEqual(regex.split(r"\b", "a b"), ['a b']) + self.assertEqual(regex.split(r"(?V1)\b", "a b"), ['', 'a', ' ', 'b', + '']) + + # Issue 1647489. + self.assertEqual(regex.findall(r"^|\w+", "foo bar"), ['', 'foo', + 'bar']) + self.assertEqual([m[0] for m in regex.finditer(r"^|\w+", "foo bar")], + ['', 'foo', 'bar']) + self.assertEqual(regex.findall(r"(?r)^|\w+", "foo bar"), ['bar', + 'foo', '']) + self.assertEqual([m[0] for m in regex.finditer(r"(?r)^|\w+", + "foo bar")], ['bar', 'foo', '']) + self.assertEqual(regex.findall(r"(?V1)^|\w+", "foo bar"), ['', 'foo', + 'bar']) + self.assertEqual([m[0] for m in regex.finditer(r"(?V1)^|\w+", + "foo bar")], ['', 'foo', 'bar']) + self.assertEqual(regex.findall(r"(?rV1)^|\w+", "foo bar"), ['bar', + 'foo', '']) + self.assertEqual([m[0] for m in regex.finditer(r"(?rV1)^|\w+", + "foo bar")], ['bar', 'foo', '']) + + if sys.version_info >= (3, 7, 0): + self.assertEqual(regex.split("", "xaxbxc"), ['', 'x', 'a', 'x', + 'b', 'x', 'c', '']) + self.assertEqual([m for m in regex.splititer("", "xaxbxc")], ['', + 'x', 'a', 'x', 'b', 'x', 'c', '']) + else: + self.assertEqual(regex.split("", "xaxbxc"), ['xaxbxc']) + self.assertEqual([m for m in regex.splititer("", "xaxbxc")], + ['xaxbxc']) + + if sys.version_info >= (3, 7, 0): + self.assertEqual(regex.split("(?r)", "xaxbxc"), ['', 'c', 'x', 'b', + 'x', 'a', 'x', '']) + self.assertEqual([m for m in regex.splititer("(?r)", "xaxbxc")], + ['', 'c', 'x', 'b', 'x', 'a', 'x', '']) + else: + self.assertEqual(regex.split("(?r)", "xaxbxc"), ['xaxbxc']) + self.assertEqual([m for m 
in regex.splititer("(?r)", "xaxbxc")], + ['xaxbxc']) + + self.assertEqual(regex.split("(?V1)", "xaxbxc"), ['', 'x', 'a', 'x', + 'b', 'x', 'c', '']) + self.assertEqual([m for m in regex.splititer("(?V1)", "xaxbxc")], ['', + 'x', 'a', 'x', 'b', 'x', 'c', '']) + + self.assertEqual(regex.split("(?rV1)", "xaxbxc"), ['', 'c', 'x', 'b', + 'x', 'a', 'x', '']) + self.assertEqual([m for m in regex.splititer("(?rV1)", "xaxbxc")], ['', + 'c', 'x', 'b', 'x', 'a', 'x', '']) + + def test_scoped_and_inline_flags(self): + # Issues 433028, 433024, 433027. + self.assertEqual(regex.search(r"(?i)Ab", "ab").span(), (0, 2)) + self.assertEqual(regex.search(r"(?i:A)b", "ab").span(), (0, 2)) + # Changed to positional flags in regex 2023.12.23. + self.assertEqual(regex.search(r"A(?i)b", "ab"), None) + + self.assertEqual(regex.search(r"(?V0)Ab", "ab"), None) + self.assertEqual(regex.search(r"(?V1)Ab", "ab"), None) + self.assertEqual(regex.search(r"(?-i)Ab", "ab", flags=regex.I), None) + self.assertEqual(regex.search(r"(?-i:A)b", "ab", flags=regex.I), None) + self.assertEqual(regex.search(r"A(?-i)b", "ab", flags=regex.I).span(), + (0, 2)) + + def test_repeated_repeats(self): + # Issue 2537. + self.assertEqual(regex.search(r"(?:a+)+", "aaa").span(), (0, 3)) + self.assertEqual(regex.search(r"(?:(?:ab)+c)+", "abcabc").span(), (0, + 6)) + + # Hg issue 286. 
+ self.assertEqual(regex.search(r"(?:a+){2,}", "aaa").span(), (0, 3)) + + def test_lookbehind(self): + self.assertEqual(regex.search(r"123(?<=a\d+)", "a123").span(), (1, 4)) + self.assertEqual(regex.search(r"123(?<=a\d+)", "b123"), None) + self.assertEqual(regex.search(r"123(?= (3, 7, 0): + self.assertEqual(regex.sub(r"(?V0)(x)?(y)?", r"\2-\1", "xy"), + 'y-x-') + else: + self.assertEqual(regex.sub(r"(?V0)(x)?(y)?", r"\2-\1", "xy"), + 'y-x') + self.assertEqual(regex.sub(r"(?V1)(x)?(y)?", r"\2-\1", "xy"), 'y-x-') + if sys.version_info >= (3, 7, 0): + self.assertEqual(regex.sub(r"(?V0)(x)?(y)?", r"\2-\1", "x"), '-x-') + else: + self.assertEqual(regex.sub(r"(?V0)(x)?(y)?", r"\2-\1", "x"), '-x') + self.assertEqual(regex.sub(r"(?V1)(x)?(y)?", r"\2-\1", "x"), '-x-') + if sys.version_info >= (3, 7, 0): + self.assertEqual(regex.sub(r"(?V0)(x)?(y)?", r"\2-\1", "y"), 'y--') + else: + self.assertEqual(regex.sub(r"(?V0)(x)?(y)?", r"\2-\1", "y"), 'y-') + self.assertEqual(regex.sub(r"(?V1)(x)?(y)?", r"\2-\1", "y"), 'y--') + + def test_bug_10328 (self): + # Issue 10328. 
+ pat = regex.compile(r'(?mV0)(?P[ \t]+\r*$)|(?P(?<=[^\n])\Z)') + if sys.version_info >= (3, 7, 0): + self.assertEqual(pat.subn(lambda m: '<' + m.lastgroup + '>', + 'foobar '), ('foobar', 2)) + else: + self.assertEqual(pat.subn(lambda m: '<' + m.lastgroup + '>', + 'foobar '), ('foobar', 1)) + self.assertEqual([m.group() for m in pat.finditer('foobar ')], [' ', + '']) + pat = regex.compile(r'(?mV1)(?P[ \t]+\r*$)|(?P(?<=[^\n])\Z)') + self.assertEqual(pat.subn(lambda m: '<' + m.lastgroup + '>', + 'foobar '), ('foobar', 2)) + self.assertEqual([m.group() for m in pat.finditer('foobar ')], [' ', + '']) + + def test_overlapped(self): + self.assertEqual(regex.findall(r"..", "abcde"), ['ab', 'cd']) + self.assertEqual(regex.findall(r"..", "abcde", overlapped=True), ['ab', + 'bc', 'cd', 'de']) + self.assertEqual(regex.findall(r"(?r)..", "abcde"), ['de', 'bc']) + self.assertEqual(regex.findall(r"(?r)..", "abcde", overlapped=True), + ['de', 'cd', 'bc', 'ab']) + self.assertEqual(regex.findall(r"(.)(-)(.)", "a-b-c", overlapped=True), + [("a", "-", "b"), ("b", "-", "c")]) + + self.assertEqual([m[0] for m in regex.finditer(r"..", "abcde")], ['ab', + 'cd']) + self.assertEqual([m[0] for m in regex.finditer(r"..", "abcde", + overlapped=True)], ['ab', 'bc', 'cd', 'de']) + self.assertEqual([m[0] for m in regex.finditer(r"(?r)..", "abcde")], + ['de', 'bc']) + self.assertEqual([m[0] for m in regex.finditer(r"(?r)..", "abcde", + overlapped=True)], ['de', 'cd', 'bc', 'ab']) + + self.assertEqual([m.groups() for m in regex.finditer(r"(.)(-)(.)", + "a-b-c", overlapped=True)], [("a", "-", "b"), ("b", "-", "c")]) + self.assertEqual([m.groups() for m in regex.finditer(r"(?r)(.)(-)(.)", + "a-b-c", overlapped=True)], [("b", "-", "c"), ("a", "-", "b")]) + + def test_splititer(self): + self.assertEqual(regex.split(r",", "a,b,,c,"), ['a', 'b', '', 'c', '']) + self.assertEqual([m for m in regex.splititer(r",", "a,b,,c,")], ['a', + 'b', '', 'c', '']) + + def test_grapheme(self): + 
self.assertEqual(regex.match(r"\X", "\xE0").span(), (0, 1)) + self.assertEqual(regex.match(r"\X", "a\u0300").span(), (0, 2)) + + self.assertEqual(regex.findall(r"\X", + "a\xE0a\u0300e\xE9e\u0301"), ['a', '\xe0', 'a\u0300', 'e', + '\xe9', 'e\u0301']) + self.assertEqual(regex.findall(r"\X{3}", + "a\xE0a\u0300e\xE9e\u0301"), ['a\xe0a\u0300', 'e\xe9e\u0301']) + self.assertEqual(regex.findall(r"\X", "\r\r\n\u0301A\u0301"), + ['\r', '\r\n', '\u0301', 'A\u0301']) + + def test_word_boundary(self): + text = 'The quick ("brown") fox can\'t jump 32.3 feet, right?' + self.assertEqual(regex.split(r'(?V1)\b', text), ['', 'The', ' ', + 'quick', ' ("', 'brown', '") ', 'fox', ' ', 'can', "'", 't', + ' ', 'jump', ' ', '32', '.', '3', ' ', 'feet', ', ', + 'right', '?']) + self.assertEqual(regex.split(r'(?V1w)\b', text), ['', 'The', ' ', + 'quick', ' ', '(', '"', 'brown', '"', ')', ' ', 'fox', ' ', + "can't", ' ', 'jump', ' ', '32.3', ' ', 'feet', ',', ' ', + 'right', '?', '']) + + text = "The fox" + self.assertEqual(regex.split(r'(?V1)\b', text), ['', 'The', ' ', + 'fox', '']) + self.assertEqual(regex.split(r'(?V1w)\b', text), ['', 'The', ' ', + 'fox', '']) + + text = "can't aujourd'hui l'objectif" + self.assertEqual(regex.split(r'(?V1)\b', text), ['', 'can', "'", + 't', ' ', 'aujourd', "'", 'hui', ' ', 'l', "'", 'objectif', + '']) + self.assertEqual(regex.split(r'(?V1w)\b', text), ['', "can't", ' ', + "aujourd'hui", ' ', "l'objectif", '']) + + def test_line_boundary(self): + self.assertEqual(regex.findall(r".+", "Line 1\nLine 2\n"), ["Line 1", + "Line 2"]) + self.assertEqual(regex.findall(r".+", "Line 1\rLine 2\r"), + ["Line 1\rLine 2\r"]) + self.assertEqual(regex.findall(r".+", "Line 1\r\nLine 2\r\n"), + ["Line 1\r", "Line 2\r"]) + self.assertEqual(regex.findall(r"(?w).+", "Line 1\nLine 2\n"), + ["Line 1", "Line 2"]) + self.assertEqual(regex.findall(r"(?w).+", "Line 1\rLine 2\r"), + ["Line 1", "Line 2"]) + self.assertEqual(regex.findall(r"(?w).+", "Line 1\r\nLine 2\r\n"), + ["Line 
1", "Line 2"]) + + self.assertEqual(regex.search(r"^abc", "abc").start(), 0) + self.assertEqual(regex.search(r"^abc", "\nabc"), None) + self.assertEqual(regex.search(r"^abc", "\rabc"), None) + self.assertEqual(regex.search(r"(?w)^abc", "abc").start(), 0) + self.assertEqual(regex.search(r"(?w)^abc", "\nabc"), None) + self.assertEqual(regex.search(r"(?w)^abc", "\rabc"), None) + + self.assertEqual(regex.search(r"abc$", "abc").start(), 0) + self.assertEqual(regex.search(r"abc$", "abc\n").start(), 0) + self.assertEqual(regex.search(r"abc$", "abc\r"), None) + self.assertEqual(regex.search(r"(?w)abc$", "abc").start(), 0) + self.assertEqual(regex.search(r"(?w)abc$", "abc\n").start(), 0) + self.assertEqual(regex.search(r"(?w)abc$", "abc\r").start(), 0) + + self.assertEqual(regex.search(r"(?m)^abc", "abc").start(), 0) + self.assertEqual(regex.search(r"(?m)^abc", "\nabc").start(), 1) + self.assertEqual(regex.search(r"(?m)^abc", "\rabc"), None) + self.assertEqual(regex.search(r"(?mw)^abc", "abc").start(), 0) + self.assertEqual(regex.search(r"(?mw)^abc", "\nabc").start(), 1) + self.assertEqual(regex.search(r"(?mw)^abc", "\rabc").start(), 1) + + self.assertEqual(regex.search(r"(?m)abc$", "abc").start(), 0) + self.assertEqual(regex.search(r"(?m)abc$", "abc\n").start(), 0) + self.assertEqual(regex.search(r"(?m)abc$", "abc\r"), None) + self.assertEqual(regex.search(r"(?mw)abc$", "abc").start(), 0) + self.assertEqual(regex.search(r"(?mw)abc$", "abc\n").start(), 0) + self.assertEqual(regex.search(r"(?mw)abc$", "abc\r").start(), 0) + + def test_branch_reset(self): + self.assertEqual(regex.match(r"(?:(a)|(b))(c)", "ac").groups(), ('a', + None, 'c')) + self.assertEqual(regex.match(r"(?:(a)|(b))(c)", "bc").groups(), (None, + 'b', 'c')) + self.assertEqual(regex.match(r"(?:(?a)|(?b))(?c)", + "ac").groups(), ('a', None, 'c')) + self.assertEqual(regex.match(r"(?:(?a)|(?b))(?c)", + "bc").groups(), (None, 'b', 'c')) + + self.assertEqual(regex.match(r"(?a)(?:(?b)|(?c))(?d)", + "abd").groups(), 
('a', 'b', None, 'd')) + self.assertEqual(regex.match(r"(?a)(?:(?b)|(?c))(?d)", + "acd").groups(), ('a', None, 'c', 'd')) + self.assertEqual(regex.match(r"(a)(?:(b)|(c))(d)", "abd").groups(), + ('a', 'b', None, 'd')) + + self.assertEqual(regex.match(r"(a)(?:(b)|(c))(d)", "acd").groups(), + ('a', None, 'c', 'd')) + self.assertEqual(regex.match(r"(a)(?|(b)|(b))(d)", "abd").groups(), + ('a', 'b', 'd')) + self.assertEqual(regex.match(r"(?|(?a)|(?b))(c)", "ac").groups(), + ('a', None, 'c')) + self.assertEqual(regex.match(r"(?|(?a)|(?b))(c)", "bc").groups(), + (None, 'b', 'c')) + self.assertEqual(regex.match(r"(?|(?a)|(?b))(c)", "ac").groups(), + ('a', 'c')) + + self.assertEqual(regex.match(r"(?|(?a)|(?b))(c)", "bc").groups(), + ('b', 'c')) + + self.assertEqual(regex.match(r"(?|(?a)(?b)|(?c)(?d))(e)", + "abe").groups(), ('a', 'b', 'e')) + self.assertEqual(regex.match(r"(?|(?a)(?b)|(?c)(?d))(e)", + "cde").groups(), ('d', 'c', 'e')) + self.assertEqual(regex.match(r"(?|(?a)(?b)|(?c)(d))(e)", + "abe").groups(), ('a', 'b', 'e')) + self.assertEqual(regex.match(r"(?|(?a)(?b)|(?c)(d))(e)", + "cde").groups(), ('d', 'c', 'e')) + self.assertEqual(regex.match(r"(?|(?a)(?b)|(c)(d))(e)", + "abe").groups(), ('a', 'b', 'e')) + self.assertEqual(regex.match(r"(?|(?a)(?b)|(c)(d))(e)", + "cde").groups(), ('c', 'd', 'e')) + + # Hg issue 87: Allow duplicate names of groups + self.assertEqual(regex.match(r"(?|(?a)(?b)|(c)(?d))(e)", + "abe").groups(), ("a", "b", "e")) + self.assertEqual(regex.match(r"(?|(?a)(?b)|(c)(?d))(e)", + "abe").capturesdict(), {"a": ["a"], "b": ["b"]}) + self.assertEqual(regex.match(r"(?|(?a)(?b)|(c)(?d))(e)", + "cde").groups(), ("d", None, "e")) + self.assertEqual(regex.match(r"(?|(?a)(?b)|(c)(?d))(e)", + "cde").capturesdict(), {"a": ["c", "d"], "b": []}) + + def test_set(self): + self.assertEqual(regex.match(r"[a]", "a").span(), (0, 1)) + self.assertEqual(regex.match(r"(?i)[a]", "A").span(), (0, 1)) + self.assertEqual(regex.match(r"[a-b]", r"a").span(), (0, 1)) + 
self.assertEqual(regex.match(r"(?i)[a-b]", r"A").span(), (0, 1)) + + self.assertEqual(regex.sub(r"(?V0)([][])", r"-", "a[b]c"), "a-b-c") + + self.assertEqual(regex.findall(r"[\p{Alpha}]", "a0"), ["a"]) + self.assertEqual(regex.findall(r"(?i)[\p{Alpha}]", "A0"), ["A"]) + + self.assertEqual(regex.findall(r"[a\p{Alpha}]", "ab0"), ["a", "b"]) + self.assertEqual(regex.findall(r"[a\P{Alpha}]", "ab0"), ["a", "0"]) + self.assertEqual(regex.findall(r"(?i)[a\p{Alpha}]", "ab0"), ["a", + "b"]) + self.assertEqual(regex.findall(r"(?i)[a\P{Alpha}]", "ab0"), ["a", + "0"]) + + self.assertEqual(regex.findall(r"[a-b\p{Alpha}]", "abC0"), ["a", + "b", "C"]) + self.assertEqual(regex.findall(r"(?i)[a-b\p{Alpha}]", "AbC0"), ["A", + "b", "C"]) + + self.assertEqual(regex.findall(r"[\p{Alpha}]", "a0"), ["a"]) + self.assertEqual(regex.findall(r"[\P{Alpha}]", "a0"), ["0"]) + self.assertEqual(regex.findall(r"[^\p{Alpha}]", "a0"), ["0"]) + self.assertEqual(regex.findall(r"[^\P{Alpha}]", "a0"), ["a"]) + + self.assertEqual("".join(regex.findall(r"[^\d-h]", "a^b12c-h")), + 'a^bc') + self.assertEqual("".join(regex.findall(r"[^\dh]", "a^b12c-h")), + 'a^bc-') + self.assertEqual("".join(regex.findall(r"[^h\s\db]", "a^b 12c-h")), + 'a^c-') + self.assertEqual("".join(regex.findall(r"[^b\w]", "a b")), ' ') + self.assertEqual("".join(regex.findall(r"[^b\S]", "a b")), ' ') + self.assertEqual("".join(regex.findall(r"[^8\d]", "a 1b2")), 'a b') + + all_chars = "".join(chr(c) for c in range(0x100)) + self.assertEqual(len(regex.findall(r"\p{ASCII}", all_chars)), 128) + self.assertEqual(len(regex.findall(r"\p{Letter}", all_chars)), + 117) + self.assertEqual(len(regex.findall(r"\p{Digit}", all_chars)), 10) + + # Set operators + self.assertEqual(len(regex.findall(r"(?V1)[\p{ASCII}&&\p{Letter}]", + all_chars)), 52) + self.assertEqual(len(regex.findall(r"(?V1)[\p{ASCII}&&\p{Alnum}&&\p{Letter}]", + all_chars)), 52) + self.assertEqual(len(regex.findall(r"(?V1)[\p{ASCII}&&\p{Alnum}&&\p{Digit}]", + all_chars)), 10) + 
self.assertEqual(len(regex.findall(r"(?V1)[\p{ASCII}&&\p{Cc}]", + all_chars)), 33) + self.assertEqual(len(regex.findall(r"(?V1)[\p{ASCII}&&\p{Graph}]", + all_chars)), 94) + self.assertEqual(len(regex.findall(r"(?V1)[\p{ASCII}--\p{Cc}]", + all_chars)), 95) + self.assertEqual(len(regex.findall(r"[\p{Letter}\p{Digit}]", + all_chars)), 127) + self.assertEqual(len(regex.findall(r"(?V1)[\p{Letter}||\p{Digit}]", + all_chars)), 127) + self.assertEqual(len(regex.findall(r"\p{HexDigit}", all_chars)), + 22) + self.assertEqual(len(regex.findall(r"(?V1)[\p{HexDigit}~~\p{Digit}]", + all_chars)), 12) + self.assertEqual(len(regex.findall(r"(?V1)[\p{Digit}~~\p{HexDigit}]", + all_chars)), 12) + + self.assertEqual(repr(type(regex.compile(r"(?V0)([][-])"))), + self.PATTERN_CLASS) + self.assertEqual(regex.findall(r"(?V1)[[a-z]--[aei]]", "abc"), ["b", + "c"]) + self.assertEqual(regex.findall(r"(?iV1)[[a-z]--[aei]]", "abc"), ["b", + "c"]) + self.assertEqual(regex.findall(r"(?V1)[\w--a]","abc"), ["b", "c"]) + self.assertEqual(regex.findall(r"(?iV1)[\w--a]","abc"), ["b", "c"]) + + def test_various(self): + tests = [ + # Test ?P< and ?P= extensions. + ('(?Pa)', '', '', regex.error, self.BAD_GROUP_NAME), # Begins with a digit. + ('(?Pa)', '', '', regex.error, self.BAD_GROUP_NAME), # Begins with an illegal char. + ('(?Pa)', '', '', regex.error, self.BAD_GROUP_NAME), # Begins with an illegal char. + + # Same tests, for the ?P= form. + ('(?Pa)(?P=foo_123', 'aa', '', regex.error, + self.MISSING_RPAREN), + ('(?Pa)(?P=1)', 'aa', '1', ascii('a')), + ('(?Pa)(?P=0)', 'aa', '', regex.error, + self.BAD_GROUP_NAME), + ('(?Pa)(?P=-1)', 'aa', '', regex.error, + self.BAD_GROUP_NAME), + ('(?Pa)(?P=!)', 'aa', '', regex.error, + self.BAD_GROUP_NAME), + ('(?Pa)(?P=foo_124)', 'aa', '', regex.error, + self.UNKNOWN_GROUP), # Backref to undefined group. + + ('(?Pa)', 'a', '1', ascii('a')), + ('(?Pa)(?P=foo_123)', 'aa', '1', ascii('a')), + + # Mal-formed \g in pattern treated as literal for compatibility. 
+ (r'(?a)\ga)\g<1>', 'aa', '1', ascii('a')), + (r'(?a)\g', 'aa', '', ascii(None)), + (r'(?a)\g', 'aa', '', regex.error, + self.UNKNOWN_GROUP), # Backref to undefined group. + + ('(?a)', 'a', '1', ascii('a')), + (r'(?a)\g', 'aa', '1', ascii('a')), + + # Test octal escapes. + ('\\1', 'a', '', regex.error, self.INVALID_GROUP_REF), # Backreference. + ('[\\1]', '\1', '0', "'\\x01'"), # Character. + ('\\09', chr(0) + '9', '0', ascii(chr(0) + '9')), + ('\\141', 'a', '0', ascii('a')), + ('(a)(b)(c)(d)(e)(f)(g)(h)(i)(j)(k)(l)\\119', 'abcdefghijklk9', + '0,11', ascii(('abcdefghijklk9', 'k'))), + + # Test \0 is handled everywhere. + (r'\0', '\0', '0', ascii('\0')), + (r'[\0a]', '\0', '0', ascii('\0')), + (r'[a\0]', '\0', '0', ascii('\0')), + (r'[^a\0]', '\0', '', ascii(None)), + + # Test various letter escapes. + (r'\a[\b]\f\n\r\t\v', '\a\b\f\n\r\t\v', '0', + ascii('\a\b\f\n\r\t\v')), + (r'[\a][\b][\f][\n][\r][\t][\v]', '\a\b\f\n\r\t\v', '0', + ascii('\a\b\f\n\r\t\v')), + (r'\xff', '\377', '0', ascii(chr(255))), + + # New \x semantics. + (r'\x00ffffffffffffff', '\377', '', ascii(None)), + (r'\x00f', '\017', '', ascii(None)), + (r'\x00fe', '\376', '', ascii(None)), + + (r'\x00ff', '\377', '', ascii(None)), + (r'\t\n\v\r\f\a\g', '\t\n\v\r\f\ag', '0', ascii('\t\n\v\r\f\ag')), + ('\t\n\v\r\f\a\\g', '\t\n\v\r\f\ag', '0', ascii('\t\n\v\r\f\ag')), + (r'\t\n\v\r\f\a', '\t\n\v\r\f\a', '0', ascii(chr(9) + chr(10) + + chr(11) + chr(13) + chr(12) + chr(7))), + (r'[\t][\n][\v][\r][\f][\b]', '\t\n\v\r\f\b', '0', + ascii('\t\n\v\r\f\b')), + + (r"^\w+=(\\[\000-\277]|[^\n\\])*", + "SRC=eval.c g.c blah blah blah \\\\\n\tapes.c", '0', + ascii("SRC=eval.c g.c blah blah blah \\\\")), + + # Test that . only matches \n in DOTALL mode. + ('a.b', 'acb', '0', ascii('acb')), + ('a.b', 'a\nb', '', ascii(None)), + ('a.*b', 'acc\nccb', '', ascii(None)), + ('a.{4,5}b', 'acc\nccb', '', ascii(None)), + ('a.b', 'a\rb', '0', ascii('a\rb')), + # Changed to positional flags in regex 2023.12.23. 
+ ('a.b(?s)', 'a\nb', '', ascii(None)), + ('(?s)a.b', 'a\nb', '0', ascii('a\nb')), + ('a.*(?s)b', 'acc\nccb', '', ascii(None)), + ('(?s)a.*b', 'acc\nccb', '0', ascii('acc\nccb')), + ('(?s)a.{4,5}b', 'acc\nccb', '0', ascii('acc\nccb')), + + (')', '', '', regex.error, self.TRAILING_CHARS), # Unmatched right bracket. + ('', '', '0', "''"), # Empty pattern. + ('abc', 'abc', '0', ascii('abc')), + ('abc', 'xbc', '', ascii(None)), + ('abc', 'axc', '', ascii(None)), + ('abc', 'abx', '', ascii(None)), + ('abc', 'xabcy', '0', ascii('abc')), + ('abc', 'ababc', '0', ascii('abc')), + ('ab*c', 'abc', '0', ascii('abc')), + ('ab*bc', 'abc', '0', ascii('abc')), + + ('ab*bc', 'abbc', '0', ascii('abbc')), + ('ab*bc', 'abbbbc', '0', ascii('abbbbc')), + ('ab+bc', 'abbc', '0', ascii('abbc')), + ('ab+bc', 'abc', '', ascii(None)), + ('ab+bc', 'abq', '', ascii(None)), + ('ab+bc', 'abbbbc', '0', ascii('abbbbc')), + ('ab?bc', 'abbc', '0', ascii('abbc')), + ('ab?bc', 'abc', '0', ascii('abc')), + ('ab?bc', 'abbbbc', '', ascii(None)), + ('ab?c', 'abc', '0', ascii('abc')), + + ('^abc$', 'abc', '0', ascii('abc')), + ('^abc$', 'abcc', '', ascii(None)), + ('^abc', 'abcc', '0', ascii('abc')), + ('^abc$', 'aabc', '', ascii(None)), + ('abc$', 'aabc', '0', ascii('abc')), + ('^', 'abc', '0', ascii('')), + ('$', 'abc', '0', ascii('')), + ('a.c', 'abc', '0', ascii('abc')), + ('a.c', 'axc', '0', ascii('axc')), + ('a.*c', 'axyzc', '0', ascii('axyzc')), + + ('a.*c', 'axyzd', '', ascii(None)), + ('a[bc]d', 'abc', '', ascii(None)), + ('a[bc]d', 'abd', '0', ascii('abd')), + ('a[b-d]e', 'abd', '', ascii(None)), + ('a[b-d]e', 'ace', '0', ascii('ace')), + ('a[b-d]', 'aac', '0', ascii('ac')), + ('a[-b]', 'a-', '0', ascii('a-')), + ('a[\\-b]', 'a-', '0', ascii('a-')), + ('a[b-]', 'a-', '0', ascii('a-')), + ('a[]b', '-', '', regex.error, self.BAD_SET), + + ('a[', '-', '', regex.error, self.BAD_SET), + ('a\\', '-', '', regex.error, self.BAD_ESCAPE), + ('abc)', '-', '', regex.error, self.TRAILING_CHARS), + ('(abc', 
'-', '', regex.error, self.MISSING_RPAREN), + ('a]', 'a]', '0', ascii('a]')), + ('a[]]b', 'a]b', '0', ascii('a]b')), + ('a[]]b', 'a]b', '0', ascii('a]b')), + ('a[^bc]d', 'aed', '0', ascii('aed')), + ('a[^bc]d', 'abd', '', ascii(None)), + ('a[^-b]c', 'adc', '0', ascii('adc')), + + ('a[^-b]c', 'a-c', '', ascii(None)), + ('a[^]b]c', 'a]c', '', ascii(None)), + ('a[^]b]c', 'adc', '0', ascii('adc')), + ('\\ba\\b', 'a-', '0', ascii('a')), + ('\\ba\\b', '-a', '0', ascii('a')), + ('\\ba\\b', '-a-', '0', ascii('a')), + ('\\by\\b', 'xy', '', ascii(None)), + ('\\by\\b', 'yz', '', ascii(None)), + ('\\by\\b', 'xyz', '', ascii(None)), + ('x\\b', 'xyz', '', ascii(None)), + + ('x\\B', 'xyz', '0', ascii('x')), + ('\\Bz', 'xyz', '0', ascii('z')), + ('z\\B', 'xyz', '', ascii(None)), + ('\\Bx', 'xyz', '', ascii(None)), + ('\\Ba\\B', 'a-', '', ascii(None)), + ('\\Ba\\B', '-a', '', ascii(None)), + ('\\Ba\\B', '-a-', '', ascii(None)), + ('\\By\\B', 'xy', '', ascii(None)), + ('\\By\\B', 'yz', '', ascii(None)), + ('\\By\\b', 'xy', '0', ascii('y')), + + ('\\by\\B', 'yz', '0', ascii('y')), + ('\\By\\B', 'xyz', '0', ascii('y')), + ('ab|cd', 'abc', '0', ascii('ab')), + ('ab|cd', 'abcd', '0', ascii('ab')), + ('()ef', 'def', '0,1', ascii(('ef', ''))), + ('$b', 'b', '', ascii(None)), + ('a\\(b', 'a(b', '', ascii(('a(b',))), + ('a\\(*b', 'ab', '0', ascii('ab')), + ('a\\(*b', 'a((b', '0', ascii('a((b')), + ('a\\\\b', 'a\\b', '0', ascii('a\\b')), + + ('((a))', 'abc', '0,1,2', ascii(('a', 'a', 'a'))), + ('(a)b(c)', 'abc', '0,1,2', ascii(('abc', 'a', 'c'))), + ('a+b+c', 'aabbabc', '0', ascii('abc')), + ('(a+|b)*', 'ab', '0,1', ascii(('ab', 'b'))), + ('(a+|b)+', 'ab', '0,1', ascii(('ab', 'b'))), + ('(a+|b)?', 'ab', '0,1', ascii(('a', 'a'))), + (')(', '-', '', regex.error, self.TRAILING_CHARS), + ('[^ab]*', 'cde', '0', ascii('cde')), + ('abc', '', '', ascii(None)), + ('a*', '', '0', ascii('')), + + ('a|b|c|d|e', 'e', '0', ascii('e')), + ('(a|b|c|d|e)f', 'ef', '0,1', ascii(('ef', 'e'))), + ('abcd*efg', 
'abcdefg', '0', ascii('abcdefg')), + ('ab*', 'xabyabbbz', '0', ascii('ab')), + ('ab*', 'xayabbbz', '0', ascii('a')), + ('(ab|cd)e', 'abcde', '0,1', ascii(('cde', 'cd'))), + ('[abhgefdc]ij', 'hij', '0', ascii('hij')), + ('^(ab|cd)e', 'abcde', '', ascii(None)), + ('(abc|)ef', 'abcdef', '0,1', ascii(('ef', ''))), + ('(a|b)c*d', 'abcd', '0,1', ascii(('bcd', 'b'))), + + ('(ab|ab*)bc', 'abc', '0,1', ascii(('abc', 'a'))), + ('a([bc]*)c*', 'abc', '0,1', ascii(('abc', 'bc'))), + ('a([bc]*)(c*d)', 'abcd', '0,1,2', ascii(('abcd', 'bc', 'd'))), + ('a([bc]+)(c*d)', 'abcd', '0,1,2', ascii(('abcd', 'bc', 'd'))), + ('a([bc]*)(c+d)', 'abcd', '0,1,2', ascii(('abcd', 'b', 'cd'))), + ('a[bcd]*dcdcde', 'adcdcde', '0', ascii('adcdcde')), + ('a[bcd]+dcdcde', 'adcdcde', '', ascii(None)), + ('(ab|a)b*c', 'abc', '0,1', ascii(('abc', 'ab'))), + ('((a)(b)c)(d)', 'abcd', '1,2,3,4', ascii(('abc', 'a', 'b', 'd'))), + ('[a-zA-Z_][a-zA-Z0-9_]*', 'alpha', '0', ascii('alpha')), + + ('^a(bc+|b[eh])g|.h$', 'abh', '0,1', ascii(('bh', None))), + ('(bc+d$|ef*g.|h?i(j|k))', 'effgz', '0,1,2', ascii(('effgz', + 'effgz', None))), + ('(bc+d$|ef*g.|h?i(j|k))', 'ij', '0,1,2', ascii(('ij', 'ij', + 'j'))), + ('(bc+d$|ef*g.|h?i(j|k))', 'effg', '', ascii(None)), + ('(bc+d$|ef*g.|h?i(j|k))', 'bcdd', '', ascii(None)), + ('(bc+d$|ef*g.|h?i(j|k))', 'reffgz', '0,1,2', ascii(('effgz', + 'effgz', None))), + ('(((((((((a)))))))))', 'a', '0', ascii('a')), + ('multiple words of text', 'uh-uh', '', ascii(None)), + ('multiple words', 'multiple words, yeah', '0', + ascii('multiple words')), + ('(.*)c(.*)', 'abcde', '0,1,2', ascii(('abcde', 'ab', 'de'))), + + ('\\((.*), (.*)\\)', '(a, b)', '2,1', ascii(('b', 'a'))), + ('[k]', 'ab', '', ascii(None)), + ('a[-]?c', 'ac', '0', ascii('ac')), + ('(abc)\\1', 'abcabc', '1', ascii('abc')), + ('([a-c]*)\\1', 'abcabc', '1', ascii('abc')), + ('^(.+)?B', 'AB', '1', ascii('A')), + ('(a+).\\1$', 'aaaaa', '0,1', ascii(('aaaaa', 'aa'))), + ('^(a+).\\1$', 'aaaa', '', ascii(None)), + ('(abc)\\1', 
'abcabc', '0,1', ascii(('abcabc', 'abc'))), + ('([a-c]+)\\1', 'abcabc', '0,1', ascii(('abcabc', 'abc'))), + + ('(a)\\1', 'aa', '0,1', ascii(('aa', 'a'))), + ('(a+)\\1', 'aa', '0,1', ascii(('aa', 'a'))), + ('(a+)+\\1', 'aa', '0,1', ascii(('aa', 'a'))), + ('(a).+\\1', 'aba', '0,1', ascii(('aba', 'a'))), + ('(a)ba*\\1', 'aba', '0,1', ascii(('aba', 'a'))), + ('(aa|a)a\\1$', 'aaa', '0,1', ascii(('aaa', 'a'))), + ('(a|aa)a\\1$', 'aaa', '0,1', ascii(('aaa', 'a'))), + ('(a+)a\\1$', 'aaa', '0,1', ascii(('aaa', 'a'))), + ('([abc]*)\\1', 'abcabc', '0,1', ascii(('abcabc', 'abc'))), + ('(a)(b)c|ab', 'ab', '0,1,2', ascii(('ab', None, None))), + + ('(a)+x', 'aaax', '0,1', ascii(('aaax', 'a'))), + ('([ac])+x', 'aacx', '0,1', ascii(('aacx', 'c'))), + ('([^/]*/)*sub1/', 'd:msgs/tdir/sub1/trial/away.cpp', '0,1', + ascii(('d:msgs/tdir/sub1/', 'tdir/'))), + ('([^.]*)\\.([^:]*):[T ]+(.*)', 'track1.title:TBlah blah blah', + '0,1,2,3', ascii(('track1.title:TBlah blah blah', 'track1', + 'title', 'Blah blah blah'))), + ('([^N]*N)+', 'abNNxyzN', '0,1', ascii(('abNNxyzN', 'xyzN'))), + ('([^N]*N)+', 'abNNxyz', '0,1', ascii(('abNN', 'N'))), + ('([abc]*)x', 'abcx', '0,1', ascii(('abcx', 'abc'))), + ('([abc]*)x', 'abc', '', ascii(None)), + ('([xyz]*)x', 'abcx', '0,1', ascii(('x', ''))), + ('(a)+b|aac', 'aac', '0,1', ascii(('aac', None))), + + # Test symbolic groups. + ('(?Paaa)a', 'aaaa', '', regex.error, self.BAD_GROUP_NAME), + ('(?Paaa)a', 'aaaa', '0,id', ascii(('aaaa', 'aaa'))), + ('(?Paa)(?P=id)', 'aaaa', '0,id', ascii(('aaaa', 'aa'))), + ('(?Paa)(?P=xd)', 'aaaa', '', regex.error, self.UNKNOWN_GROUP), + + # Character properties. 
+ (r"\g", "g", '0', ascii('g')), + (r"\g<1>", "g", '', regex.error, self.INVALID_GROUP_REF), + (r"(.)\g<1>", "gg", '0', ascii('gg')), + (r"(.)\g<1>", "gg", '', ascii(('gg', 'g'))), + (r"\N", "N", '0', ascii('N')), + (r"\N{LATIN SMALL LETTER A}", "a", '0', ascii('a')), + (r"\p", "p", '0', ascii('p')), + (r"\p{Ll}", "a", '0', ascii('a')), + (r"\P", "P", '0', ascii('P')), + (r"\P{Lu}", "p", '0', ascii('p')), + + # All tests from Perl. + ('abc', 'abc', '0', ascii('abc')), + ('abc', 'xbc', '', ascii(None)), + ('abc', 'axc', '', ascii(None)), + ('abc', 'abx', '', ascii(None)), + ('abc', 'xabcy', '0', ascii('abc')), + ('abc', 'ababc', '0', ascii('abc')), + + ('ab*c', 'abc', '0', ascii('abc')), + ('ab*bc', 'abc', '0', ascii('abc')), + ('ab*bc', 'abbc', '0', ascii('abbc')), + ('ab*bc', 'abbbbc', '0', ascii('abbbbc')), + ('ab{0,}bc', 'abbbbc', '0', ascii('abbbbc')), + ('ab+bc', 'abbc', '0', ascii('abbc')), + ('ab+bc', 'abc', '', ascii(None)), + ('ab+bc', 'abq', '', ascii(None)), + ('ab{1,}bc', 'abq', '', ascii(None)), + ('ab+bc', 'abbbbc', '0', ascii('abbbbc')), + + ('ab{1,}bc', 'abbbbc', '0', ascii('abbbbc')), + ('ab{1,3}bc', 'abbbbc', '0', ascii('abbbbc')), + ('ab{3,4}bc', 'abbbbc', '0', ascii('abbbbc')), + ('ab{4,5}bc', 'abbbbc', '', ascii(None)), + ('ab?bc', 'abbc', '0', ascii('abbc')), + ('ab?bc', 'abc', '0', ascii('abc')), + ('ab{0,1}bc', 'abc', '0', ascii('abc')), + ('ab?bc', 'abbbbc', '', ascii(None)), + ('ab?c', 'abc', '0', ascii('abc')), + ('ab{0,1}c', 'abc', '0', ascii('abc')), + + ('^abc$', 'abc', '0', ascii('abc')), + ('^abc$', 'abcc', '', ascii(None)), + ('^abc', 'abcc', '0', ascii('abc')), + ('^abc$', 'aabc', '', ascii(None)), + ('abc$', 'aabc', '0', ascii('abc')), + ('^', 'abc', '0', ascii('')), + ('$', 'abc', '0', ascii('')), + ('a.c', 'abc', '0', ascii('abc')), + ('a.c', 'axc', '0', ascii('axc')), + ('a.*c', 'axyzc', '0', ascii('axyzc')), + + ('a.*c', 'axyzd', '', ascii(None)), + ('a[bc]d', 'abc', '', ascii(None)), + ('a[bc]d', 'abd', '0', ascii('abd')), + 
('a[b-d]e', 'abd', '', ascii(None)), + ('a[b-d]e', 'ace', '0', ascii('ace')), + ('a[b-d]', 'aac', '0', ascii('ac')), + ('a[-b]', 'a-', '0', ascii('a-')), + ('a[b-]', 'a-', '0', ascii('a-')), + ('a[b-a]', '-', '', regex.error, self.BAD_CHAR_RANGE), + ('a[]b', '-', '', regex.error, self.BAD_SET), + + ('a[', '-', '', regex.error, self.BAD_SET), + ('a]', 'a]', '0', ascii('a]')), + ('a[]]b', 'a]b', '0', ascii('a]b')), + ('a[^bc]d', 'aed', '0', ascii('aed')), + ('a[^bc]d', 'abd', '', ascii(None)), + ('a[^-b]c', 'adc', '0', ascii('adc')), + ('a[^-b]c', 'a-c', '', ascii(None)), + ('a[^]b]c', 'a]c', '', ascii(None)), + ('a[^]b]c', 'adc', '0', ascii('adc')), + ('ab|cd', 'abc', '0', ascii('ab')), + + ('ab|cd', 'abcd', '0', ascii('ab')), + ('()ef', 'def', '0,1', ascii(('ef', ''))), + ('*a', '-', '', regex.error, self.NOTHING_TO_REPEAT), + ('(*)b', '-', '', regex.error, self.NOTHING_TO_REPEAT), + ('$b', 'b', '', ascii(None)), + ('a\\', '-', '', regex.error, self.BAD_ESCAPE), + ('a\\(b', 'a(b', '', ascii(('a(b',))), + ('a\\(*b', 'ab', '0', ascii('ab')), + ('a\\(*b', 'a((b', '0', ascii('a((b')), + ('a\\\\b', 'a\\b', '0', ascii('a\\b')), + + ('abc)', '-', '', regex.error, self.TRAILING_CHARS), + ('(abc', '-', '', regex.error, self.MISSING_RPAREN), + ('((a))', 'abc', '0,1,2', ascii(('a', 'a', 'a'))), + ('(a)b(c)', 'abc', '0,1,2', ascii(('abc', 'a', 'c'))), + ('a+b+c', 'aabbabc', '0', ascii('abc')), + ('a{1,}b{1,}c', 'aabbabc', '0', ascii('abc')), + ('a**', '-', '', regex.error, self.MULTIPLE_REPEAT), + ('a.+?c', 'abcabc', '0', ascii('abc')), + ('(a+|b)*', 'ab', '0,1', ascii(('ab', 'b'))), + ('(a+|b){0,}', 'ab', '0,1', ascii(('ab', 'b'))), + + ('(a+|b)+', 'ab', '0,1', ascii(('ab', 'b'))), + ('(a+|b){1,}', 'ab', '0,1', ascii(('ab', 'b'))), + ('(a+|b)?', 'ab', '0,1', ascii(('a', 'a'))), + ('(a+|b){0,1}', 'ab', '0,1', ascii(('a', 'a'))), + (')(', '-', '', regex.error, self.TRAILING_CHARS), + ('[^ab]*', 'cde', '0', ascii('cde')), + ('abc', '', '', ascii(None)), + ('a*', '', '0', 
ascii('')), + ('([abc])*d', 'abbbcd', '0,1', ascii(('abbbcd', 'c'))), + ('([abc])*bcd', 'abcd', '0,1', ascii(('abcd', 'a'))), + + ('a|b|c|d|e', 'e', '0', ascii('e')), + ('(a|b|c|d|e)f', 'ef', '0,1', ascii(('ef', 'e'))), + ('abcd*efg', 'abcdefg', '0', ascii('abcdefg')), + ('ab*', 'xabyabbbz', '0', ascii('ab')), + ('ab*', 'xayabbbz', '0', ascii('a')), + ('(ab|cd)e', 'abcde', '0,1', ascii(('cde', 'cd'))), + ('[abhgefdc]ij', 'hij', '0', ascii('hij')), + ('^(ab|cd)e', 'abcde', '', ascii(None)), + ('(abc|)ef', 'abcdef', '0,1', ascii(('ef', ''))), + ('(a|b)c*d', 'abcd', '0,1', ascii(('bcd', 'b'))), + + ('(ab|ab*)bc', 'abc', '0,1', ascii(('abc', 'a'))), + ('a([bc]*)c*', 'abc', '0,1', ascii(('abc', 'bc'))), + ('a([bc]*)(c*d)', 'abcd', '0,1,2', ascii(('abcd', 'bc', 'd'))), + ('a([bc]+)(c*d)', 'abcd', '0,1,2', ascii(('abcd', 'bc', 'd'))), + ('a([bc]*)(c+d)', 'abcd', '0,1,2', ascii(('abcd', 'b', 'cd'))), + ('a[bcd]*dcdcde', 'adcdcde', '0', ascii('adcdcde')), + ('a[bcd]+dcdcde', 'adcdcde', '', ascii(None)), + ('(ab|a)b*c', 'abc', '0,1', ascii(('abc', 'ab'))), + ('((a)(b)c)(d)', 'abcd', '1,2,3,4', ascii(('abc', 'a', 'b', 'd'))), + ('[a-zA-Z_][a-zA-Z0-9_]*', 'alpha', '0', ascii('alpha')), + + ('^a(bc+|b[eh])g|.h$', 'abh', '0,1', ascii(('bh', None))), + ('(bc+d$|ef*g.|h?i(j|k))', 'effgz', '0,1,2', ascii(('effgz', + 'effgz', None))), + ('(bc+d$|ef*g.|h?i(j|k))', 'ij', '0,1,2', ascii(('ij', 'ij', + 'j'))), + ('(bc+d$|ef*g.|h?i(j|k))', 'effg', '', ascii(None)), + ('(bc+d$|ef*g.|h?i(j|k))', 'bcdd', '', ascii(None)), + ('(bc+d$|ef*g.|h?i(j|k))', 'reffgz', '0,1,2', ascii(('effgz', + 'effgz', None))), + ('((((((((((a))))))))))', 'a', '10', ascii('a')), + ('((((((((((a))))))))))\\10', 'aa', '0', ascii('aa')), + + # Python does not have the same rules for \\41 so this is a syntax error + # ('((((((((((a))))))))))\\41', 'aa', '', ascii(None)), + # ('((((((((((a))))))))))\\41', 'a!', '0', ascii('a!')), + ('((((((((((a))))))))))\\41', '', '', regex.error, + self.INVALID_GROUP_REF), + 
('(?i)((((((((((a))))))))))\\41', '', '', regex.error, + self.INVALID_GROUP_REF), + + ('(((((((((a)))))))))', 'a', '0', ascii('a')), + ('multiple words of text', 'uh-uh', '', ascii(None)), + ('multiple words', 'multiple words, yeah', '0', + ascii('multiple words')), + ('(.*)c(.*)', 'abcde', '0,1,2', ascii(('abcde', 'ab', 'de'))), + ('\\((.*), (.*)\\)', '(a, b)', '2,1', ascii(('b', 'a'))), + ('[k]', 'ab', '', ascii(None)), + ('a[-]?c', 'ac', '0', ascii('ac')), + ('(abc)\\1', 'abcabc', '1', ascii('abc')), + ('([a-c]*)\\1', 'abcabc', '1', ascii('abc')), + ('(?i)abc', 'ABC', '0', ascii('ABC')), + + ('(?i)abc', 'XBC', '', ascii(None)), + ('(?i)abc', 'AXC', '', ascii(None)), + ('(?i)abc', 'ABX', '', ascii(None)), + ('(?i)abc', 'XABCY', '0', ascii('ABC')), + ('(?i)abc', 'ABABC', '0', ascii('ABC')), + ('(?i)ab*c', 'ABC', '0', ascii('ABC')), + ('(?i)ab*bc', 'ABC', '0', ascii('ABC')), + ('(?i)ab*bc', 'ABBC', '0', ascii('ABBC')), + ('(?i)ab*?bc', 'ABBBBC', '0', ascii('ABBBBC')), + ('(?i)ab{0,}?bc', 'ABBBBC', '0', ascii('ABBBBC')), + + ('(?i)ab+?bc', 'ABBC', '0', ascii('ABBC')), + ('(?i)ab+bc', 'ABC', '', ascii(None)), + ('(?i)ab+bc', 'ABQ', '', ascii(None)), + ('(?i)ab{1,}bc', 'ABQ', '', ascii(None)), + ('(?i)ab+bc', 'ABBBBC', '0', ascii('ABBBBC')), + ('(?i)ab{1,}?bc', 'ABBBBC', '0', ascii('ABBBBC')), + ('(?i)ab{1,3}?bc', 'ABBBBC', '0', ascii('ABBBBC')), + ('(?i)ab{3,4}?bc', 'ABBBBC', '0', ascii('ABBBBC')), + ('(?i)ab{4,5}?bc', 'ABBBBC', '', ascii(None)), + ('(?i)ab??bc', 'ABBC', '0', ascii('ABBC')), + + ('(?i)ab??bc', 'ABC', '0', ascii('ABC')), + ('(?i)ab{0,1}?bc', 'ABC', '0', ascii('ABC')), + ('(?i)ab??bc', 'ABBBBC', '', ascii(None)), + ('(?i)ab??c', 'ABC', '0', ascii('ABC')), + ('(?i)ab{0,1}?c', 'ABC', '0', ascii('ABC')), + ('(?i)^abc$', 'ABC', '0', ascii('ABC')), + ('(?i)^abc$', 'ABCC', '', ascii(None)), + ('(?i)^abc', 'ABCC', '0', ascii('ABC')), + ('(?i)^abc$', 'AABC', '', ascii(None)), + ('(?i)abc$', 'AABC', '0', ascii('ABC')), + + ('(?i)^', 'ABC', '0', ascii('')), + 
('(?i)$', 'ABC', '0', ascii('')), + ('(?i)a.c', 'ABC', '0', ascii('ABC')), + ('(?i)a.c', 'AXC', '0', ascii('AXC')), + ('(?i)a.*?c', 'AXYZC', '0', ascii('AXYZC')), + ('(?i)a.*c', 'AXYZD', '', ascii(None)), + ('(?i)a[bc]d', 'ABC', '', ascii(None)), + ('(?i)a[bc]d', 'ABD', '0', ascii('ABD')), + ('(?i)a[b-d]e', 'ABD', '', ascii(None)), + ('(?i)a[b-d]e', 'ACE', '0', ascii('ACE')), + + ('(?i)a[b-d]', 'AAC', '0', ascii('AC')), + ('(?i)a[-b]', 'A-', '0', ascii('A-')), + ('(?i)a[b-]', 'A-', '0', ascii('A-')), + ('(?i)a[b-a]', '-', '', regex.error, self.BAD_CHAR_RANGE), + ('(?i)a[]b', '-', '', regex.error, self.BAD_SET), + ('(?i)a[', '-', '', regex.error, self.BAD_SET), + ('(?i)a]', 'A]', '0', ascii('A]')), + ('(?i)a[]]b', 'A]B', '0', ascii('A]B')), + ('(?i)a[^bc]d', 'AED', '0', ascii('AED')), + ('(?i)a[^bc]d', 'ABD', '', ascii(None)), + + ('(?i)a[^-b]c', 'ADC', '0', ascii('ADC')), + ('(?i)a[^-b]c', 'A-C', '', ascii(None)), + ('(?i)a[^]b]c', 'A]C', '', ascii(None)), + ('(?i)a[^]b]c', 'ADC', '0', ascii('ADC')), + ('(?i)ab|cd', 'ABC', '0', ascii('AB')), + ('(?i)ab|cd', 'ABCD', '0', ascii('AB')), + ('(?i)()ef', 'DEF', '0,1', ascii(('EF', ''))), + ('(?i)*a', '-', '', regex.error, self.NOTHING_TO_REPEAT), + ('(?i)(*)b', '-', '', regex.error, self.NOTHING_TO_REPEAT), + ('(?i)$b', 'B', '', ascii(None)), + + ('(?i)a\\', '-', '', regex.error, self.BAD_ESCAPE), + ('(?i)a\\(b', 'A(B', '', ascii(('A(B',))), + ('(?i)a\\(*b', 'AB', '0', ascii('AB')), + ('(?i)a\\(*b', 'A((B', '0', ascii('A((B')), + ('(?i)a\\\\b', 'A\\B', '0', ascii('A\\B')), + ('(?i)abc)', '-', '', regex.error, self.TRAILING_CHARS), + ('(?i)(abc', '-', '', regex.error, self.MISSING_RPAREN), + ('(?i)((a))', 'ABC', '0,1,2', ascii(('A', 'A', 'A'))), + ('(?i)(a)b(c)', 'ABC', '0,1,2', ascii(('ABC', 'A', 'C'))), + ('(?i)a+b+c', 'AABBABC', '0', ascii('ABC')), + + ('(?i)a{1,}b{1,}c', 'AABBABC', '0', ascii('ABC')), + ('(?i)a**', '-', '', regex.error, self.MULTIPLE_REPEAT), + ('(?i)a.+?c', 'ABCABC', '0', ascii('ABC')), + 
('(?i)a.*?c', 'ABCABC', '0', ascii('ABC')), + ('(?i)a.{0,5}?c', 'ABCABC', '0', ascii('ABC')), + ('(?i)(a+|b)*', 'AB', '0,1', ascii(('AB', 'B'))), + ('(?i)(a+|b){0,}', 'AB', '0,1', ascii(('AB', 'B'))), + ('(?i)(a+|b)+', 'AB', '0,1', ascii(('AB', 'B'))), + ('(?i)(a+|b){1,}', 'AB', '0,1', ascii(('AB', 'B'))), + ('(?i)(a+|b)?', 'AB', '0,1', ascii(('A', 'A'))), + + ('(?i)(a+|b){0,1}', 'AB', '0,1', ascii(('A', 'A'))), + ('(?i)(a+|b){0,1}?', 'AB', '0,1', ascii(('', None))), + ('(?i))(', '-', '', regex.error, self.TRAILING_CHARS), + ('(?i)[^ab]*', 'CDE', '0', ascii('CDE')), + ('(?i)abc', '', '', ascii(None)), + ('(?i)a*', '', '0', ascii('')), + ('(?i)([abc])*d', 'ABBBCD', '0,1', ascii(('ABBBCD', 'C'))), + ('(?i)([abc])*bcd', 'ABCD', '0,1', ascii(('ABCD', 'A'))), + ('(?i)a|b|c|d|e', 'E', '0', ascii('E')), + ('(?i)(a|b|c|d|e)f', 'EF', '0,1', ascii(('EF', 'E'))), + + ('(?i)abcd*efg', 'ABCDEFG', '0', ascii('ABCDEFG')), + ('(?i)ab*', 'XABYABBBZ', '0', ascii('AB')), + ('(?i)ab*', 'XAYABBBZ', '0', ascii('A')), + ('(?i)(ab|cd)e', 'ABCDE', '0,1', ascii(('CDE', 'CD'))), + ('(?i)[abhgefdc]ij', 'HIJ', '0', ascii('HIJ')), + ('(?i)^(ab|cd)e', 'ABCDE', '', ascii(None)), + ('(?i)(abc|)ef', 'ABCDEF', '0,1', ascii(('EF', ''))), + ('(?i)(a|b)c*d', 'ABCD', '0,1', ascii(('BCD', 'B'))), + ('(?i)(ab|ab*)bc', 'ABC', '0,1', ascii(('ABC', 'A'))), + ('(?i)a([bc]*)c*', 'ABC', '0,1', ascii(('ABC', 'BC'))), + + ('(?i)a([bc]*)(c*d)', 'ABCD', '0,1,2', ascii(('ABCD', 'BC', 'D'))), + ('(?i)a([bc]+)(c*d)', 'ABCD', '0,1,2', ascii(('ABCD', 'BC', 'D'))), + ('(?i)a([bc]*)(c+d)', 'ABCD', '0,1,2', ascii(('ABCD', 'B', 'CD'))), + ('(?i)a[bcd]*dcdcde', 'ADCDCDE', '0', ascii('ADCDCDE')), + ('(?i)a[bcd]+dcdcde', 'ADCDCDE', '', ascii(None)), + ('(?i)(ab|a)b*c', 'ABC', '0,1', ascii(('ABC', 'AB'))), + ('(?i)((a)(b)c)(d)', 'ABCD', '1,2,3,4', ascii(('ABC', 'A', 'B', + 'D'))), + ('(?i)[a-zA-Z_][a-zA-Z0-9_]*', 'ALPHA', '0', ascii('ALPHA')), + ('(?i)^a(bc+|b[eh])g|.h$', 'ABH', '0,1', ascii(('BH', None))), + 
('(?i)(bc+d$|ef*g.|h?i(j|k))', 'EFFGZ', '0,1,2', ascii(('EFFGZ', + 'EFFGZ', None))), + + ('(?i)(bc+d$|ef*g.|h?i(j|k))', 'IJ', '0,1,2', ascii(('IJ', 'IJ', + 'J'))), + ('(?i)(bc+d$|ef*g.|h?i(j|k))', 'EFFG', '', ascii(None)), + ('(?i)(bc+d$|ef*g.|h?i(j|k))', 'BCDD', '', ascii(None)), + ('(?i)(bc+d$|ef*g.|h?i(j|k))', 'REFFGZ', '0,1,2', ascii(('EFFGZ', + 'EFFGZ', None))), + ('(?i)((((((((((a))))))))))', 'A', '10', ascii('A')), + ('(?i)((((((((((a))))))))))\\10', 'AA', '0', ascii('AA')), + #('(?i)((((((((((a))))))))))\\41', 'AA', '', ascii(None)), + #('(?i)((((((((((a))))))))))\\41', 'A!', '0', ascii('A!')), + ('(?i)(((((((((a)))))))))', 'A', '0', ascii('A')), + ('(?i)(?:(?:(?:(?:(?:(?:(?:(?:(?:(a))))))))))', 'A', '1', + ascii('A')), + ('(?i)(?:(?:(?:(?:(?:(?:(?:(?:(?:(a|b|c))))))))))', 'C', '1', + ascii('C')), + ('(?i)multiple words of text', 'UH-UH', '', ascii(None)), + + ('(?i)multiple words', 'MULTIPLE WORDS, YEAH', '0', + ascii('MULTIPLE WORDS')), + ('(?i)(.*)c(.*)', 'ABCDE', '0,1,2', ascii(('ABCDE', 'AB', 'DE'))), + ('(?i)\\((.*), (.*)\\)', '(A, B)', '2,1', ascii(('B', 'A'))), + ('(?i)[k]', 'AB', '', ascii(None)), + # ('(?i)abcd', 'ABCD', SUCCEED, 'found+"-"+\\found+"-"+\\\\found', ascii(ABCD-$&-\\ABCD)), + # ('(?i)a(bc)d', 'ABCD', SUCCEED, 'g1+"-"+\\g1+"-"+\\\\g1', ascii(BC-$1-\\BC)), + ('(?i)a[-]?c', 'AC', '0', ascii('AC')), + ('(?i)(abc)\\1', 'ABCABC', '1', ascii('ABC')), + ('(?i)([a-c]*)\\1', 'ABCABC', '1', ascii('ABC')), + ('a(?!b).', 'abad', '0', ascii('ad')), + ('a(?=d).', 'abad', '0', ascii('ad')), + ('a(?=c|d).', 'abad', '0', ascii('ad')), + + ('a(?:b|c|d)(.)', 'ace', '1', ascii('e')), + ('a(?:b|c|d)*(.)', 'ace', '1', ascii('e')), + ('a(?:b|c|d)+?(.)', 'ace', '1', ascii('e')), + ('a(?:b|(c|e){1,2}?|d)+?(.)', 'ace', '1,2', ascii(('c', 'e'))), + + # Lookbehind: split by : but not if it is escaped by -. + ('(?]*?b', 'a>b', '', ascii(None)), + # Bug 490573: minimizing repeat problem. + (r'^a*?$', 'foo', '', ascii(None)), + # Bug 470582: nested groups problem. 
+ (r'^((a)c)?(ab)$', 'ab', '1,2,3', ascii((None, None, 'ab'))), + # Another minimizing repeat problem (capturing groups in assertions). + ('^([ab]*?)(?=(b)?)c', 'abc', '1,2', ascii(('ab', None))), + ('^([ab]*?)(?!(b))c', 'abc', '1,2', ascii(('ab', None))), + ('^([ab]*?)(?(.){0,2})d", "abcd").captures(1), + ['b', 'c']) + self.assertEqual(regex.search(r"(.)+", "a").captures(1), ['a']) + + def test_guards(self): + m = regex.search(r"(X.*?Y\s*){3}(X\s*)+AB:", + "XY\nX Y\nX Y\nXY\nXX AB:") + self.assertEqual(m.span(0, 1, 2), ((3, 21), (12, 15), (16, 18))) + + m = regex.search(r"(X.*?Y\s*){3,}(X\s*)+AB:", + "XY\nX Y\nX Y\nXY\nXX AB:") + self.assertEqual(m.span(0, 1, 2), ((0, 21), (12, 15), (16, 18))) + + m = regex.search(r'\d{4}(\s*\w)?\W*((?!\d)\w){2}', "9999XX") + self.assertEqual(m.span(0, 1, 2), ((0, 6), (-1, -1), (5, 6))) + + m = regex.search(r'A\s*?.*?(\n+.*?\s*?){0,2}\(X', 'A\n1\nS\n1 (X') + self.assertEqual(m.span(0, 1), ((0, 10), (5, 8))) + + m = regex.search(r'Derde\s*:', 'aaaaaa:\nDerde:') + self.assertEqual(m.span(), (8, 14)) + m = regex.search(r'Derde\s*:', 'aaaaa:\nDerde:') + self.assertEqual(m.span(), (7, 13)) + + def test_turkic(self): + # Turkish has dotted and dotless I/i. 
+ pairs = "I=i;I=\u0131;i=\u0130" + + all_chars = set() + matching = set() + for pair in pairs.split(";"): + ch1, ch2 = pair.split("=") + all_chars.update((ch1, ch2)) + matching.add((ch1, ch1)) + matching.add((ch1, ch2)) + matching.add((ch2, ch1)) + matching.add((ch2, ch2)) + + for ch1 in all_chars: + for ch2 in all_chars: + m = regex.match(r"(?i)\A" + ch1 + r"\Z", ch2) + if m: + if (ch1, ch2) not in matching: + self.fail("{} matching {}".format(ascii(ch1), + ascii(ch2))) + else: + if (ch1, ch2) in matching: + self.fail("{} not matching {}".format(ascii(ch1), + ascii(ch2))) + + def test_named_lists(self): + options = ["one", "two", "three"] + self.assertEqual(regex.match(r"333\L444", "333one444", + bar=options).group(), "333one444") + self.assertEqual(regex.match(r"(?i)333\L444", "333TWO444", + bar=options).group(), "333TWO444") + self.assertEqual(regex.match(r"333\L444", "333four444", + bar=options), None) + + options = [b"one", b"two", b"three"] + self.assertEqual(regex.match(br"333\L444", b"333one444", + bar=options).group(), b"333one444") + self.assertEqual(regex.match(br"(?i)333\L444", b"333TWO444", + bar=options).group(), b"333TWO444") + self.assertEqual(regex.match(br"333\L444", b"333four444", + bar=options), None) + + self.assertEqual(repr(type(regex.compile(r"3\L4\L+5", + bar=["one", "two", "three"]))), self.PATTERN_CLASS) + + self.assertEqual(regex.findall(r"^\L", "solid QWERT", + options=set(['good', 'brilliant', '+s\\ol[i}d'])), []) + self.assertEqual(regex.findall(r"^\L", "+solid QWERT", + options=set(['good', 'brilliant', '+solid'])), ['+solid']) + + options = ["STRASSE"] + self.assertEqual(regex.match(r"(?fi)\L", + "stra\N{LATIN SMALL LETTER SHARP S}e", words=options).span(), (0, + 6)) + + options = ["STRASSE", "stress"] + self.assertEqual(regex.match(r"(?fi)\L", + "stra\N{LATIN SMALL LETTER SHARP S}e", words=options).span(), (0, + 6)) + + options = ["stra\N{LATIN SMALL LETTER SHARP S}e"] + self.assertEqual(regex.match(r"(?fi)\L", "STRASSE", + 
words=options).span(), (0, 7)) + + options = ["kit"] + self.assertEqual(regex.search(r"(?i)\L", "SKITS", + words=options).span(), (1, 4)) + self.assertEqual(regex.search(r"(?i)\L", + "SK\N{LATIN CAPITAL LETTER I WITH DOT ABOVE}TS", + words=options).span(), (1, 4)) + + self.assertEqual(regex.search(r"(?fi)\b(\w+) +\1\b", + " stra\N{LATIN SMALL LETTER SHARP S}e STRASSE ").span(), (1, 15)) + self.assertEqual(regex.search(r"(?fi)\b(\w+) +\1\b", + " STRASSE stra\N{LATIN SMALL LETTER SHARP S}e ").span(), (1, 15)) + + self.assertEqual(regex.search(r"^\L$", "", options=[]).span(), + (0, 0)) + + def test_fuzzy(self): + # Some tests borrowed from TRE library tests. + self.assertEqual(repr(type(regex.compile('(fou){s,e<=1}'))), + self.PATTERN_CLASS) + self.assertEqual(repr(type(regex.compile('(fuu){s}'))), + self.PATTERN_CLASS) + self.assertEqual(repr(type(regex.compile('(fuu){s,e}'))), + self.PATTERN_CLASS) + self.assertEqual(repr(type(regex.compile('(anaconda){1i+1d<1,s<=1}'))), + self.PATTERN_CLASS) + self.assertEqual(repr(type(regex.compile('(anaconda){1i+1d<1,s<=1,e<=10}'))), + self.PATTERN_CLASS) + self.assertEqual(repr(type(regex.compile('(anaconda){s<=1,e<=1,1i+1d<1}'))), + self.PATTERN_CLASS) + + text = 'molasses anaconda foo bar baz smith anderson ' + self.assertEqual(regex.search('(znacnda){s<=1,e<=3,1i+1d<1}', text), + None) + self.assertEqual(regex.search('(znacnda){s<=1,e<=3,1i+1d<2}', + text).span(0, 1), ((9, 17), (9, 17))) + self.assertEqual(regex.search('(ananda){1i+1d<2}', text), None) + self.assertEqual(regex.search(r"(?:\bznacnda){e<=2}", text)[0], + "anaconda") + self.assertEqual(regex.search(r"(?:\bnacnda){e<=2}", text)[0], + "anaconda") + + text = 'anaconda foo bar baz smith anderson' + self.assertEqual(regex.search('(fuu){i<=3,d<=3,e<=5}', text).span(0, + 1), ((0, 0), (0, 0))) + self.assertEqual(regex.search('(?b)(fuu){i<=3,d<=3,e<=5}', + text).span(0, 1), ((9, 10), (9, 10))) + self.assertEqual(regex.search('(fuu){i<=2,d<=2,e<=5}', text).span(0, + 1), 
((7, 10), (7, 10))) + self.assertEqual(regex.search('(?e)(fuu){i<=2,d<=2,e<=5}', + text).span(0, 1), ((9, 10), (9, 10))) + self.assertEqual(regex.search('(fuu){i<=3,d<=3,e}', text).span(0, 1), + ((0, 0), (0, 0))) + self.assertEqual(regex.search('(?b)(fuu){i<=3,d<=3,e}', text).span(0, + 1), ((9, 10), (9, 10))) + + self.assertEqual(repr(type(regex.compile('(approximate){s<=3,1i+1d<3}'))), + self.PATTERN_CLASS) + + # No cost limit. + self.assertEqual(regex.search('(foobar){e}', + 'xirefoabralfobarxie').span(0, 1), ((0, 6), (0, 6))) + self.assertEqual(regex.search('(?e)(foobar){e}', + 'xirefoabralfobarxie').span(0, 1), ((0, 3), (0, 3))) + self.assertEqual(regex.search('(?b)(foobar){e}', + 'xirefoabralfobarxie').span(0, 1), ((11, 16), (11, 16))) + + # At most two errors. + self.assertEqual(regex.search('(foobar){e<=2}', + 'xirefoabrzlfd').span(0, 1), ((4, 9), (4, 9))) + self.assertEqual(regex.search('(foobar){e<=2}', 'xirefoabzlfd'), None) + + # At most two inserts or substitutions and max two errors total. + self.assertEqual(regex.search('(foobar){i<=2,s<=2,e<=2}', + 'oobargoobaploowap').span(0, 1), ((5, 11), (5, 11))) + + # Find best whole word match for "foobar". + self.assertEqual(regex.search('\\b(foobar){e}\\b', 'zfoobarz').span(0, + 1), ((0, 8), (0, 8))) + self.assertEqual(regex.search('\\b(foobar){e}\\b', + 'boing zfoobarz goobar woop').span(0, 1), ((0, 6), (0, 6))) + self.assertEqual(regex.search('(?b)\\b(foobar){e}\\b', + 'boing zfoobarz goobar woop').span(0, 1), ((15, 21), (15, 21))) + + # Match whole string, allow only 1 error. 
+ self.assertEqual(regex.search('^(foobar){e<=1}$', 'foobar').span(0, 1), + ((0, 6), (0, 6))) + self.assertEqual(regex.search('^(foobar){e<=1}$', 'xfoobar').span(0, + 1), ((0, 7), (0, 7))) + self.assertEqual(regex.search('^(foobar){e<=1}$', 'foobarx').span(0, + 1), ((0, 7), (0, 7))) + self.assertEqual(regex.search('^(foobar){e<=1}$', 'fooxbar').span(0, + 1), ((0, 7), (0, 7))) + self.assertEqual(regex.search('^(foobar){e<=1}$', 'foxbar').span(0, 1), + ((0, 6), (0, 6))) + self.assertEqual(regex.search('^(foobar){e<=1}$', 'xoobar').span(0, 1), + ((0, 6), (0, 6))) + self.assertEqual(regex.search('^(foobar){e<=1}$', 'foobax').span(0, 1), + ((0, 6), (0, 6))) + self.assertEqual(regex.search('^(foobar){e<=1}$', 'oobar').span(0, 1), + ((0, 5), (0, 5))) + self.assertEqual(regex.search('^(foobar){e<=1}$', 'fobar').span(0, 1), + ((0, 5), (0, 5))) + self.assertEqual(regex.search('^(foobar){e<=1}$', 'fooba').span(0, 1), + ((0, 5), (0, 5))) + self.assertEqual(regex.search('^(foobar){e<=1}$', 'xfoobarx'), None) + self.assertEqual(regex.search('^(foobar){e<=1}$', 'foobarxx'), None) + self.assertEqual(regex.search('^(foobar){e<=1}$', 'xxfoobar'), None) + self.assertEqual(regex.search('^(foobar){e<=1}$', 'xfoxbar'), None) + self.assertEqual(regex.search('^(foobar){e<=1}$', 'foxbarx'), None) + + # At most one insert, two deletes, and three substitutions. + # Additionally, deletes cost two and substitutes one, and total + # cost must be less than 4. + self.assertEqual(regex.search('(foobar){i<=1,d<=2,s<=3,2d+1s<4}', + '3oifaowefbaoraofuiebofasebfaobfaorfeoaro').span(0, 1), ((6, 13), (6, + 13))) + self.assertEqual(regex.search('(?b)(foobar){i<=1,d<=2,s<=3,2d+1s<4}', + '3oifaowefbaoraofuiebofasebfaobfaorfeoaro').span(0, 1), ((34, 39), + (34, 39))) + + # Partially fuzzy matches. 
+ self.assertEqual(regex.search('foo(bar){e<=1}zap', 'foobarzap').span(0, + 1), ((0, 9), (3, 6))) + self.assertEqual(regex.search('foo(bar){e<=1}zap', 'fobarzap'), None) + self.assertEqual(regex.search('foo(bar){e<=1}zap', 'foobrzap').span(0, + 1), ((0, 8), (3, 5))) + + text = ('www.cnn.com 64.236.16.20\nwww.slashdot.org 66.35.250.150\n' + 'For useful information, use www.slashdot.org\nthis is demo data!\n') + self.assertEqual(regex.search(r'(?s)^.*(dot.org){e}.*$', text).span(0, + 1), ((0, 120), (120, 120))) + self.assertEqual(regex.search(r'(?es)^.*(dot.org){e}.*$', text).span(0, + 1), ((0, 120), (93, 100))) + self.assertEqual(regex.search(r'^.*(dot.org){e}.*$', text).span(0, 1), + ((0, 119), (24, 101))) + + # Behaviour is unexpected, but arguably not wrong. It first finds the + # best match, then the best in what follows, etc. + self.assertEqual(regex.findall(r"\b\L{e<=1}\b", + " book cot dog desk ", words="cat dog".split()), ["cot", "dog"]) + self.assertEqual(regex.findall(r"\b\L{e<=1}\b", + " book dog cot desk ", words="cat dog".split()), [" dog", "cot"]) + self.assertEqual(regex.findall(r"(?e)\b\L{e<=1}\b", + " book dog cot desk ", words="cat dog".split()), ["dog", "cot"]) + self.assertEqual(regex.findall(r"(?r)\b\L{e<=1}\b", + " book cot dog desk ", words="cat dog".split()), ["dog ", "cot"]) + self.assertEqual(regex.findall(r"(?er)\b\L{e<=1}\b", + " book cot dog desk ", words="cat dog".split()), ["dog", "cot"]) + self.assertEqual(regex.findall(r"(?r)\b\L{e<=1}\b", + " book dog cot desk ", words="cat dog".split()), ["cot", "dog"]) + self.assertEqual(regex.findall(br"\b\L{e<=1}\b", + b" book cot dog desk ", words=b"cat dog".split()), [b"cot", b"dog"]) + self.assertEqual(regex.findall(br"\b\L{e<=1}\b", + b" book dog cot desk ", words=b"cat dog".split()), [b" dog", b"cot"]) + self.assertEqual(regex.findall(br"(?e)\b\L{e<=1}\b", + b" book dog cot desk ", words=b"cat dog".split()), [b"dog", b"cot"]) + self.assertEqual(regex.findall(br"(?r)\b\L{e<=1}\b", + b" book 
cot dog desk ", words=b"cat dog".split()), [b"dog ", b"cot"]) + self.assertEqual(regex.findall(br"(?er)\b\L{e<=1}\b", + b" book cot dog desk ", words=b"cat dog".split()), [b"dog", b"cot"]) + self.assertEqual(regex.findall(br"(?r)\b\L{e<=1}\b", + b" book dog cot desk ", words=b"cat dog".split()), [b"cot", b"dog"]) + + self.assertEqual(regex.search(r"(\w+) (\1{e<=1})", "foo fou").groups(), + ("foo", "fou")) + self.assertEqual(regex.search(r"(?r)(\2{e<=1}) (\w+)", + "foo fou").groups(), ("foo", "fou")) + self.assertEqual(regex.search(br"(\w+) (\1{e<=1})", + b"foo fou").groups(), (b"foo", b"fou")) + + self.assertEqual(regex.findall(r"(?:(?:QR)+){e}", "abcde"), ["abcde", + ""]) + self.assertEqual(regex.findall(r"(?:Q+){e}", "abc"), ["abc", ""]) + + # Hg issue 41: = for fuzzy matches + self.assertEqual(regex.match(r"(?:service detection){0[^()]+)|(?R))*\)", "(ab(cd)ef)")[ + : ], ("(ab(cd)ef)", "ef")) + self.assertEqual(regex.search(r"\(((?>[^()]+)|(?R))*\)", + "(ab(cd)ef)").captures(1), ["ab", "cd", "(cd)", "ef"]) + + self.assertEqual(regex.search(r"(?r)\(((?R)|(?>[^()]+))*\)", + "(ab(cd)ef)")[ : ], ("(ab(cd)ef)", "ab")) + self.assertEqual(regex.search(r"(?r)\(((?R)|(?>[^()]+))*\)", + "(ab(cd)ef)").captures(1), ["ef", "cd", "(cd)", "ab"]) + + self.assertEqual(regex.search(r"\(([^()]+|(?R))*\)", + "some text (a(b(c)d)e) more text")[ : ], ("(a(b(c)d)e)", "e")) + + self.assertEqual(regex.search(r"(?r)\(((?R)|[^()]+)*\)", + "some text (a(b(c)d)e) more text")[ : ], ("(a(b(c)d)e)", "a")) + + self.assertEqual(regex.search(r"(foo(\(((?:(?>[^()]+)|(?2))*)\)))", + "foo(bar(baz)+baz(bop))")[ : ], ("foo(bar(baz)+baz(bop))", + "foo(bar(baz)+baz(bop))", "(bar(baz)+baz(bop))", + "bar(baz)+baz(bop)")) + + self.assertEqual(regex.search(r"(?r)(foo(\(((?:(?2)|(?>[^()]+))*)\)))", + "foo(bar(baz)+baz(bop))")[ : ], ("foo(bar(baz)+baz(bop))", + "foo(bar(baz)+baz(bop))", "(bar(baz)+baz(bop))", + "bar(baz)+baz(bop)")) + + rgx = 
regex.compile(r"""^\s*(<\s*([a-zA-Z:]+)(?:\s*[a-zA-Z:]*\s*=\s*(?:'[^']*'|"[^"]*"))*\s*(/\s*)?>(?:[^<>]*|(?1))*(?(3)|<\s*/\s*\2\s*>))\s*$""") + self.assertEqual(bool(rgx.search('')), True) + self.assertEqual(bool(rgx.search('')), False) + self.assertEqual(bool(rgx.search('')), True) + self.assertEqual(bool(rgx.search('')), False) + self.assertEqual(bool(rgx.search('')), False) + + self.assertEqual(bool(rgx.search('')), False) + self.assertEqual(bool(rgx.search('')), True) + self.assertEqual(bool(rgx.search('< fooo / >')), True) + # The next regex should and does match. Perl 5.14 agrees. + #self.assertEqual(bool(rgx.search('foo')), False) + self.assertEqual(bool(rgx.search('foo')), False) + + self.assertEqual(bool(rgx.search('foo')), True) + self.assertEqual(bool(rgx.search('foo')), True) + self.assertEqual(bool(rgx.search('')), True) + + def test_copy(self): + # PatternObjects are immutable, therefore there's no need to clone them. + r = regex.compile("a") + self.assertTrue(copy.copy(r) is r) + self.assertTrue(copy.deepcopy(r) is r) + + # MatchObjects are normally mutable because the target string can be + # detached. However, after the target string has been detached, a + # MatchObject becomes immutable, so there's no need to clone it. + m = r.match("a") + self.assertTrue(copy.copy(m) is not m) + self.assertTrue(copy.deepcopy(m) is not m) + + self.assertTrue(m.string is not None) + m2 = copy.copy(m) + m2.detach_string() + self.assertTrue(m.string is not None) + self.assertTrue(m2.string is None) + + # The following behaviour matches that of the re module. + it = regex.finditer(".", "ab") + it2 = copy.copy(it) + self.assertEqual(next(it).group(), "a") + self.assertEqual(next(it2).group(), "b") + + # The following behaviour matches that of the re module. 
+ it = regex.finditer(".", "ab") + it2 = copy.deepcopy(it) + self.assertEqual(next(it).group(), "a") + self.assertEqual(next(it2).group(), "b") + + # The following behaviour is designed to match that of copying 'finditer'. + it = regex.splititer(" ", "a b") + it2 = copy.copy(it) + self.assertEqual(next(it), "a") + self.assertEqual(next(it2), "b") + + # The following behaviour is designed to match that of copying 'finditer'. + it = regex.splititer(" ", "a b") + it2 = copy.deepcopy(it) + self.assertEqual(next(it), "a") + self.assertEqual(next(it2), "b") + + def test_format(self): + self.assertEqual(regex.subf(r"(\w+) (\w+)", "{0} => {2} {1}", + "foo bar"), "foo bar => bar foo") + self.assertEqual(regex.subf(r"(?\w+) (?\w+)", + "{word2} {word1}", "foo bar"), "bar foo") + + self.assertEqual(regex.subfn(r"(\w+) (\w+)", "{0} => {2} {1}", + "foo bar"), ("foo bar => bar foo", 1)) + self.assertEqual(regex.subfn(r"(?\w+) (?\w+)", + "{word2} {word1}", "foo bar"), ("bar foo", 1)) + + self.assertEqual(regex.match(r"(\w+) (\w+)", + "foo bar").expandf("{0} => {2} {1}"), "foo bar => bar foo") + + def test_fullmatch(self): + self.assertEqual(bool(regex.fullmatch(r"abc", "abc")), True) + self.assertEqual(bool(regex.fullmatch(r"abc", "abcx")), False) + self.assertEqual(bool(regex.fullmatch(r"abc", "abcx", endpos=3)), True) + + self.assertEqual(bool(regex.fullmatch(r"abc", "xabc", pos=1)), True) + self.assertEqual(bool(regex.fullmatch(r"abc", "xabcy", pos=1)), False) + self.assertEqual(bool(regex.fullmatch(r"abc", "xabcy", pos=1, + endpos=4)), True) + + self.assertEqual(bool(regex.fullmatch(r"(?r)abc", "abc")), True) + self.assertEqual(bool(regex.fullmatch(r"(?r)abc", "abcx")), False) + self.assertEqual(bool(regex.fullmatch(r"(?r)abc", "abcx", endpos=3)), + True) + + self.assertEqual(bool(regex.fullmatch(r"(?r)abc", "xabc", pos=1)), + True) + self.assertEqual(bool(regex.fullmatch(r"(?r)abc", "xabcy", pos=1)), + False) + self.assertEqual(bool(regex.fullmatch(r"(?r)abc", "xabcy", pos=1, 
+ endpos=4)), True) + + def test_issue_18468(self): + self.assertTypedEqual(regex.sub('y', 'a', 'xyz'), 'xaz') + self.assertTypedEqual(regex.sub('y', StrSubclass('a'), + StrSubclass('xyz')), 'xaz') + self.assertTypedEqual(regex.sub(b'y', b'a', b'xyz'), b'xaz') + self.assertTypedEqual(regex.sub(b'y', BytesSubclass(b'a'), + BytesSubclass(b'xyz')), b'xaz') + self.assertTypedEqual(regex.sub(b'y', bytearray(b'a'), + bytearray(b'xyz')), b'xaz') + self.assertTypedEqual(regex.sub(b'y', memoryview(b'a'), + memoryview(b'xyz')), b'xaz') + + for string in ":a:b::c", StrSubclass(":a:b::c"): + self.assertTypedEqual(regex.split(":", string), ['', 'a', 'b', '', + 'c']) + if sys.version_info >= (3, 7, 0): + self.assertTypedEqual(regex.split(":*", string), ['', '', 'a', + '', 'b', '', 'c', '']) + self.assertTypedEqual(regex.split("(:*)", string), ['', ':', + '', '', 'a', ':', '', '', 'b', '::', '', '', 'c', '', '']) + else: + self.assertTypedEqual(regex.split(":*", string), ['', 'a', 'b', + 'c']) + self.assertTypedEqual(regex.split("(:*)", string), ['', ':', + 'a', ':', 'b', '::', 'c']) + + for string in (b":a:b::c", BytesSubclass(b":a:b::c"), + bytearray(b":a:b::c"), memoryview(b":a:b::c")): + self.assertTypedEqual(regex.split(b":", string), [b'', b'a', b'b', + b'', b'c']) + if sys.version_info >= (3, 7, 0): + self.assertTypedEqual(regex.split(b":*", string), [b'', b'', + b'a', b'', b'b', b'', b'c', b'']) + self.assertTypedEqual(regex.split(b"(:*)", string), [b'', b':', + b'', b'', b'a', b':', b'', b'', b'b', b'::', b'', b'', b'c', + b'', b'']) + else: + self.assertTypedEqual(regex.split(b":*", string), [b'', b'a', + b'b', b'c']) + self.assertTypedEqual(regex.split(b"(:*)", string), [b'', b':', + b'a', b':', b'b', b'::', b'c']) + + for string in "a:b::c:::d", StrSubclass("a:b::c:::d"): + self.assertTypedEqual(regex.findall(":+", string), [":", "::", + ":::"]) + self.assertTypedEqual(regex.findall("(:+)", string), [":", "::", + ":::"]) + 
self.assertTypedEqual(regex.findall("(:)(:*)", string), [(":", ""), + (":", ":"), (":", "::")]) + + for string in (b"a:b::c:::d", BytesSubclass(b"a:b::c:::d"), + bytearray(b"a:b::c:::d"), memoryview(b"a:b::c:::d")): + self.assertTypedEqual(regex.findall(b":+", string), [b":", b"::", + b":::"]) + self.assertTypedEqual(regex.findall(b"(:+)", string), [b":", b"::", + b":::"]) + self.assertTypedEqual(regex.findall(b"(:)(:*)", string), [(b":", + b""), (b":", b":"), (b":", b"::")]) + + for string in 'a', StrSubclass('a'): + self.assertEqual(regex.match('a', string).groups(), ()) + self.assertEqual(regex.match('(a)', string).groups(), ('a',)) + self.assertEqual(regex.match('(a)', string).group(0), 'a') + self.assertEqual(regex.match('(a)', string).group(1), 'a') + self.assertEqual(regex.match('(a)', string).group(1, 1), ('a', + 'a')) + + for string in (b'a', BytesSubclass(b'a'), bytearray(b'a'), + memoryview(b'a')): + self.assertEqual(regex.match(b'a', string).groups(), ()) + self.assertEqual(regex.match(b'(a)', string).groups(), (b'a',)) + self.assertEqual(regex.match(b'(a)', string).group(0), b'a') + self.assertEqual(regex.match(b'(a)', string).group(1), b'a') + self.assertEqual(regex.match(b'(a)', string).group(1, 1), (b'a', + b'a')) + + def test_partial(self): + self.assertEqual(regex.match('ab', 'a', partial=True).partial, True) + self.assertEqual(regex.match('ab', 'a', partial=True).span(), (0, 1)) + self.assertEqual(regex.match(r'cats', 'cat', partial=True).partial, + True) + self.assertEqual(regex.match(r'cats', 'cat', partial=True).span(), (0, + 3)) + self.assertEqual(regex.match(r'cats', 'catch', partial=True), None) + self.assertEqual(regex.match(r'abc\w{3}', 'abcdef', + partial=True).partial, False) + self.assertEqual(regex.match(r'abc\w{3}', 'abcdef', + partial=True).span(), (0, 6)) + self.assertEqual(regex.match(r'abc\w{3}', 'abcde', + partial=True).partial, True) + self.assertEqual(regex.match(r'abc\w{3}', 'abcde', + partial=True).span(), (0, 5)) + + 
self.assertEqual(regex.match(r'\d{4}$', '1234', partial=True).partial, + False) + + self.assertEqual(regex.match(r'\L', 'post', partial=True, + words=['post']).partial, False) + self.assertEqual(regex.match(r'\L', 'post', partial=True, + words=['post']).span(), (0, 4)) + self.assertEqual(regex.match(r'\L', 'pos', partial=True, + words=['post']).partial, True) + self.assertEqual(regex.match(r'\L', 'pos', partial=True, + words=['post']).span(), (0, 3)) + + self.assertEqual(regex.match(r'(?fi)\L', 'POST', partial=True, + words=['po\uFB06']).partial, False) + self.assertEqual(regex.match(r'(?fi)\L', 'POST', partial=True, + words=['po\uFB06']).span(), (0, 4)) + self.assertEqual(regex.match(r'(?fi)\L', 'POS', partial=True, + words=['po\uFB06']).partial, True) + self.assertEqual(regex.match(r'(?fi)\L', 'POS', partial=True, + words=['po\uFB06']).span(), (0, 3)) + self.assertEqual(regex.match(r'(?fi)\L', 'po\uFB06', + partial=True, words=['POS']), None) + + self.assertEqual(regex.match(r'[a-z]*4R$', 'a', partial=True).span(), + (0, 1)) + self.assertEqual(regex.match(r'[a-z]*4R$', 'ab', partial=True).span(), + (0, 2)) + self.assertEqual(regex.match(r'[a-z]*4R$', 'ab4', partial=True).span(), + (0, 3)) + self.assertEqual(regex.match(r'[a-z]*4R$', 'a4', partial=True).span(), + (0, 2)) + self.assertEqual(regex.match(r'[a-z]*4R$', 'a4R', partial=True).span(), + (0, 3)) + self.assertEqual(regex.match(r'[a-z]*4R$', '4a', partial=True), None) + self.assertEqual(regex.match(r'[a-z]*4R$', 'a44', partial=True), None) + + def test_hg_bugs(self): + # Hg issue 28: regex.compile("(?>b)") causes "TypeError: 'Character' + # object is not subscriptable" + self.assertEqual(bool(regex.compile("(?>b)", flags=regex.V1)), True) + + # Hg issue 29: regex.compile("^((?>\w+)|(?>\s+))*$") causes + # "TypeError: 'GreedyRepeat' object is not iterable" + self.assertEqual(bool(regex.compile(r"^((?>\w+)|(?>\s+))*$", + flags=regex.V1)), True) + + # Hg issue 31: atomic and normal groups in recursive patterns 
+ self.assertEqual(regex.findall(r"\((?:(?>[^()]+)|(?R))*\)", + "a(bcd(e)f)g(h)"), ['(bcd(e)f)', '(h)']) + self.assertEqual(regex.findall(r"\((?:(?:[^()]+)|(?R))*\)", + "a(bcd(e)f)g(h)"), ['(bcd(e)f)', '(h)']) + self.assertEqual(regex.findall(r"\((?:(?>[^()]+)|(?R))*\)", + "a(b(cd)e)f)g)h"), ['(b(cd)e)']) + self.assertEqual(regex.findall(r"\((?:(?>[^()]+)|(?R))*\)", + "a(bc(d(e)f)gh"), ['(d(e)f)']) + self.assertEqual(regex.findall(r"(?r)\((?:(?>[^()]+)|(?R))*\)", + "a(bc(d(e)f)gh"), ['(d(e)f)']) + self.assertEqual([m.group() for m in + regex.finditer(r"\((?:[^()]*+|(?0))*\)", "a(b(c(de)fg)h")], + ['(c(de)fg)']) + + # Hg issue 32: regex.search("a(bc)d", "abcd", regex.I|regex.V1) returns + # None + self.assertEqual(regex.search("a(bc)d", "abcd", regex.I | + regex.V1).group(0), "abcd") + + # Hg issue 33: regex.search("([\da-f:]+)$", "E", regex.I|regex.V1) + # returns None + self.assertEqual(regex.search(r"([\da-f:]+)$", "E", regex.I | + regex.V1).group(0), "E") + self.assertEqual(regex.search(r"([\da-f:]+)$", "e", regex.I | + regex.V1).group(0), "e") + + # Hg issue 34: regex.search("^(?=ab(de))(abd)(e)", "abde").groups() + # returns (None, 'abd', 'e') instead of ('de', 'abd', 'e') + self.assertEqual(regex.search("^(?=ab(de))(abd)(e)", "abde").groups(), + ('de', 'abd', 'e')) + + # Hg issue 35: regex.compile("\ ", regex.X) causes "_regex_core.error: + # bad escape" + self.assertEqual(bool(regex.match(r"\ ", " ", flags=regex.X)), True) + + # Hg issue 36: regex.search("^(a|)\1{2}b", "b") returns None + self.assertEqual(regex.search(r"^(a|)\1{2}b", "b").group(0, 1), ('b', + '')) + + # Hg issue 37: regex.search("^(a){0,0}", "abc").group(0,1) returns + # ('a', 'a') instead of ('', None) + self.assertEqual(regex.search("^(a){0,0}", "abc").group(0, 1), ('', + None)) + + # Hg issue 38: regex.search("(?>.*/)b", "a/b") returns None + self.assertEqual(regex.search("(?>.*/)b", "a/b").group(0), "a/b") + + # Hg issue 39: regex.search("((?i)blah)\\s+\\1", "blah BLAH") doesn't + # 
return None + # Changed to positional flags in regex 2023.12.23. + self.assertEqual(regex.search(r"((?i)blah)\s+\1", "blah BLAH"), None) + + # Hg issue 40: regex.search("(\()?[^()]+(?(1)\)|)", "(abcd").group(0) + # returns "bcd" instead of "abcd" + self.assertEqual(regex.search(r"(\()?[^()]+(?(1)\)|)", + "(abcd").group(0), "abcd") + + # Hg issue 42: regex.search("(a*)*", "a", flags=regex.V1).span(1) + # returns (0, 1) instead of (1, 1) + self.assertEqual(regex.search("(a*)*", "a").span(1), (1, 1)) + self.assertEqual(regex.search("(a*)*", "aa").span(1), (2, 2)) + self.assertEqual(regex.search("(a*)*", "aaa").span(1), (3, 3)) + + # Hg issue 43: regex.compile("a(?#xxx)*") causes "_regex_core.error: + # nothing to repeat" + self.assertEqual(regex.search("a(?#xxx)*", "aaa").group(), "aaa") + + # Hg issue 44: regex.compile("(?=abc){3}abc") causes + # "_regex_core.error: nothing to repeat" + self.assertEqual(regex.search("(?=abc){3}abc", "abcabcabc").span(), (0, + 3)) + + # Hg issue 45: regex.compile("^(?:a(?:(?:))+)+") causes + # "_regex_core.error: nothing to repeat" + self.assertEqual(regex.search("^(?:a(?:(?:))+)+", "a").span(), (0, 1)) + self.assertEqual(regex.search("^(?:a(?:(?:))+)+", "aa").span(), (0, 2)) + + # Hg issue 46: regex.compile("a(?x: b c )d") causes + # "_regex_core.error: missing )" + self.assertEqual(regex.search("a(?x: b c )d", "abcd").group(0), "abcd") + + # Hg issue 47: regex.compile("a#comment\n*", flags=regex.X) causes + # "_regex_core.error: nothing to repeat" + self.assertEqual(regex.search("a#comment\n*", "aaa", + flags=regex.X).group(0), "aaa") + + # Hg issue 48: regex.search("(a(?(1)\\1)){4}", "a"*10, + # flags=regex.V1).group(0,1) returns ('aaaaa', 'a') instead of ('aaaaaaaaaa', 'aaaa') + self.assertEqual(regex.search(r"(?V1)(a(?(1)\1)){1}", + "aaaaaaaaaa").span(0, 1), ((0, 1), (0, 1))) + self.assertEqual(regex.search(r"(?V1)(a(?(1)\1)){2}", + "aaaaaaaaaa").span(0, 1), ((0, 3), (1, 3))) + 
self.assertEqual(regex.search(r"(?V1)(a(?(1)\1)){3}", + "aaaaaaaaaa").span(0, 1), ((0, 6), (3, 6))) + self.assertEqual(regex.search(r"(?V1)(a(?(1)\1)){4}", + "aaaaaaaaaa").span(0, 1), ((0, 10), (6, 10))) + + # Hg issue 49: regex.search("(a)(?<=b(?1))", "baz", regex.V1) returns + # None incorrectly + self.assertEqual(regex.search("(?V1)(a)(?<=b(?1))", "baz").group(0), + "a") + + # Hg issue 50: not all keywords are found by named list with + # overlapping keywords when full Unicode casefolding is required + self.assertEqual(regex.findall(r'(?fi)\L', + 'POST, Post, post, po\u017Ft, po\uFB06, and po\uFB05', + keywords=['post','pos']), ['POST', 'Post', 'post', 'po\u017Ft', + 'po\uFB06', 'po\uFB05']) + self.assertEqual(regex.findall(r'(?fi)pos|post', + 'POST, Post, post, po\u017Ft, po\uFB06, and po\uFB05'), ['POS', + 'Pos', 'pos', 'po\u017F', 'po\uFB06', 'po\uFB05']) + self.assertEqual(regex.findall(r'(?fi)post|pos', + 'POST, Post, post, po\u017Ft, po\uFB06, and po\uFB05'), ['POST', + 'Post', 'post', 'po\u017Ft', 'po\uFB06', 'po\uFB05']) + self.assertEqual(regex.findall(r'(?fi)post|another', + 'POST, Post, post, po\u017Ft, po\uFB06, and po\uFB05'), ['POST', + 'Post', 'post', 'po\u017Ft', 'po\uFB06', 'po\uFB05']) + + # Hg issue 51: regex.search("((a)(?1)|(?2))", "a", flags=regex.V1) + # returns None incorrectly + self.assertEqual(regex.search("(?V1)((a)(?1)|(?2))", "a").group(0, 1, + 2), ('a', 'a', None)) + + # Hg issue 52: regex.search("(\\1xx|){6}", "xx", + # flags=regex.V1).span(0,1) returns incorrect value + self.assertEqual(regex.search(r"(?V1)(\1xx|){6}", "xx").span(0, 1), + ((0, 2), (2, 2))) + + # Hg issue 53: regex.search("(a|)+", "a") causes MemoryError + self.assertEqual(regex.search("(a|)+", "a").group(0, 1), ("a", "")) + + # Hg issue 54: regex.search("(a|)*\\d", "a"*80) causes MemoryError + self.assertEqual(regex.search(r"(a|)*\d", "a" * 80), None) + + # Hg issue 55: regex.search("^(?:a?b?)*$", "ac") take a very long time. 
+ self.assertEqual(regex.search("^(?:a?b?)*$", "ac"), None) + + # Hg issue 58: bad named character escape sequences like "\\N{1}" + # treats as "N" + self.assertRaisesRegex(regex.error, self.UNDEF_CHAR_NAME, lambda: + regex.compile("\\N{1}")) + + # Hg issue 59: regex.search("\\Z", "a\na\n") returns None incorrectly + self.assertEqual(regex.search("\\Z", "a\na\n").span(0), (4, 4)) + + # Hg issue 60: regex.search("(q1|.)*(q2|.)*(x(a|bc)*y){2,}", "xayxay") + # returns None incorrectly + self.assertEqual(regex.search("(q1|.)*(q2|.)*(x(a|bc)*y){2,}", + "xayxay").group(0), "xayxay") + + # Hg issue 61: regex.search("[^a]", "A", regex.I).group(0) returns '' + # incorrectly + self.assertEqual(regex.search("(?i)[^a]", "A"), None) + + # Hg issue 63: regex.search("[[:ascii:]]", "\N{KELVIN SIGN}", + # flags=regex.I|regex.V1) doesn't return None + self.assertEqual(regex.search("(?i)[[:ascii:]]", "\N{KELVIN SIGN}"), + None) + + # Hg issue 66: regex.search("((a|b(?1)c){3,5})", "baaaaca", + # flags=regex.V1).groups() returns ('baaaac', 'baaaac') instead of ('aaaa', 'a') + self.assertEqual(regex.search("((a|b(?1)c){3,5})", "baaaaca").group(0, + 1, 2), ('aaaa', 'aaaa', 'a')) + + # Hg issue 71: non-greedy quantifier in lookbehind + self.assertEqual(regex.findall(r"(?<=:\S+ )\w+", ":9 abc :10 def"), + ['abc', 'def']) + self.assertEqual(regex.findall(r"(?<=:\S* )\w+", ":9 abc :10 def"), + ['abc', 'def']) + self.assertEqual(regex.findall(r"(?<=:\S+? )\w+", ":9 abc :10 def"), + ['abc', 'def']) + self.assertEqual(regex.findall(r"(?<=:\S*? )\w+", ":9 abc :10 def"), + ['abc', 'def']) + + # Hg issue 73: conditional patterns + self.assertEqual(regex.search(r"(?:fe)?male", "female").group(), + "female") + self.assertEqual([m.group() for m in + regex.finditer(r"(fe)?male: h(?(1)(er)|(is)) (\w+)", + "female: her dog; male: his cat. 
asdsasda")], ['female: her dog', + 'male: his cat']) + + # Hg issue 78: "Captures" doesn't work for recursive calls + self.assertEqual(regex.search(r'(?\((?:[^()]++|(?&rec))*\))', + 'aaa(((1+0)+1)+1)bbb').captures('rec'), ['(1+0)', '((1+0)+1)', + '(((1+0)+1)+1)']) + + # Hg issue 80: Escape characters throws an exception + self.assertRaisesRegex(regex.error, self.BAD_ESCAPE, lambda: + regex.sub('x', '\\', 'x'), ) + + # Hg issue 82: error range does not work + fz = "(CAGCCTCCCATTTCAGAATATACATCC){1a(?b))', "ab").spans("x"), [(1, + 2), (0, 2)]) + + # Hg issue 91: match.expand is extremely slow + # Check that the replacement cache works. + self.assertEqual(regex.sub(r'(-)', lambda m: m.expand(r'x'), 'a-b-c'), + 'axbxc') + + # Hg issue 94: Python crashes when executing regex updates + # pattern.findall + rx = regex.compile(r'\bt(est){i<2}', flags=regex.V1) + self.assertEqual(rx.search("Some text"), None) + self.assertEqual(rx.findall("Some text"), []) + + # Hg issue 95: 'pos' for regex.error + self.assertRaisesRegex(regex.error, self.MULTIPLE_REPEAT, lambda: + regex.compile(r'.???')) + + # Hg issue 97: behaviour of regex.escape's special_only is wrong + # + # Hg issue 244: Make `special_only=True` the default in + # `regex.escape()` + self.assertEqual(regex.escape('foo!?', special_only=False), 'foo\\!\\?') + self.assertEqual(regex.escape('foo!?', special_only=True), 'foo!\\?') + self.assertEqual(regex.escape('foo!?'), 'foo!\\?') + + self.assertEqual(regex.escape(b'foo!?', special_only=False), b'foo\\!\\?') + self.assertEqual(regex.escape(b'foo!?', special_only=True), + b'foo!\\?') + self.assertEqual(regex.escape(b'foo!?'), b'foo!\\?') + + # Hg issue 100: strange results from regex.search + self.assertEqual(regex.search('^([^z]*(?:WWWi|W))?$', + 'WWWi').groups(), ('WWWi', )) + self.assertEqual(regex.search('^([^z]*(?:WWWi|w))?$', + 'WWWi').groups(), ('WWWi', )) + self.assertEqual(regex.search('^([^z]*?(?:WWWi|W))?$', + 'WWWi').groups(), ('WWWi', )) + + # Hg issue 101: 
findall() broken (seems like memory corruption) + pat = regex.compile(r'xxx', flags=regex.FULLCASE | regex.UNICODE) + self.assertEqual([x.group() for x in pat.finditer('yxxx')], ['xxx']) + self.assertEqual(pat.findall('yxxx'), ['xxx']) + + raw = 'yxxx' + self.assertEqual([x.group() for x in pat.finditer(raw)], ['xxx']) + self.assertEqual(pat.findall(raw), ['xxx']) + + pat = regex.compile(r'xxx', flags=regex.FULLCASE | regex.IGNORECASE | + regex.UNICODE) + self.assertEqual([x.group() for x in pat.finditer('yxxx')], ['xxx']) + self.assertEqual(pat.findall('yxxx'), ['xxx']) + + raw = 'yxxx' + self.assertEqual([x.group() for x in pat.finditer(raw)], ['xxx']) + self.assertEqual(pat.findall(raw), ['xxx']) + + # Hg issue 106: * operator not working correctly with sub() + if sys.version_info >= (3, 7, 0): + self.assertEqual(regex.sub('(?V0).*', 'x', 'test'), 'xx') + else: + self.assertEqual(regex.sub('(?V0).*', 'x', 'test'), 'x') + self.assertEqual(regex.sub('(?V1).*', 'x', 'test'), 'xx') + + if sys.version_info >= (3, 7, 0): + self.assertEqual(regex.sub('(?V0).*?', '|', 'test'), '|||||||||') + else: + self.assertEqual(regex.sub('(?V0).*?', '|', 'test'), '|t|e|s|t|') + self.assertEqual(regex.sub('(?V1).*?', '|', 'test'), '|||||||||') + + # Hg issue 112: re: OK, but regex: SystemError + self.assertEqual(regex.sub(r'^(@)\n(?!.*?@)(.*)', + r'\1\n==========\n\2', '@\n', flags=regex.DOTALL), '@\n==========\n') + + # Hg issue 109: Edit distance of fuzzy match + self.assertEqual(regex.match(r'(?:cats|cat){e<=1}', + 'caz').fuzzy_counts, (1, 0, 0)) + self.assertEqual(regex.match(r'(?e)(?:cats|cat){e<=1}', + 'caz').fuzzy_counts, (1, 0, 0)) + self.assertEqual(regex.match(r'(?b)(?:cats|cat){e<=1}', + 'caz').fuzzy_counts, (1, 0, 0)) + + self.assertEqual(regex.match(r'(?:cat){e<=1}', 'caz').fuzzy_counts, + (1, 0, 0)) + self.assertEqual(regex.match(r'(?e)(?:cat){e<=1}', + 'caz').fuzzy_counts, (1, 0, 0)) + self.assertEqual(regex.match(r'(?b)(?:cat){e<=1}', + 'caz').fuzzy_counts, (1, 0, 
0)) + + self.assertEqual(regex.match(r'(?:cats){e<=2}', 'c ats').fuzzy_counts, + (1, 1, 0)) + self.assertEqual(regex.match(r'(?e)(?:cats){e<=2}', + 'c ats').fuzzy_counts, (0, 1, 0)) + self.assertEqual(regex.match(r'(?b)(?:cats){e<=2}', + 'c ats').fuzzy_counts, (0, 1, 0)) + + self.assertEqual(regex.match(r'(?:cats){e<=2}', + 'c a ts').fuzzy_counts, (0, 2, 0)) + self.assertEqual(regex.match(r'(?e)(?:cats){e<=2}', + 'c a ts').fuzzy_counts, (0, 2, 0)) + self.assertEqual(regex.match(r'(?b)(?:cats){e<=2}', + 'c a ts').fuzzy_counts, (0, 2, 0)) + + self.assertEqual(regex.match(r'(?:cats){e<=1}', 'c ats').fuzzy_counts, + (0, 1, 0)) + self.assertEqual(regex.match(r'(?e)(?:cats){e<=1}', + 'c ats').fuzzy_counts, (0, 1, 0)) + self.assertEqual(regex.match(r'(?b)(?:cats){e<=1}', + 'c ats').fuzzy_counts, (0, 1, 0)) + + # Hg issue 115: Infinite loop when processing backreferences + self.assertEqual(regex.findall(r'\bof ([a-z]+) of \1\b', + 'To make use of one of these modules'), []) + + # Hg issue 125: Reference to entire match (\g<0>) in + # Pattern.sub() doesn't work as of 2014.09.22 release. + self.assertEqual(regex.sub(r'x', r'\g<0>', 'x'), 'x') + + # Unreported issue: no such builtin as 'ascii' in Python 2. + self.assertEqual(bool(regex.match(r'a', 'a', regex.DEBUG)), True) + + # Hg issue 131: nested sets behaviour + self.assertEqual(regex.findall(r'(?V1)[[b-e]--cd]', 'abcdef'), ['b', + 'e']) + self.assertEqual(regex.findall(r'(?V1)[b-e--cd]', 'abcdef'), ['b', + 'e']) + self.assertEqual(regex.findall(r'(?V1)[[bcde]--cd]', 'abcdef'), ['b', + 'e']) + self.assertEqual(regex.findall(r'(?V1)[bcde--cd]', 'abcdef'), ['b', + 'e']) + + # Hg issue 132: index out of range on null property \p{} + self.assertRaisesRegex(regex.error, '^unknown property at position 4$', + lambda: regex.compile(r'\p{}')) + + # Issue 23692. 
+ self.assertEqual(regex.match('(?:()|(?(1)()|z)){2}(?(2)a|z)', + 'a').group(0, 1, 2), ('a', '', '')) + self.assertEqual(regex.match('(?:()|(?(1)()|z)){0,2}(?(2)a|z)', + 'a').group(0, 1, 2), ('a', '', '')) + + # Hg issue 137: Posix character class :punct: does not seem to be + # supported. + + # Posix compatibility as recommended here: + # http://www.unicode.org/reports/tr18/#Compatibility_Properties + + # Posix in Unicode. + chars = ''.join(chr(c) for c in range(0x10000)) + + self.assertEqual(ascii(''.join(regex.findall(r'''[[:alnum:]]+''', + chars))), ascii(''.join(regex.findall(r'''[\p{Alpha}\p{PosixDigit}]+''', + chars)))) + self.assertEqual(ascii(''.join(regex.findall(r'''[[:alpha:]]+''', + chars))), ascii(''.join(regex.findall(r'''\p{Alpha}+''', + chars)))) + self.assertEqual(ascii(''.join(regex.findall(r'''[[:ascii:]]+''', + chars))), ascii(''.join(regex.findall(r'''[\p{InBasicLatin}]+''', + chars)))) + self.assertEqual(ascii(''.join(regex.findall(r'''[[:blank:]]+''', + chars))), ascii(''.join(regex.findall(r'''[\p{gc=Space_Separator}\t]+''', + chars)))) + self.assertEqual(ascii(''.join(regex.findall(r'''[[:cntrl:]]+''', + chars))), ascii(''.join(regex.findall(r'''\p{gc=Control}+''', chars)))) + self.assertEqual(ascii(''.join(regex.findall(r'''[[:digit:]]+''', + chars))), ascii(''.join(regex.findall(r'''[0-9]+''', chars)))) + self.assertEqual(ascii(''.join(regex.findall(r'''[[:graph:]]+''', + chars))), ascii(''.join(regex.findall(r'''[^\p{Space}\p{gc=Control}\p{gc=Surrogate}\p{gc=Unassigned}]+''', + chars)))) + self.assertEqual(ascii(''.join(regex.findall(r'''[[:lower:]]+''', + chars))), ascii(''.join(regex.findall(r'''\p{Lower}+''', + chars)))) + self.assertEqual(ascii(''.join(regex.findall(r'''[[:print:]]+''', + chars))), ascii(''.join(regex.findall(r'''(?V1)[\p{Graph}\p{Blank}--\p{Cntrl}]+''', chars)))) + self.assertEqual(ascii(''.join(regex.findall(r'''[[:punct:]]+''', + chars))), + 
ascii(''.join(regex.findall(r'''(?V1)[\p{gc=Punctuation}\p{gc=Symbol}--\p{Alpha}]+''', + chars)))) + self.assertEqual(ascii(''.join(regex.findall(r'''[[:space:]]+''', + chars))), ascii(''.join(regex.findall(r'''\p{Whitespace}+''', + chars)))) + self.assertEqual(ascii(''.join(regex.findall(r'''[[:upper:]]+''', + chars))), ascii(''.join(regex.findall(r'''\p{Upper}+''', + chars)))) + self.assertEqual(ascii(''.join(regex.findall(r'''[[:word:]]+''', + chars))), ascii(''.join(regex.findall(r'''[\p{Alpha}\p{gc=Mark}\p{Digit}\p{gc=Connector_Punctuation}\p{Join_Control}]+''', + chars)))) + self.assertEqual(ascii(''.join(regex.findall(r'''[[:xdigit:]]+''', + chars))), ascii(''.join(regex.findall(r'''[0-9A-Fa-f]+''', + chars)))) + + # Posix in ASCII. + chars = bytes(range(0x100)) + + self.assertEqual(ascii(b''.join(regex.findall(br'''(?a)[[:alnum:]]+''', + chars))), ascii(b''.join(regex.findall(br'''(?a)[\p{Alpha}\p{PosixDigit}]+''', + chars)))) + self.assertEqual(ascii(b''.join(regex.findall(br'''(?a)[[:alpha:]]+''', + chars))), ascii(b''.join(regex.findall(br'''(?a)\p{Alpha}+''', chars)))) + self.assertEqual(ascii(b''.join(regex.findall(br'''(?a)[[:ascii:]]+''', + chars))), ascii(b''.join(regex.findall(br'''(?a)[\x00-\x7F]+''', chars)))) + self.assertEqual(ascii(b''.join(regex.findall(br'''(?a)[[:blank:]]+''', + chars))), ascii(b''.join(regex.findall(br'''(?a)[\p{gc=Space_Separator}\t]+''', + chars)))) + self.assertEqual(ascii(b''.join(regex.findall(br'''(?a)[[:cntrl:]]+''', + chars))), ascii(b''.join(regex.findall(br'''(?a)\p{gc=Control}+''', + chars)))) + self.assertEqual(ascii(b''.join(regex.findall(br'''(?a)[[:digit:]]+''', + chars))), ascii(b''.join(regex.findall(br'''(?a)[0-9]+''', chars)))) + self.assertEqual(ascii(b''.join(regex.findall(br'''(?a)[[:graph:]]+''', + chars))), ascii(b''.join(regex.findall(br'''(?a)[^\p{Space}\p{gc=Control}\p{gc=Surrogate}\p{gc=Unassigned}]+''', chars)))) + self.assertEqual(ascii(b''.join(regex.findall(br'''(?a)[[:lower:]]+''', + 
chars))), ascii(b''.join(regex.findall(br'''(?a)\p{Lower}+''', chars)))) + self.assertEqual(ascii(b''.join(regex.findall(br'''(?a)[[:print:]]+''', + chars))), ascii(b''.join(regex.findall(br'''(?aV1)[\p{Graph}\p{Blank}--\p{Cntrl}]+''', chars)))) + self.assertEqual(ascii(b''.join(regex.findall(br'''(?a)[[:punct:]]+''', + chars))), ascii(b''.join(regex.findall(br'''(?aV1)[\p{gc=Punctuation}\p{gc=Symbol}--\p{Alpha}]+''', + chars)))) + self.assertEqual(ascii(b''.join(regex.findall(br'''(?a)[[:space:]]+''', + chars))), ascii(b''.join(regex.findall(br'''(?a)\p{Whitespace}+''', chars)))) + self.assertEqual(ascii(b''.join(regex.findall(br'''(?a)[[:upper:]]+''', + chars))), ascii(b''.join(regex.findall(br'''(?a)\p{Upper}+''', chars)))) + self.assertEqual(ascii(b''.join(regex.findall(br'''(?a)[[:word:]]+''', + chars))), ascii(b''.join(regex.findall(br'''(?a)[\p{Alpha}\p{gc=Mark}\p{Digit}\p{gc=Connector_Punctuation}\p{Join_Control}]+''', chars)))) + self.assertEqual(ascii(b''.join(regex.findall(br'''(?a)[[:xdigit:]]+''', + chars))), ascii(b''.join(regex.findall(br'''(?a)[0-9A-Fa-f]+''', chars)))) + + # Hg issue 138: grapheme anchored search not working properly. + self.assertEqual(ascii(regex.search(r'\X$', 'ab\u2103').group()), + ascii('\u2103')) + + # Hg issue 139: Regular expression with multiple wildcards where first + # should match empty string does not always work. + self.assertEqual(regex.search("([^L]*)([^R]*R)", "LtR").groups(), ('', + 'LtR')) + + # Hg issue 140: Replace with REVERSE and groups has unexpected + # behavior. + self.assertEqual(regex.sub(r'(.)', r'x\1y', 'ab'), 'xayxby') + self.assertEqual(regex.sub(r'(?r)(.)', r'x\1y', 'ab'), 'xayxby') + self.assertEqual(regex.subf(r'(.)', 'x{1}y', 'ab'), 'xayxby') + self.assertEqual(regex.subf(r'(?r)(.)', 'x{1}y', 'ab'), 'xayxby') + + # Hg issue 141: Crash on a certain partial match. 
+ self.assertEqual(regex.fullmatch('(a)*abc', 'ab', + partial=True).span(), (0, 2)) + self.assertEqual(regex.fullmatch('(a)*abc', 'ab', + partial=True).partial, True) + + # Hg issue 143: Partial matches have incorrect span if prefix is '.' + # wildcard. + self.assertEqual(regex.search('OXRG', 'OOGOX', partial=True).span(), + (3, 5)) + self.assertEqual(regex.search('.XRG', 'OOGOX', partial=True).span(), + (3, 5)) + self.assertEqual(regex.search('.{1,3}XRG', 'OOGOX', + partial=True).span(), (1, 5)) + + # Hg issue 144: Latest version problem with matching 'R|R'. + self.assertEqual(regex.match('R|R', 'R').span(), (0, 1)) + + # Hg issue 146: Forced-fail (?!) works improperly in conditional. + self.assertEqual(regex.match(r'(.)(?(1)(?!))', 'xy'), None) + + # Groups cleared after failure. + self.assertEqual(regex.findall(r'(y)?(\d)(?(1)\b\B)', 'ax1y2z3b'), + [('', '1'), ('', '2'), ('', '3')]) + self.assertEqual(regex.findall(r'(y)?+(\d)(?(1)\b\B)', 'ax1y2z3b'), + [('', '1'), ('', '2'), ('', '3')]) + + # Hg issue 147: Fuzzy match can return match points beyond buffer end. + self.assertEqual([m.span() for m in regex.finditer(r'(?i)(?:error){e}', + 'regex failure')], [(0, 5), (5, 10), (10, 13), (13, 13)]) + self.assertEqual([m.span() for m in + regex.finditer(r'(?fi)(?:error){e}', 'regex failure')], [(0, 5), (5, + 10), (10, 13), (13, 13)]) + + # Hg issue 150: Have an option for POSIX-compatible longest match of + # alternates. + self.assertEqual(regex.search(r'(?p)\d+(\w(\d*)?|[eE]([+-]\d+))', + '10b12')[0], '10b12') + self.assertEqual(regex.search(r'(?p)\d+(\w(\d*)?|[eE]([+-]\d+))', + '10E+12')[0], '10E+12') + + self.assertEqual(regex.search(r'(?p)(\w|ae|oe|ue|ss)', 'ae')[0], 'ae') + self.assertEqual(regex.search(r'(?p)one(self)?(selfsufficient)?', + 'oneselfsufficient')[0], 'oneselfsufficient') + + # Hg issue 151: Request: \K. 
+ self.assertEqual(regex.search(r'(ab\Kcd)', 'abcd').group(0, 1), ('cd', + 'abcd')) + self.assertEqual(regex.findall(r'\w\w\K\w\w', 'abcdefgh'), ['cd', + 'gh']) + self.assertEqual(regex.findall(r'(\w\w\K\w\w)', 'abcdefgh'), ['abcd', + 'efgh']) + + self.assertEqual(regex.search(r'(?r)(ab\Kcd)', 'abcd').group(0, 1), + ('ab', 'abcd')) + self.assertEqual(regex.findall(r'(?r)\w\w\K\w\w', 'abcdefgh'), ['ef', + 'ab']) + self.assertEqual(regex.findall(r'(?r)(\w\w\K\w\w)', 'abcdefgh'), + ['efgh', 'abcd']) + + # Hg issue 152: Request: Request: (?(DEFINE)...). + self.assertEqual(regex.search(r'(?(DEFINE)(?\d+)(?\w+))(?&quant) (?&item)', + '5 elephants')[0], '5 elephants') + + self.assertEqual(regex.search(r'(?&routine)(?(DEFINE)(?.))', 'a').group('routine'), None) + self.assertEqual(regex.search(r'(?&routine)(?(DEFINE)(?.))', 'a').captures('routine'), ['a']) + + # Hg issue 153: Request: (*SKIP). + self.assertEqual(regex.search(r'12(*FAIL)|3', '123')[0], '3') + self.assertEqual(regex.search(r'(?r)12(*FAIL)|3', '123')[0], '3') + + self.assertEqual(regex.search(r'\d+(*PRUNE)\d', '123'), None) + self.assertEqual(regex.search(r'\d+(?=(*PRUNE))\d', '123')[0], '123') + self.assertEqual(regex.search(r'\d+(*PRUNE)bcd|[3d]', '123bcd')[0], + '123bcd') + self.assertEqual(regex.search(r'\d+(*PRUNE)bcd|[3d]', '123zzd')[0], + 'd') + self.assertEqual(regex.search(r'\d+?(*PRUNE)bcd|[3d]', '123bcd')[0], + '3bcd') + self.assertEqual(regex.search(r'\d+?(*PRUNE)bcd|[3d]', '123zzd')[0], + 'd') + self.assertEqual(regex.search(r'\d++(?<=3(*PRUNE))zzd|[4d]$', + '123zzd')[0], '123zzd') + self.assertEqual(regex.search(r'\d++(?<=3(*PRUNE))zzd|[4d]$', + '124zzd')[0], 'd') + self.assertEqual(regex.search(r'\d++(?<=(*PRUNE)3)zzd|[4d]$', + '124zzd')[0], 'd') + self.assertEqual(regex.search(r'\d++(?<=2(*PRUNE)3)zzd|[3d]$', + '124zzd')[0], 'd') + + self.assertEqual(regex.search(r'(?r)\d(*PRUNE)\d+', '123'), None) + self.assertEqual(regex.search(r'(?r)\d(?<=(*PRUNE))\d+', '123')[0], + '123') + 
self.assertEqual(regex.search(r'(?r)\d+(*PRUNE)bcd|[3d]', + '123bcd')[0], '123bcd') + self.assertEqual(regex.search(r'(?r)\d+(*PRUNE)bcd|[3d]', + '123zzd')[0], 'd') + self.assertEqual(regex.search(r'(?r)\d++(?<=3(*PRUNE))zzd|[4d]$', + '123zzd')[0], '123zzd') + self.assertEqual(regex.search(r'(?r)\d++(?<=3(*PRUNE))zzd|[4d]$', + '124zzd')[0], 'd') + self.assertEqual(regex.search(r'(?r)\d++(?<=(*PRUNE)3)zzd|[4d]$', + '124zzd')[0], 'd') + self.assertEqual(regex.search(r'(?r)\d++(?<=2(*PRUNE)3)zzd|[3d]$', + '124zzd')[0], 'd') + + self.assertEqual(regex.search(r'\d+(*SKIP)bcd|[3d]', '123bcd')[0], + '123bcd') + self.assertEqual(regex.search(r'\d+(*SKIP)bcd|[3d]', '123zzd')[0], + 'd') + self.assertEqual(regex.search(r'\d+?(*SKIP)bcd|[3d]', '123bcd')[0], + '3bcd') + self.assertEqual(regex.search(r'\d+?(*SKIP)bcd|[3d]', '123zzd')[0], + 'd') + self.assertEqual(regex.search(r'\d++(?<=3(*SKIP))zzd|[4d]$', + '123zzd')[0], '123zzd') + self.assertEqual(regex.search(r'\d++(?<=3(*SKIP))zzd|[4d]$', + '124zzd')[0], 'd') + self.assertEqual(regex.search(r'\d++(?<=(*SKIP)3)zzd|[4d]$', + '124zzd')[0], 'd') + self.assertEqual(regex.search(r'\d++(?<=2(*SKIP)3)zzd|[3d]$', + '124zzd')[0], 'd') + + self.assertEqual(regex.search(r'(?r)\d+(*SKIP)bcd|[3d]', '123bcd')[0], + '123bcd') + self.assertEqual(regex.search(r'(?r)\d+(*SKIP)bcd|[3d]', '123zzd')[0], + 'd') + self.assertEqual(regex.search(r'(?r)\d++(?<=3(*SKIP))zzd|[4d]$', + '123zzd')[0], '123zzd') + self.assertEqual(regex.search(r'(?r)\d++(?<=3(*SKIP))zzd|[4d]$', + '124zzd')[0], 'd') + self.assertEqual(regex.search(r'(?r)\d++(?<=(*SKIP)3)zzd|[4d]$', + '124zzd')[0], 'd') + self.assertEqual(regex.search(r'(?r)\d++(?<=2(*SKIP)3)zzd|[3d]$', + '124zzd')[0], 'd') + + # Hg issue 154: Segmentation fault 11 when working with an atomic group + text = """June 30, December 31, 2013 2012 +some words follow: +more words and numbers 1,234,567 9,876,542 +more words and numbers 1,234,567 9,876,542""" + self.assertEqual(len(regex.findall(r'(?2014|2013 
?2012)', text)), 1) + + # Hg issue 156: regression on atomic grouping + self.assertEqual(regex.match('1(?>2)', '12').span(), (0, 2)) + + # Hg issue 157: regression: segfault on complex lookaround + self.assertEqual(regex.match(r'(?V1w)(?=(?=[^A-Z]*+[A-Z])(?=[^a-z]*+[a-z]))(?=\D*+\d)(?=\p{Alphanumeric}*+\P{Alphanumeric})\A(?s:.){8,255}+\Z', + 'AAaa11!!')[0], 'AAaa11!!') + + # Hg issue 158: Group issue with (?(DEFINE)...) + TEST_REGEX = regex.compile(r'''(?smx) +(?(DEFINE) + (? + ^,[^,]+, + ) +) + +# Group 2 is defined on this line +^,([^,]+), + +(?:(?!(?&subcat)[\r\n]+(?&subcat)).)+ +''') + + TEST_DATA = ''' +,Cat 1, +,Brand 1, +some +thing +,Brand 2, +other +things +,Cat 2, +,Brand, +Some +thing +''' + + self.assertEqual([m.span(1, 2) for m in + TEST_REGEX.finditer(TEST_DATA)], [((-1, -1), (2, 7)), ((-1, -1), (54, + 59))]) + + # Hg issue 161: Unexpected fuzzy match results + self.assertEqual(regex.search('(abcdefgh){e}', + '******abcdefghijklmnopqrtuvwxyz', regex.BESTMATCH).span(), (6, 14)) + self.assertEqual(regex.search('(abcdefghi){e}', + '******abcdefghijklmnopqrtuvwxyz', regex.BESTMATCH).span(), (6, 15)) + + # Hg issue 163: allow lookarounds in conditionals. + self.assertEqual(regex.match(r'(?:(?=\d)\d+\b|\w+)', '123abc').span(), + (0, 6)) + self.assertEqual(regex.match(r'(?(?=\d)\d+\b|\w+)', '123abc'), None) + self.assertEqual(regex.search(r'(?(?<=love\s)you|(?<=hate\s)her)', + "I love you").span(), (7, 10)) + self.assertEqual(regex.findall(r'(?(?<=love\s)you|(?<=hate\s)her)', + "I love you but I don't hate her either"), ['you', 'her']) + + # Hg issue 180: bug of POSIX matching. 
+ self.assertEqual(regex.search(r'(?p)a*(.*?)', 'aaabbb').group(0, 1), + ('aaabbb', 'bbb')) + self.assertEqual(regex.search(r'(?p)a*(.*)', 'aaabbb').group(0, 1), + ('aaabbb', 'bbb')) + self.assertEqual(regex.sub(r'(?p)a*(.*?)', r'\1', 'aaabbb'), 'bbb') + self.assertEqual(regex.sub(r'(?p)a*(.*)', r'\1', 'aaabbb'), 'bbb') + + # Hg issue 192: Named lists reverse matching doesn't work with + # IGNORECASE and V1 + self.assertEqual(regex.match(r'(?irV0)\L', '21', kw=['1']).span(), + (1, 2)) + self.assertEqual(regex.match(r'(?irV1)\L', '21', kw=['1']).span(), + (1, 2)) + + # Hg issue 193: Alternation and .REVERSE flag. + self.assertEqual(regex.search('a|b', '111a222').span(), (3, 4)) + self.assertEqual(regex.search('(?r)a|b', '111a222').span(), (3, 4)) + + # Hg issue 194: .FULLCASE and Backreference + self.assertEqual(regex.search(r'(?if)<(CLI)><\1>', + '').span(), (0, 10)) + self.assertEqual(regex.search(r'(?if)<(CLI)><\1>', + '').span(), (0, 10)) + self.assertEqual(regex.search(r'(?ifr)<\1><(CLI)>', + '').span(), (0, 10)) + + # Hg issue 195: Pickle (or otherwise serial) the compiled regex + r = regex.compile(r'\L', options=['foo', 'bar']) + p = pickle.dumps(r) + r = pickle.loads(p) + self.assertEqual(r.match('foo').span(), (0, 3)) + + # Hg issue 196: Fuzzy matching on repeated regex not working as + # expected + self.assertEqual(regex.match('(x{6}){e<=1}', 'xxxxxx', + flags=regex.BESTMATCH).span(), (0, 6)) + self.assertEqual(regex.match('(x{6}){e<=1}', 'xxxxx', + flags=regex.BESTMATCH).span(), (0, 5)) + self.assertEqual(regex.match('(x{6}){e<=1}', 'x', + flags=regex.BESTMATCH), None) + self.assertEqual(regex.match('(?r)(x{6}){e<=1}', 'xxxxxx', + flags=regex.BESTMATCH).span(), (0, 6)) + self.assertEqual(regex.match('(?r)(x{6}){e<=1}', 'xxxxx', + flags=regex.BESTMATCH).span(), (0, 5)) + self.assertEqual(regex.match('(?r)(x{6}){e<=1}', 'x', + flags=regex.BESTMATCH), None) + + # Hg issue 197: ValueError in regex.compile + self.assertRaises(regex.error, lambda: + 
regex.compile(b'00000\\0\\00\\^\50\\00\\U05000000')) + + # Hg issue 198: ValueError in regex.compile + self.assertRaises(regex.error, lambda: regex.compile(b"{e', '22', aa=['121', + '22'])), True) + self.assertEqual(bool(regex.search(r'(?ri)\L', '22', aa=['121', + '22'])), True) + self.assertEqual(bool(regex.search(r'(?fi)\L', '22', aa=['121', + '22'])), True) + self.assertEqual(bool(regex.search(r'(?fri)\L', '22', aa=['121', + '22'])), True) + + # Hg issue 208: Named list, (?ri) flags, Backreference + self.assertEqual(regex.search(r'(?r)\1dog..(?<=(\L))$', 'ccdogcc', + aa=['bcb', 'cc']). span(), (0, 7)) + self.assertEqual(regex.search(r'(?ir)\1dog..(?<=(\L))$', + 'ccdogcc', aa=['bcb', 'cc']). span(), (0, 7)) + + # Hg issue 210: Fuzzy matching and Backreference + self.assertEqual(regex.search(r'(2)(?:\1{5}){e<=1}', + '3222212').span(), (1, 7)) + self.assertEqual(regex.search(r'(\d)(?:\1{5}){e<=1}', + '3222212').span(), (1, 7)) + + # Hg issue 211: Segmentation fault with recursive matches and atomic + # groups + self.assertEqual(regex.match(r'''\A(?P(?>\((?&whole)\)|[+\-]))\Z''', + '((-))').span(), (0, 5)) + self.assertEqual(regex.match(r'''\A(?P(?>\((?&whole)\)|[+\-]))\Z''', + '((-)+)'), None) + + # Hg issue 212: Unexpected matching difference with .*? between re and + # regex + self.assertEqual(regex.match(r"x.*? (.).*\1(.*)\1", + 'x |y| z|').span(), (0, 9)) + self.assertEqual(regex.match(r"\.sr (.*?) 
(.)(.*)\2(.*)\2(.*)", + r'.sr h |||').span(), (0, 35)) + + # Hg issue 213: Segmentation Fault + a = '"\\xF9\\x80\\xAEqdz\\x95L\\xA7\\x89[\\xFE \\x91)\\xF9]\\xDB\'\\x99\\x09=\\x00\\xFD\\x98\\x22\\xDD\\xF1\\xB6\\xC3 Z\\xB6gv\\xA5x\\x93P\\xE1r\\x14\\x8Cv\\x0C\\xC0w\\x15r\\xFFc%" ' + py_regex_pattern = r'''(?P((?>(?"(?>\\.|[^\\"]+)+"|""|(?>'(?>\\.|[^\\']+)+')|''|(?>`(?>\\.|[^\\`]+)+`)|``)))) (?P((?>(?"(?>\\.|[^\\"]+)+"|""|(?>'(?>\\.|[^\\']+)+')|''|(?>`(?>\\.|[^\\`]+)+`)|``))))''' + self.assertEqual(bool(regex.search(py_regex_pattern, a)), False) + + # Hg Issue 216: Invalid match when using negative lookbehind and pipe + self.assertEqual(bool(regex.match('foo(?<=foo)', 'foo')), True) + self.assertEqual(bool(regex.match('foo(?.*\!\w*\:.*)|(?P.*))', + '!')), False) + + # Hg issue 220: Misbehavior of group capture with OR operand + self.assertEqual(regex.match(r'\w*(ea)\w*|\w*e(?!a)\w*', + 'easier').groups(), ('ea', )) + + # Hg issue 225: BESTMATCH in fuzzy match not working + self.assertEqual(regex.search('(^1234$){i,d}', '12234', + regex.BESTMATCH).span(), (0, 5)) + self.assertEqual(regex.search('(^1234$){i,d}', '12234', + regex.BESTMATCH).fuzzy_counts, (0, 1, 0)) + + self.assertEqual(regex.search('(^1234$){s,i,d}', '12234', + regex.BESTMATCH).span(), (0, 5)) + self.assertEqual(regex.search('(^1234$){s,i,d}', '12234', + regex.BESTMATCH).fuzzy_counts, (0, 1, 0)) + + # Hg issue 226: Error matching at start of string + self.assertEqual(regex.search('(^123$){s,i,d}', 'xxxxxxxx123', + regex.BESTMATCH).span(), (0, 11)) + self.assertEqual(regex.search('(^123$){s,i,d}', 'xxxxxxxx123', + regex.BESTMATCH).fuzzy_counts, (0, 8, 0)) + + # Hg issue 227: Incorrect behavior for ? operator with UNICODE + + # IGNORECASE + self.assertEqual(regex.search(r'a?yz', 'xxxxyz', flags=regex.FULLCASE | + regex.IGNORECASE).span(), (4, 6)) + + # Hg issue 230: Is it a bug of (?(DEFINE)...) 
+ self.assertEqual(regex.findall(r'(?:(?![a-d]).)+', 'abcdefgh'), + ['efgh']) + self.assertEqual(regex.findall(r'''(?(DEFINE)(?P(?:(?![a-d]).)))(?&mydef)+''', + 'abcdefgh'), ['efgh']) + + # Hg issue 238: Not fully re backward compatible + self.assertEqual(regex.findall(r'((\w{1,3})(\.{2,10})){1,3}', + '"Erm....yes. T..T...Thank you for that."'), [('Erm....', 'Erm', + '....'), ('T...', 'T', '...')]) + self.assertEqual(regex.findall(r'((\w{1,3})(\.{2,10})){3}', + '"Erm....yes. T..T...Thank you for that."'), []) + self.assertEqual(regex.findall(r'((\w{1,3})(\.{2,10})){2}', + '"Erm....yes. T..T...Thank you for that."'), [('T...', 'T', '...')]) + self.assertEqual(regex.findall(r'((\w{1,3})(\.{2,10})){1}', + '"Erm....yes. T..T...Thank you for that."'), [('Erm....', 'Erm', + '....'), ('T..', 'T', '..'), ('T...', 'T', '...')]) + + # Hg issue 247: Unexpected result with fuzzy matching and lookahead + # expression + self.assertEqual(regex.search(r'(?:ESTONIA(?!\w)){e<=1}', + 'ESTONIAN WORKERS').group(), 'ESTONIAN') + self.assertEqual(regex.search(r'(?:ESTONIA(?=\W)){e<=1}', + 'ESTONIAN WORKERS').group(), 'ESTONIAN') + + self.assertEqual(regex.search(r'(?:(?.))(?&func)', + 'abc').groups(), (None, )) + self.assertEqual(regex.search(r'(?(DEFINE)(?.))(?&func)', + 'abc').groupdict(), {'func': None}) + self.assertEqual(regex.search(r'(?(DEFINE)(?.))(?&func)', + 'abc').capturesdict(), {'func': ['a']}) + + self.assertEqual(regex.search(r'(?(DEFINE)(?.))(?=(?&func))', + 'abc').groups(), (None, )) + self.assertEqual(regex.search(r'(?(DEFINE)(?.))(?=(?&func))', + 'abc').groupdict(), {'func': None}) + self.assertEqual(regex.search(r'(?(DEFINE)(?.))(?=(?&func))', + 'abc').capturesdict(), {'func': ['a']}) + + self.assertEqual(regex.search(r'(?(DEFINE)(?.)).(?<=(?&func))', + 'abc').groups(), (None, )) + self.assertEqual(regex.search(r'(?(DEFINE)(?.)).(?<=(?&func))', + 'abc').groupdict(), {'func': None}) + self.assertEqual(regex.search(r'(?(DEFINE)(?.)).(?<=(?&func))', + 
'abc').capturesdict(), {'func': ['a']}) + + # Hg issue 271: Comment logic different between Re and Regex + self.assertEqual(bool(regex.match(r'ab(?#comment\))cd', 'abcd')), True) + + # Hg issue 276: Partial Matches yield incorrect matches and bounds + self.assertEqual(regex.search(r'[a-z]+ [a-z]*?:', 'foo bar', + partial=True).span(), (0, 7)) + self.assertEqual(regex.search(r'(?r):[a-z]*? [a-z]+', 'foo bar', + partial=True).span(), (0, 7)) + + # Hg issue 291: Include Script Extensions as a supported Unicode property + self.assertEqual(bool(regex.match(r'(?u)\p{Script:Beng}', + '\u09EF')), True) + self.assertEqual(bool(regex.match(r'(?u)\p{Script:Bengali}', + '\u09EF')), True) + self.assertEqual(bool(regex.match(r'(?u)\p{Script_Extensions:Bengali}', + '\u09EF')), True) + self.assertEqual(bool(regex.match(r'(?u)\p{Script_Extensions:Beng}', + '\u09EF')), True) + self.assertEqual(bool(regex.match(r'(?u)\p{Script_Extensions:Cakm}', + '\u09EF')), True) + self.assertEqual(bool(regex.match(r'(?u)\p{Script_Extensions:Sylo}', + '\u09EF')), True) + + # Hg issue #293: scx (Script Extensions) property currently matches + # incorrectly + self.assertEqual(bool(regex.match(r'(?u)\p{scx:Latin}', 'P')), True) + self.assertEqual(bool(regex.match(r'(?u)\p{scx:Ahom}', 'P')), False) + self.assertEqual(bool(regex.match(r'(?u)\p{scx:Common}', '4')), True) + self.assertEqual(bool(regex.match(r'(?u)\p{scx:Caucasian_Albanian}', '4')), + False) + self.assertEqual(bool(regex.match(r'(?u)\p{scx:Arabic}', '\u062A')), True) + self.assertEqual(bool(regex.match(r'(?u)\p{scx:Balinese}', '\u062A')), + False) + self.assertEqual(bool(regex.match(r'(?u)\p{scx:Devanagari}', '\u091C')), + True) + self.assertEqual(bool(regex.match(r'(?u)\p{scx:Batak}', '\u091C')), False) + + # Hg issue 296: Group references are not taken into account when group is reporting the last match + self.assertEqual(regex.fullmatch('(?P.)*(?&x)', 'abc').captures('x'), + ['a', 'b', 'c']) + 
self.assertEqual(regex.fullmatch('(?P.)*(?&x)', 'abc').group('x'), + 'b') + + self.assertEqual(regex.fullmatch('(?P.)(?P.)(?P.)', + 'abc').captures('x'), ['a', 'b', 'c']) + self.assertEqual(regex.fullmatch('(?P.)(?P.)(?P.)', + 'abc').group('x'), 'c') + + # Hg issue 299: Partial gives misleading results with "open ended" regexp + self.assertEqual(regex.match('(?:ab)*', 'ab', partial=True).partial, + False) + self.assertEqual(regex.match('(?:ab)*', 'abab', partial=True).partial, + False) + self.assertEqual(regex.match('(?:ab)*?', '', partial=True).partial, + False) + self.assertEqual(regex.match('(?:ab)*+', 'ab', partial=True).partial, + False) + self.assertEqual(regex.match('(?:ab)*+', 'abab', partial=True).partial, + False) + self.assertEqual(regex.match('(?:ab)+', 'ab', partial=True).partial, + False) + self.assertEqual(regex.match('(?:ab)+', 'abab', partial=True).partial, + False) + self.assertEqual(regex.match('(?:ab)+?', 'ab', partial=True).partial, + False) + self.assertEqual(regex.match('(?:ab)++', 'ab', partial=True).partial, + False) + self.assertEqual(regex.match('(?:ab)++', 'abab', partial=True).partial, + False) + + self.assertEqual(regex.match('(?r)(?:ab)*', 'ab', partial=True).partial, + False) + self.assertEqual(regex.match('(?r)(?:ab)*', 'abab', partial=True).partial, + False) + self.assertEqual(regex.match('(?r)(?:ab)*?', '', partial=True).partial, + False) + self.assertEqual(regex.match('(?r)(?:ab)*+', 'ab', partial=True).partial, + False) + self.assertEqual(regex.match('(?r)(?:ab)*+', 'abab', partial=True).partial, + False) + self.assertEqual(regex.match('(?r)(?:ab)+', 'ab', partial=True).partial, + False) + self.assertEqual(regex.match('(?r)(?:ab)+', 'abab', partial=True).partial, + False) + self.assertEqual(regex.match('(?r)(?:ab)+?', 'ab', partial=True).partial, + False) + self.assertEqual(regex.match('(?r)(?:ab)++', 'ab', partial=True).partial, + False) + self.assertEqual(regex.match('(?r)(?:ab)++', 'abab', partial=True).partial, + False) + + 
self.assertEqual(regex.match('a*', '', partial=True).partial, False) + self.assertEqual(regex.match('a*?', '', partial=True).partial, False) + self.assertEqual(regex.match('a*+', '', partial=True).partial, False) + self.assertEqual(regex.match('a+', '', partial=True).partial, True) + self.assertEqual(regex.match('a+?', '', partial=True).partial, True) + self.assertEqual(regex.match('a++', '', partial=True).partial, True) + self.assertEqual(regex.match('a+', 'a', partial=True).partial, False) + self.assertEqual(regex.match('a+?', 'a', partial=True).partial, False) + self.assertEqual(regex.match('a++', 'a', partial=True).partial, False) + + self.assertEqual(regex.match('(?r)a*', '', partial=True).partial, False) + self.assertEqual(regex.match('(?r)a*?', '', partial=True).partial, False) + self.assertEqual(regex.match('(?r)a*+', '', partial=True).partial, False) + self.assertEqual(regex.match('(?r)a+', '', partial=True).partial, True) + self.assertEqual(regex.match('(?r)a+?', '', partial=True).partial, True) + self.assertEqual(regex.match('(?r)a++', '', partial=True).partial, True) + self.assertEqual(regex.match('(?r)a+', 'a', partial=True).partial, False) + self.assertEqual(regex.match('(?r)a+?', 'a', partial=True).partial, False) + self.assertEqual(regex.match('(?r)a++', 'a', partial=True).partial, False) + + self.assertEqual(regex.match(r"(?:\s*\w+'*)+", 'whatever', partial=True).partial, + False) + + # Hg issue 300: segmentation fault + pattern = ('(?PGGCGTCACACTTTGCTATGCCATAGCAT[AG]TTTATCCATAAGA' + 'TTAGCGGATCCTACCTGACGCTTTTTATCGCAACTCTCTACTGTTTCTCCATAACAGAACATATTGA' + 'CTATCCGGTATTACCCGGCATGACAGGAGTAAAA){e<=1}' + '(?P[ACGT]{1059}){e<=2}' + '(?PTAATCGTCTTGTTTGATACACAAGGGTCGCATCTGCGGCCCTTTTGCTTTTTTAAG' + 'TTGTAAGGATATGCCATTCTAGA){e<=0}' + '(?P[ACGT]{18}){e<=0}' + '(?PAGATCGG[CT]AGAGCGTCGTGTAGGGAAAGAGTGTGG){e<=1}') + + text = ('GCACGGCGTCACACTTTGCTATGCCATAGCATATTTATCCATAAGATTAGCGGATCCTACC' + 'TGACGCTTTTTATCGCAACTCTCTACTGTTTCTCCATAACAGAACATATTGACTATCCGGTATTACC' + 
'CGGCATGACAGGAGTAAAAATGGCTATCGACGAAAACAAACAGAAAGCGTTGGCGGCAGCACTGGGC' + 'CAGATTGAGAAACAATTTGGTAAAGGCTCCATCATGCGCCTGGGTGAAGACCGTTCCATGGATGTGG' + 'AAACCATCTCTACCGGTTCGCTTTCACTGGATATCGCGCTTGGGGCAGGTGGTCTGCCGATGGGCCG' + 'TATCGTCGAAATCTACGGACCGGAATCTTCCGGTAAAACCACGCTGACGCTGCAGGTGATCGCCGCA' + 'GCGCAGCGTGAAGGTAAAACCTGTGCGTTTATCGATGCTGAACACGCGCTGGACCCAATCTACGCAC' + 'GTAAACTGGGCGTCGATATCGACAACCTGCTGTGCTCCCAGCCGGACACCGGCGAGCAGGCACTGGA' + 'AATCTGTGACGCCCTGGCGCGTTCTGGCGCAGTAGACGTTATCGTCGTTGACTCCGTGGCGGCACTG' + 'ACGCCGAAAGCGGAAATCGAAGGCGAAATCGGCGACTCTCATATGGGCCTTGCGGCACGTATGATGA' + 'GCCAGGCGATGCGTAAGCTGGCGGGTAACCTGAAGCAGTCCAACACGCTGCTGATCTTCATCAACCC' + 'CATCCGTATGAAAATTGGTGTGATGTTCGGCAACCCGGAAACCACTTACCGGTGGTAACGCGCTGAA' + 'ATTCTACGCCTCTGTTCGTCTCGACATCCGTTAAATCGGCGCGGTGAAAGAGGGCGAAAACGTGGTG' + 'GGTAGCGAAACCCGCGTGAAAGTGGTGAAGAACAAAATCGCTGCGCCGTTTAAACAGGCTGAATTCC' + 'AGATCCTCTACGGCGAAGGTATCAACTTCTACCCCGAACTGGTTGACCTGGGCGTAAAAGAGAAGCT' + 'GATCGAGAAAGCAGGCGCGTGGTACAGCTACAAAGGTGAGAAGATCGGTCAGGGTAAAGCGAATGCG' + 'ACTGCCTGGCTGAAATTTAACCCGGAAACCGCGAAAGAGATCGAGTGAAAAGTACGTGAGTTGCTGC' + 'TGAGCAACCCGAACTCAACGCCGGATTTCTCTGTAGATGATAGCGAAGGCGTAGCAGAAACTAACGA' + 'AGATTTTTAATCGTCTTGTTTGATACACAAGGGTCGCATCTGCGGCCCTTTTGCTTTTTTAAGTTGT' + 'AAGGATATGCCATTCTAGACAGTTAACACACCAACAAAGATCGGTAGAGCGTCGTGTAGGGAAAGAG' + 'TGTGGTACC') + + m = regex.search(pattern, text, flags=regex.BESTMATCH) + self.assertEqual(m.fuzzy_counts, (0, 1, 0)) + self.assertEqual(m.fuzzy_changes, ([], [1206], [])) + + # Hg issue 306: Fuzzy match parameters not respecting quantifier scope + self.assertEqual(regex.search(r'(?e)(dogf(((oo){e<1})|((00){e<1}))d){e<2}', + 'dogfood').fuzzy_counts, (0, 0, 0)) + self.assertEqual(regex.search(r'(?e)(dogf(((oo){e<1})|((00){e<1}))d){e<2}', + 'dogfoot').fuzzy_counts, (1, 0, 0)) + + # Hg issue 312: \X not matching graphemes with zero-width-joins + self.assertEqual(regex.findall(r'\X', + '\U0001F468\u200D\U0001F469\u200D\U0001F467\u200D\U0001F466'), + 
['\U0001F468\u200D\U0001F469\u200D\U0001F467\u200D\U0001F466']) + + # Hg issue 320: Abnormal performance + self.assertEqual(bool(regex.search(r'(?=a)a', 'a')), True) + self.assertEqual(bool(regex.search(r'(?!b)a', 'a')), True) + + # Hg issue 327: .fullmatch() causes MemoryError + self.assertEqual(regex.fullmatch(r'((\d)*?)*?', '123').span(), (0, 3)) + + # Hg issue 329: Wrong group matches when question mark quantifier is used within a look behind + self.assertEqual(regex.search(r'''(?(DEFINE)(?(?THIS_SHOULD_NOT_MATCHx?)|(?right))).*(?<=(?&mydef).*)''', + 'x right').capturesdict(), {'mydef': ['right'], 'wrong': [], 'right': + ['right']}) + + # Hg issue 338: specifying allowed characters when fuzzy-matching + self.assertEqual(bool(regex.match(r'(?:cat){e<=1:[u]}', 'cut')), True) + self.assertEqual(bool(regex.match(r'(?:cat){e<=1:u}', 'cut')), True) + + # Hg issue 353: fuzzy changes negative indexes + self.assertEqual(regex.search(r'(?be)(AGTGTTCCCCGCGCCAGCGGGGATAAACCG){s<=5,i<=5,d<=5,s+i+d<=10}', + 'TTCCCCGCGCCAGCGGGGATAAACCG').fuzzy_changes, ([], [], [0, 1, 3, 5])) + + # Git issue 364: Contradictory values in fuzzy_counts and fuzzy_changes + self.assertEqual(regex.match(r'(?:bc){e}', 'c').fuzzy_counts, (1, 0, + 1)) + self.assertEqual(regex.match(r'(?:bc){e}', 'c').fuzzy_changes, ([0], + [], [1])) + self.assertEqual(regex.match(r'(?e)(?:bc){e}', 'c').fuzzy_counts, (0, + 0, 1)) + self.assertEqual(regex.match(r'(?e)(?:bc){e}', 'c').fuzzy_changes, + ([], [], [0])) + self.assertEqual(regex.match(r'(?b)(?:bc){e}', 'c').fuzzy_counts, (0, + 0, 1)) + self.assertEqual(regex.match(r'(?b)(?:bc){e}', 'c').fuzzy_changes, + ([], [], [0])) + + # Git issue 370: Confusions about Fuzzy matching behavior + self.assertEqual(regex.match('(?e)(?:^(\\$ )?\\d{1,3}(,\\d{3})*(\\.\\d{2})$){e}', + '$ 10,112.111.12').fuzzy_counts, (6, 0, 5)) + self.assertEqual(regex.match('(?e)(?:^(\\$ )?\\d{1,3}(,\\d{3})*(\\.\\d{2})$){s<=1}', + '$ 10,112.111.12').fuzzy_counts, (1, 0, 0)) + 
self.assertEqual(regex.match('(?e)(?:^(\\$ )?\\d{1,3}(,\\d{3})*(\\.\\d{2})$){s<=1,i<=1,d<=1}', + '$ 10,112.111.12').fuzzy_counts, (1, 0, 0)) + self.assertEqual(regex.match('(?e)(?:^(\\$ )?\\d{1,3}(,\\d{3})*(\\.\\d{2})$){s<=3}', + '$ 10,1a2.111.12').fuzzy_counts, (2, 0, 0)) + self.assertEqual(regex.match('(?e)(?:^(\\$ )?\\d{1,3}(,\\d{3})*(\\.\\d{2})$){s<=2}', + '$ 10,1a2.111.12').fuzzy_counts, (2, 0, 0)) + + self.assertEqual(regex.fullmatch(r'(?e)(?:0?,0(?:,0)?){s<=1,d<=1}', + ',0;0').fuzzy_counts, (1, 0, 0)) + self.assertEqual(regex.fullmatch(r'(?e)(?:0??,0(?:,0)?){s<=1,d<=1}', + ',0;0').fuzzy_counts, (1, 0, 0)) + + # Git issue 371: Specifying character set when fuzzy-matching allows characters not in the set + self.assertEqual(regex.search(r"\b(?e)(?:\d{6,20}){i<=5:[\-\\\/]}\b", + "cat dog starting at 00:01132.000. hello world"), None) + + # Git issue 385: Comments in expressions + self.assertEqual(bool(regex.compile('(?#)')), True) + self.assertEqual(bool(regex.compile('(?x)(?#)')), True) + + # Git issue 394: Unexpected behaviour in fuzzy matching with limited character set with IGNORECASE flag + self.assertEqual(regex.findall(r'(\d+){i<=2:[ab]}', '123X4Y5'), + ['123', '4', '5']) + self.assertEqual(regex.findall(r'(?i)(\d+){i<=2:[ab]}', '123X4Y5'), + ['123', '4', '5']) + + # Git issue 403: Fuzzy matching with wrong distance (unnecessary substitutions) + self.assertEqual(regex.match(r'^(test){e<=5}$', 'terstin', + flags=regex.B).fuzzy_counts, (0, 3, 0)) + + # Git issue 408: regex fails with a quantified backreference but succeeds with repeated backref + self.assertEqual(bool(regex.match(r"(?:(x*)\1\1\1)*x$", "x" * 5)), True) + self.assertEqual(bool(regex.match(r"(?:(x*)\1{3})*x$", "x" * 5)), True) + + # Git issue 415: Fuzzy character restrictions don't apply to insertions at "right edge" + self.assertEqual(regex.match(r't(?:es){s<=1:\d}t', 'te5t').group(), + 'te5t') + self.assertEqual(regex.match(r't(?:es){s<=1:\d}t', 'tezt'), None) + 
self.assertEqual(regex.match(r't(?:es){i<=1:\d}t', 'tes5t').group(), + 'tes5t') + self.assertEqual(regex.match(r't(?:es){i<=1:\d}t', 'teszt'), None) + self.assertEqual(regex.match(r't(?:es){i<=1:\d}t', + 'tes5t').fuzzy_changes, ([], [3], [])) + self.assertEqual(regex.match(r't(es){i<=1,0.*)(?PCTTCC){e<=1}(?P([ACGT]){4,6})(?PCAATACCGACTCCTCACTGTGT){e<=2}(?P([ACGT]){0,6}$)' + + m = regex.match(pattern, sequence, flags=regex.BESTMATCH) + self.assertEqual(m.span(), (0, 50)) + self.assertEqual(m.groupdict(), {'insert': 'TTCAGACGTGTGCT', 'anchor': 'CTTCC', 'umi': 'GATCT', 'sid': 'CAATACCGACTCCTCACTGTGT', 'end': 'GTCT'}) + + m = regex.match(pattern, sequence, flags=regex.ENHANCEMATCH) + self.assertEqual(m.span(), (0, 50)) + self.assertEqual(m.groupdict(), {'insert': 'TTCAGACGTGTGCT', 'anchor': 'CTTCC', 'umi': 'GATCT', 'sid': 'CAATACCGACTCCTCACTGTGT', 'end': 'GTCT'}) + + # Git issue 433: Disagreement between fuzzy_counts and fuzzy_changes + pattern = r'(?P.*)(?PAACACTGG){e<=1}(?P([AT][CG]){5}){e<=2}(?PGTAACCGAAG){e<=2}(?P([ACGT]){0,6}$)' + + sequence = 'GGAAAACACTGGTCTCAGTCTCGTAACCGAAGTGGTCG' + m = regex.match(pattern, sequence, flags=regex.BESTMATCH) + self.assertEqual(m.fuzzy_counts, (0, 0, 0)) + self.assertEqual(m.fuzzy_changes, ([], [], [])) + + sequence = 'GGAAAACACTGGTCTCAGTCTCGTCCCCGAAGTGGTCG' + m = regex.match(pattern, sequence, flags=regex.BESTMATCH) + self.assertEqual(m.fuzzy_counts, (2, 0, 0)) + self.assertEqual(m.fuzzy_changes, ([24, 25], [], [])) + + # Git issue 439: Unmatched groups: sub vs subf + self.assertEqual(regex.sub(r'(test1)|(test2)', r'matched: \1\2', 'test1'), 'matched: test1') + self.assertEqual(regex.subf(r'(test1)|(test2)', r'matched: {1}{2}', 'test1'), 'matched: test1') + self.assertEqual(regex.search(r'(test1)|(test2)', 'matched: test1').expand(r'matched: \1\2'), 'matched: test1'), + self.assertEqual(regex.search(r'(test1)|(test2)', 'matched: test1').expandf(r'matched: {1}{2}'), 'matched: test1') + + # Git issue 442: Fuzzy regex matching 
doesn't seem to test insertions correctly + self.assertEqual(regex.search(r"(?:\bha\b){i:[ ]}", "having"), None) + self.assertEqual(regex.search(r"(?:\bha\b){i:[ ]}", "having", flags=regex.I), None) + + # Git issue 467: Scoped inline flags 'a', 'u' and 'L' affect global flags + self.assertEqual(regex.match(r'(?a:\w)\w', 'd\N{CYRILLIC SMALL LETTER ZHE}').span(), (0, 2)) + self.assertEqual(regex.match(r'(?a:\w)(?u:\w)', 'd\N{CYRILLIC SMALL LETTER ZHE}').span(), (0, 2)) + + # Git issue 473: Emoji classified as letter + self.assertEqual(regex.match(r'^\p{LC}+$', '\N{SMILING CAT FACE WITH OPEN MOUTH}'), None) + self.assertEqual(regex.match(r'^\p{So}+$', '\N{SMILING CAT FACE WITH OPEN MOUTH}').span(), (0, 1)) + + # Git issue 474: regex has no equivalent to `re.Match.groups()` for captures + self.assertEqual(regex.match(r'(.)+', 'abc').allcaptures(), (['abc'], ['a', 'b', 'c'])) + self.assertEqual(regex.match(r'(.)+', 'abc').allspans(), ([(0, 3)], [(0, 1), (1, 2), (2, 3)])) + + # Git issue 477: \v for vertical spacing + self.assertEqual(bool(regex.fullmatch(r'\p{HorizSpace}+', '\t \xA0\u1680\u180E\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200A\u202F\u205F\u3000')), True) + self.assertEqual(bool(regex.fullmatch(r'\p{VertSpace}+', '\n\v\f\r\x85\u2028\u2029')), True) + + # Git issue 479: Segmentation fault when using conditional pattern + self.assertEqual(regex.match(r'(?(?<=A)|(?(?![^B])C|D))', 'A'), None) + self.assertEqual(regex.search(r'(?(?<=A)|(?(?![^B])C|D))', 'A').span(), (1, 1)) + + # Git issue 494: Backtracking failure matching regex ^a?(a?)b?c\1$ against string abca + self.assertEqual(regex.search(r"^a?(a?)b?c\1$", "abca").span(), (0, 4)) + + # Git issue 498: Conditional negative lookahead inside positive lookahead fails to match + self.assertEqual(regex.match(r'(?(?=a).|..)', 'ab').span(), (0, 1)) + self.assertEqual(regex.match(r'(?(?=b).|..)', 'ab').span(), (0, 2)) + self.assertEqual(regex.match(r'(?(?!a).|..)', 'ab').span(), (0, 2)) + 
self.assertEqual(regex.match(r'(?(?!b).|..)', 'ab').span(), (0, 1)) + + # Git issue 525: segfault when fuzzy matching empty list + self.assertEqual(regex.match(r"(\L){e<=5}", "blah", foo=[]).span(), (0, 0)) + + # Git issue 527: `VERBOSE`/`X` flag breaks `\N` escapes + self.assertEqual(regex.compile(r'\N{LATIN SMALL LETTER A}').match('a').span(), (0, 1)) + self.assertEqual(regex.compile(r'\N{LATIN SMALL LETTER A}', flags=regex.X).match('a').span(), (0, 1)) + + # Git issue 539: Bug: Partial matching fails on a simple example + self.assertEqual(regex.match(r"[^/]*b/ccc", "b/ccc", partial=True).span(), (0, 5)) + self.assertEqual(regex.match(r"[^/]*b/ccc", "b/ccb", partial=True), None) + self.assertEqual(regex.match(r"[^/]*b/ccc", "b/cc", partial=True).span(), (0, 4)) + self.assertEqual(regex.match(r"[^/]*b/xyz", "b/xy", partial=True).span(), (0, 4)) + self.assertEqual(regex.match(r"[^/]*b/xyz", "b/yz", partial=True), None) + + self.assertEqual(regex.match(r"(?i)[^/]*b/ccc", "b/ccc", partial=True).span(), (0, 5)) + self.assertEqual(regex.match(r"(?i)[^/]*b/ccc", "b/ccb", partial=True), None) + self.assertEqual(regex.match(r"(?i)[^/]*b/ccc", "b/cc", partial=True).span(), (0, 4)) + self.assertEqual(regex.match(r"(?i)[^/]*b/xyz", "b/xy", partial=True).span(), (0, 4)) + self.assertEqual(regex.match(r"(?i)[^/]*b/xyz", "b/yz", partial=True), None) + + # Git issue 546: Partial match not working in some instances with non-greedy capture + self.assertEqual(bool(regex.match(r'.*?', '<', partial=True)), True) + self.assertEqual(bool(regex.match(r'.*?', '.*?', '', partial=True)), True) + self.assertEqual(bool(regex.match(r'.*?', 'x', partial=True)), True) + self.assertEqual(bool(regex.match(r'.*?', 'xyz abc', partial=True)), True) + self.assertEqual(bool(regex.match(r'.*?', 'xyz abc foo', partial=True)), True) + self.assertEqual(bool(regex.match(r'.*?', 'xyz abc foo ', partial=True)), True) + self.assertEqual(bool(regex.match(r'.*?', 'xyz abc foo bar', partial=True)), True) + + def 
test_fuzzy_ext(self): + self.assertEqual(bool(regex.fullmatch(r'(?r)(?:a){e<=1:[a-z]}', 'e')), + True) + self.assertEqual(bool(regex.fullmatch(r'(?:a){e<=1:[a-z]}', 'e')), + True) + self.assertEqual(bool(regex.fullmatch(r'(?:a){e<=1:[a-z]}', '-')), + False) + self.assertEqual(bool(regex.fullmatch(r'(?r)(?:a){e<=1:[a-z]}', '-')), + False) + + self.assertEqual(bool(regex.fullmatch(r'(?:a){e<=1:[a-z]}', 'ae')), + True) + self.assertEqual(bool(regex.fullmatch(r'(?r)(?:a){e<=1:[a-z]}', + 'ae')), True) + self.assertEqual(bool(regex.fullmatch(r'(?:a){e<=1:[a-z]}', 'a-')), + False) + self.assertEqual(bool(regex.fullmatch(r'(?r)(?:a){e<=1:[a-z]}', + 'a-')), False) + + self.assertEqual(bool(regex.fullmatch(r'(?:ab){e<=1:[a-z]}', 'ae')), + True) + self.assertEqual(bool(regex.fullmatch(r'(?r)(?:ab){e<=1:[a-z]}', + 'ae')), True) + self.assertEqual(bool(regex.fullmatch(r'(?:ab){e<=1:[a-z]}', 'a-')), + False) + self.assertEqual(bool(regex.fullmatch(r'(?r)(?:ab){e<=1:[a-z]}', + 'a-')), False) + + self.assertEqual(bool(regex.fullmatch(r'(a)\1{e<=1:[a-z]}', 'ae')), + True) + self.assertEqual(bool(regex.fullmatch(r'(?r)\1{e<=1:[a-z]}(a)', + 'ea')), True) + self.assertEqual(bool(regex.fullmatch(r'(a)\1{e<=1:[a-z]}', 'a-')), + False) + self.assertEqual(bool(regex.fullmatch(r'(?r)\1{e<=1:[a-z]}(a)', + '-a')), False) + + self.assertEqual(bool(regex.fullmatch(r'(?fiu)(?:\N{LATIN SMALL LETTER SHARP S}){e<=1:[a-z]}', + 'ts')), True) + self.assertEqual(bool(regex.fullmatch(r'(?fiu)(?:\N{LATIN SMALL LETTER SHARP S}){e<=1:[a-z]}', + 'st')), True) + self.assertEqual(bool(regex.fullmatch(r'(?firu)(?:\N{LATIN SMALL LETTER SHARP S}){e<=1:[a-z]}', + 'st')), True) + self.assertEqual(bool(regex.fullmatch(r'(?firu)(?:\N{LATIN SMALL LETTER SHARP S}){e<=1:[a-z]}', + 'ts')), True) + self.assertEqual(bool(regex.fullmatch(r'(?fiu)(?:\N{LATIN SMALL LETTER SHARP S}){e<=1:[a-z]}', + '-s')), False) + self.assertEqual(bool(regex.fullmatch(r'(?fiu)(?:\N{LATIN SMALL LETTER SHARP S}){e<=1:[a-z]}', + 's-')), False) 
+ self.assertEqual(bool(regex.fullmatch(r'(?firu)(?:\N{LATIN SMALL LETTER SHARP S}){e<=1:[a-z]}', + 's-')), False) + self.assertEqual(bool(regex.fullmatch(r'(?firu)(?:\N{LATIN SMALL LETTER SHARP S}){e<=1:[a-z]}', + '-s')), False) + + self.assertEqual(bool(regex.fullmatch(r'(?fiu)(\N{LATIN SMALL LETTER SHARP S})\1{e<=1:[a-z]}', + 'ssst')), True) + self.assertEqual(bool(regex.fullmatch(r'(?fiu)(\N{LATIN SMALL LETTER SHARP S})\1{e<=1:[a-z]}', + 'ssts')), True) + self.assertEqual(bool(regex.fullmatch(r'(?firu)\1{e<=1:[a-z]}(\N{LATIN SMALL LETTER SHARP S})', + 'stss')), True) + self.assertEqual(bool(regex.fullmatch(r'(?firu)\1{e<=1:[a-z]}(\N{LATIN SMALL LETTER SHARP S})', + 'tsss')), True) + self.assertEqual(bool(regex.fullmatch(r'(?fiu)(\N{LATIN SMALL LETTER SHARP S})\1{e<=1:[a-z]}', + 'ss-s')), False) + self.assertEqual(bool(regex.fullmatch(r'(?fiu)(\N{LATIN SMALL LETTER SHARP S})\1{e<=1:[a-z]}', + 'sss-')), False) + self.assertEqual(bool(regex.fullmatch(r'(?firu)(\N{LATIN SMALL LETTER SHARP S})\1{e<=1:[a-z]}', + '-s')), False) + self.assertEqual(bool(regex.fullmatch(r'(?firu)(\N{LATIN SMALL LETTER SHARP S})\1{e<=1:[a-z]}', + 's-')), False) + + self.assertEqual(bool(regex.fullmatch(r'(?fiu)(ss)\1{e<=1:[a-z]}', + '\N{LATIN SMALL LETTER SHARP S}ts')), True) + self.assertEqual(bool(regex.fullmatch(r'(?fiu)(ss)\1{e<=1:[a-z]}', + '\N{LATIN SMALL LETTER SHARP S}st')), True) + self.assertEqual(bool(regex.fullmatch(r'(?firu)\1{e<=1:[a-z]}(ss)', + 'st\N{LATIN SMALL LETTER SHARP S}')), True) + self.assertEqual(bool(regex.fullmatch(r'(?firu)\1{e<=1:[a-z]}(ss)', + 'ts\N{LATIN SMALL LETTER SHARP S}')), True) + self.assertEqual(bool(regex.fullmatch(r'(?fiu)(ss)\1{e<=1:[a-z]}', + '\N{LATIN SMALL LETTER SHARP S}-s')), False) + self.assertEqual(bool(regex.fullmatch(r'(?fiu)(ss)\1{e<=1:[a-z]}', + '\N{LATIN SMALL LETTER SHARP S}s-')), False) + self.assertEqual(bool(regex.fullmatch(r'(?firu)(ss)\1{e<=1:[a-z]}', + 's-\N{LATIN SMALL LETTER SHARP S}')), False) + 
self.assertEqual(bool(regex.fullmatch(r'(?firu)(ss)\1{e<=1:[a-z]}', + '-s\N{LATIN SMALL LETTER SHARP S}')), False) + + def test_subscripted_captures(self): + self.assertEqual(regex.match(r'(?P.)+', + 'abc').expandf('{0} {0[0]} {0[-1]}'), 'abc abc abc') + self.assertEqual(regex.match(r'(?P.)+', + 'abc').expandf('{1} {1[0]} {1[1]} {1[2]} {1[-1]} {1[-2]} {1[-3]}'), + 'c a b c c b a') + self.assertEqual(regex.match(r'(?P.)+', + 'abc').expandf('{x} {x[0]} {x[1]} {x[2]} {x[-1]} {x[-2]} {x[-3]}'), + 'c a b c c b a') + + self.assertEqual(regex.subf(r'(?P.)+', r'{0} {0[0]} {0[-1]}', + 'abc'), 'abc abc abc') + self.assertEqual(regex.subf(r'(?P.)+', + '{1} {1[0]} {1[1]} {1[2]} {1[-1]} {1[-2]} {1[-3]}', 'abc'), + 'c a b c c b a') + self.assertEqual(regex.subf(r'(?P.)+', + '{x} {x[0]} {x[1]} {x[2]} {x[-1]} {x[-2]} {x[-3]}', 'abc'), + 'c a b c c b a') + + def test_more_zerowidth(self): + if sys.version_info >= (3, 7, 0): + self.assertEqual(regex.split(r'\b|:+', 'a::bc'), ['', 'a', '', '', + 'bc', '']) + self.assertEqual(regex.sub(r'\b|:+', '-', 'a::bc'), '-a---bc-') + self.assertEqual(regex.findall(r'\b|:+', 'a::bc'), ['', '', '::', + '', '']) + self.assertEqual([m.span() for m in regex.finditer(r'\b|:+', + 'a::bc')], [(0, 0), (1, 1), (1, 3), (3, 3), (5, 5)]) + self.assertEqual([m.span() for m in regex.finditer(r'(?m)^\s*?$', + 'foo\n\n\nbar')], [(4, 4), (4, 5), (5, 5)]) + + def test_line_ending(self): + self.assertEqual(regex.findall(r'\R', '\r\n\n\x0B\f\r\x85\u2028\u2029'), + ['\r\n', '\n', '\x0B', '\f', '\r', '\x85', '\u2028', '\u2029']) + self.assertEqual(regex.findall(br'\R', b'\r\n\n\x0B\f\r\x85'), [b'\r\n', + b'\n', b'\x0B', b'\f', b'\r']) + +def test_main(): + unittest.main(verbosity=2) + +if __name__ == "__main__": + test_main() diff --git a/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/INSTALLER b/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/INSTALLER new file mode 100644 index 
0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/METADATA b/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..3ca21785ffafa8b748b9fbd0bf1a220bbf8ae9e6 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/METADATA @@ -0,0 +1,813 @@ +Metadata-Version: 2.4 +Name: transformers +Version: 4.49.0.dev0 +Summary: State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow +Home-page: https://github.com/huggingface/transformers +Author: The Hugging Face team (past and future) with the help of all our contributors (https://github.com/huggingface/transformers/graphs/contributors) +Author-email: transformers@huggingface.co +License: Apache 2.0 License +Keywords: NLP vision speech deep learning transformer pytorch tensorflow jax BERT GPT-2 Wav2Vec2 ViT +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence +Requires-Python: >=3.9.0 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: filelock +Requires-Dist: huggingface-hub<1.0,>=0.24.0 +Requires-Dist: numpy>=1.17 +Requires-Dist: packaging>=20.0 +Requires-Dist: pyyaml>=5.1 +Requires-Dist: regex!=2019.12.17 +Requires-Dist: requests +Requires-Dist: 
tokenizers<0.22,>=0.21 +Requires-Dist: safetensors>=0.4.1 +Requires-Dist: tqdm>=4.27 +Provides-Extra: ja +Requires-Dist: fugashi>=1.0; extra == "ja" +Requires-Dist: ipadic<2.0,>=1.0.0; extra == "ja" +Requires-Dist: unidic_lite>=1.0.7; extra == "ja" +Requires-Dist: unidic>=1.0.2; extra == "ja" +Requires-Dist: sudachipy>=0.6.6; extra == "ja" +Requires-Dist: sudachidict_core>=20220729; extra == "ja" +Requires-Dist: rhoknp<1.3.1,>=1.1.0; extra == "ja" +Provides-Extra: sklearn +Requires-Dist: scikit-learn; extra == "sklearn" +Provides-Extra: tf +Requires-Dist: tensorflow<2.16,>2.9; extra == "tf" +Requires-Dist: onnxconverter-common; extra == "tf" +Requires-Dist: tf2onnx; extra == "tf" +Requires-Dist: tensorflow-text<2.16; extra == "tf" +Requires-Dist: keras-nlp<0.14.0,>=0.3.1; extra == "tf" +Provides-Extra: tf-cpu +Requires-Dist: keras<2.16,>2.9; extra == "tf-cpu" +Requires-Dist: tensorflow-cpu<2.16,>2.9; extra == "tf-cpu" +Requires-Dist: onnxconverter-common; extra == "tf-cpu" +Requires-Dist: tf2onnx; extra == "tf-cpu" +Requires-Dist: tensorflow-text<2.16; extra == "tf-cpu" +Requires-Dist: keras-nlp<0.14.0,>=0.3.1; extra == "tf-cpu" +Requires-Dist: tensorflow-probability<0.24; extra == "tf-cpu" +Provides-Extra: torch +Requires-Dist: torch>=2.0; extra == "torch" +Requires-Dist: accelerate>=0.26.0; extra == "torch" +Provides-Extra: accelerate +Requires-Dist: accelerate>=0.26.0; extra == "accelerate" +Provides-Extra: retrieval +Requires-Dist: faiss-cpu; extra == "retrieval" +Requires-Dist: datasets!=2.5.0; extra == "retrieval" +Provides-Extra: flax +Requires-Dist: jax<=0.4.13,>=0.4.1; extra == "flax" +Requires-Dist: jaxlib<=0.4.13,>=0.4.1; extra == "flax" +Requires-Dist: flax<=0.7.0,>=0.4.1; extra == "flax" +Requires-Dist: optax<=0.1.4,>=0.0.8; extra == "flax" +Requires-Dist: scipy<1.13.0; extra == "flax" +Provides-Extra: tokenizers +Requires-Dist: tokenizers<0.22,>=0.21; extra == "tokenizers" +Provides-Extra: ftfy +Requires-Dist: ftfy; extra == "ftfy" +Provides-Extra: 
onnxruntime +Requires-Dist: onnxruntime>=1.4.0; extra == "onnxruntime" +Requires-Dist: onnxruntime-tools>=1.4.2; extra == "onnxruntime" +Provides-Extra: onnx +Requires-Dist: onnxconverter-common; extra == "onnx" +Requires-Dist: tf2onnx; extra == "onnx" +Requires-Dist: onnxruntime>=1.4.0; extra == "onnx" +Requires-Dist: onnxruntime-tools>=1.4.2; extra == "onnx" +Provides-Extra: modelcreation +Requires-Dist: cookiecutter==1.7.3; extra == "modelcreation" +Provides-Extra: sagemaker +Requires-Dist: sagemaker>=2.31.0; extra == "sagemaker" +Provides-Extra: deepspeed +Requires-Dist: deepspeed>=0.9.3; extra == "deepspeed" +Requires-Dist: accelerate>=0.26.0; extra == "deepspeed" +Provides-Extra: optuna +Requires-Dist: optuna; extra == "optuna" +Provides-Extra: ray +Requires-Dist: ray[tune]>=2.7.0; extra == "ray" +Provides-Extra: sigopt +Requires-Dist: sigopt; extra == "sigopt" +Provides-Extra: integrations +Requires-Dist: optuna; extra == "integrations" +Requires-Dist: ray[tune]>=2.7.0; extra == "integrations" +Requires-Dist: sigopt; extra == "integrations" +Provides-Extra: serving +Requires-Dist: pydantic; extra == "serving" +Requires-Dist: uvicorn; extra == "serving" +Requires-Dist: fastapi; extra == "serving" +Requires-Dist: starlette; extra == "serving" +Provides-Extra: audio +Requires-Dist: librosa; extra == "audio" +Requires-Dist: pyctcdecode>=0.4.0; extra == "audio" +Requires-Dist: phonemizer; extra == "audio" +Requires-Dist: kenlm; extra == "audio" +Provides-Extra: speech +Requires-Dist: torchaudio; extra == "speech" +Requires-Dist: librosa; extra == "speech" +Requires-Dist: pyctcdecode>=0.4.0; extra == "speech" +Requires-Dist: phonemizer; extra == "speech" +Requires-Dist: kenlm; extra == "speech" +Provides-Extra: torch-speech +Requires-Dist: torchaudio; extra == "torch-speech" +Requires-Dist: librosa; extra == "torch-speech" +Requires-Dist: pyctcdecode>=0.4.0; extra == "torch-speech" +Requires-Dist: phonemizer; extra == "torch-speech" +Requires-Dist: kenlm; extra == 
"torch-speech" +Provides-Extra: tf-speech +Requires-Dist: librosa; extra == "tf-speech" +Requires-Dist: pyctcdecode>=0.4.0; extra == "tf-speech" +Requires-Dist: phonemizer; extra == "tf-speech" +Requires-Dist: kenlm; extra == "tf-speech" +Provides-Extra: flax-speech +Requires-Dist: librosa; extra == "flax-speech" +Requires-Dist: pyctcdecode>=0.4.0; extra == "flax-speech" +Requires-Dist: phonemizer; extra == "flax-speech" +Requires-Dist: kenlm; extra == "flax-speech" +Provides-Extra: vision +Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "vision" +Provides-Extra: timm +Requires-Dist: timm<=1.0.11; extra == "timm" +Provides-Extra: torch-vision +Requires-Dist: torchvision; extra == "torch-vision" +Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "torch-vision" +Provides-Extra: natten +Requires-Dist: natten<0.15.0,>=0.14.6; extra == "natten" +Provides-Extra: codecarbon +Requires-Dist: codecarbon>=2.8.1; extra == "codecarbon" +Provides-Extra: video +Requires-Dist: av==9.2.0; extra == "video" +Provides-Extra: sentencepiece +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "sentencepiece" +Requires-Dist: protobuf; extra == "sentencepiece" +Provides-Extra: tiktoken +Requires-Dist: tiktoken; extra == "tiktoken" +Requires-Dist: blobfile; extra == "tiktoken" +Provides-Extra: testing +Requires-Dist: pytest<8.0.0,>=7.2.0; extra == "testing" +Requires-Dist: pytest-asyncio; extra == "testing" +Requires-Dist: pytest-rich; extra == "testing" +Requires-Dist: pytest-xdist; extra == "testing" +Requires-Dist: timeout-decorator; extra == "testing" +Requires-Dist: parameterized; extra == "testing" +Requires-Dist: psutil; extra == "testing" +Requires-Dist: datasets!=2.5.0; extra == "testing" +Requires-Dist: dill<0.3.5; extra == "testing" +Requires-Dist: evaluate>=0.2.0; extra == "testing" +Requires-Dist: pytest-timeout; extra == "testing" +Requires-Dist: ruff==0.5.1; extra == "testing" +Requires-Dist: sacrebleu<2.0.0,>=1.4.12; extra == "testing" +Requires-Dist: 
rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1; extra == "testing" +Requires-Dist: nltk<=3.8.1; extra == "testing" +Requires-Dist: GitPython<3.1.19; extra == "testing" +Requires-Dist: sacremoses; extra == "testing" +Requires-Dist: rjieba; extra == "testing" +Requires-Dist: beautifulsoup4; extra == "testing" +Requires-Dist: tensorboard; extra == "testing" +Requires-Dist: pydantic; extra == "testing" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "testing" +Requires-Dist: faiss-cpu; extra == "testing" +Requires-Dist: datasets!=2.5.0; extra == "testing" +Requires-Dist: cookiecutter==1.7.3; extra == "testing" +Provides-Extra: deepspeed-testing +Requires-Dist: deepspeed>=0.9.3; extra == "deepspeed-testing" +Requires-Dist: accelerate>=0.26.0; extra == "deepspeed-testing" +Requires-Dist: pytest<8.0.0,>=7.2.0; extra == "deepspeed-testing" +Requires-Dist: pytest-asyncio; extra == "deepspeed-testing" +Requires-Dist: pytest-rich; extra == "deepspeed-testing" +Requires-Dist: pytest-xdist; extra == "deepspeed-testing" +Requires-Dist: timeout-decorator; extra == "deepspeed-testing" +Requires-Dist: parameterized; extra == "deepspeed-testing" +Requires-Dist: psutil; extra == "deepspeed-testing" +Requires-Dist: datasets!=2.5.0; extra == "deepspeed-testing" +Requires-Dist: dill<0.3.5; extra == "deepspeed-testing" +Requires-Dist: evaluate>=0.2.0; extra == "deepspeed-testing" +Requires-Dist: pytest-timeout; extra == "deepspeed-testing" +Requires-Dist: ruff==0.5.1; extra == "deepspeed-testing" +Requires-Dist: sacrebleu<2.0.0,>=1.4.12; extra == "deepspeed-testing" +Requires-Dist: rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1; extra == "deepspeed-testing" +Requires-Dist: nltk<=3.8.1; extra == "deepspeed-testing" +Requires-Dist: GitPython<3.1.19; extra == "deepspeed-testing" +Requires-Dist: sacremoses; extra == "deepspeed-testing" +Requires-Dist: rjieba; extra == "deepspeed-testing" +Requires-Dist: beautifulsoup4; extra == "deepspeed-testing" +Requires-Dist: tensorboard; extra == 
"deepspeed-testing" +Requires-Dist: pydantic; extra == "deepspeed-testing" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "deepspeed-testing" +Requires-Dist: faiss-cpu; extra == "deepspeed-testing" +Requires-Dist: datasets!=2.5.0; extra == "deepspeed-testing" +Requires-Dist: cookiecutter==1.7.3; extra == "deepspeed-testing" +Requires-Dist: optuna; extra == "deepspeed-testing" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "deepspeed-testing" +Requires-Dist: protobuf; extra == "deepspeed-testing" +Provides-Extra: ruff +Requires-Dist: ruff==0.5.1; extra == "ruff" +Provides-Extra: quality +Requires-Dist: datasets!=2.5.0; extra == "quality" +Requires-Dist: isort>=5.5.4; extra == "quality" +Requires-Dist: ruff==0.5.1; extra == "quality" +Requires-Dist: GitPython<3.1.19; extra == "quality" +Requires-Dist: urllib3<2.0.0; extra == "quality" +Requires-Dist: libcst; extra == "quality" +Requires-Dist: rich; extra == "quality" +Provides-Extra: all +Requires-Dist: tensorflow<2.16,>2.9; extra == "all" +Requires-Dist: onnxconverter-common; extra == "all" +Requires-Dist: tf2onnx; extra == "all" +Requires-Dist: tensorflow-text<2.16; extra == "all" +Requires-Dist: keras-nlp<0.14.0,>=0.3.1; extra == "all" +Requires-Dist: torch>=2.0; extra == "all" +Requires-Dist: accelerate>=0.26.0; extra == "all" +Requires-Dist: jax<=0.4.13,>=0.4.1; extra == "all" +Requires-Dist: jaxlib<=0.4.13,>=0.4.1; extra == "all" +Requires-Dist: flax<=0.7.0,>=0.4.1; extra == "all" +Requires-Dist: optax<=0.1.4,>=0.0.8; extra == "all" +Requires-Dist: scipy<1.13.0; extra == "all" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "all" +Requires-Dist: protobuf; extra == "all" +Requires-Dist: tokenizers<0.22,>=0.21; extra == "all" +Requires-Dist: torchaudio; extra == "all" +Requires-Dist: librosa; extra == "all" +Requires-Dist: pyctcdecode>=0.4.0; extra == "all" +Requires-Dist: phonemizer; extra == "all" +Requires-Dist: kenlm; extra == "all" +Requires-Dist: Pillow<=15.0,>=10.0.1; extra == 
"all" +Requires-Dist: optuna; extra == "all" +Requires-Dist: ray[tune]>=2.7.0; extra == "all" +Requires-Dist: sigopt; extra == "all" +Requires-Dist: timm<=1.0.11; extra == "all" +Requires-Dist: torchvision; extra == "all" +Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "all" +Requires-Dist: codecarbon>=2.8.1; extra == "all" +Requires-Dist: accelerate>=0.26.0; extra == "all" +Requires-Dist: av==9.2.0; extra == "all" +Provides-Extra: dev-torch +Requires-Dist: pytest<8.0.0,>=7.2.0; extra == "dev-torch" +Requires-Dist: pytest-asyncio; extra == "dev-torch" +Requires-Dist: pytest-rich; extra == "dev-torch" +Requires-Dist: pytest-xdist; extra == "dev-torch" +Requires-Dist: timeout-decorator; extra == "dev-torch" +Requires-Dist: parameterized; extra == "dev-torch" +Requires-Dist: psutil; extra == "dev-torch" +Requires-Dist: datasets!=2.5.0; extra == "dev-torch" +Requires-Dist: dill<0.3.5; extra == "dev-torch" +Requires-Dist: evaluate>=0.2.0; extra == "dev-torch" +Requires-Dist: pytest-timeout; extra == "dev-torch" +Requires-Dist: ruff==0.5.1; extra == "dev-torch" +Requires-Dist: sacrebleu<2.0.0,>=1.4.12; extra == "dev-torch" +Requires-Dist: rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1; extra == "dev-torch" +Requires-Dist: nltk<=3.8.1; extra == "dev-torch" +Requires-Dist: GitPython<3.1.19; extra == "dev-torch" +Requires-Dist: sacremoses; extra == "dev-torch" +Requires-Dist: rjieba; extra == "dev-torch" +Requires-Dist: beautifulsoup4; extra == "dev-torch" +Requires-Dist: tensorboard; extra == "dev-torch" +Requires-Dist: pydantic; extra == "dev-torch" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "dev-torch" +Requires-Dist: faiss-cpu; extra == "dev-torch" +Requires-Dist: datasets!=2.5.0; extra == "dev-torch" +Requires-Dist: cookiecutter==1.7.3; extra == "dev-torch" +Requires-Dist: torch>=2.0; extra == "dev-torch" +Requires-Dist: accelerate>=0.26.0; extra == "dev-torch" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "dev-torch" +Requires-Dist: protobuf; extra 
== "dev-torch" +Requires-Dist: tokenizers<0.22,>=0.21; extra == "dev-torch" +Requires-Dist: torchaudio; extra == "dev-torch" +Requires-Dist: librosa; extra == "dev-torch" +Requires-Dist: pyctcdecode>=0.4.0; extra == "dev-torch" +Requires-Dist: phonemizer; extra == "dev-torch" +Requires-Dist: kenlm; extra == "dev-torch" +Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "dev-torch" +Requires-Dist: optuna; extra == "dev-torch" +Requires-Dist: ray[tune]>=2.7.0; extra == "dev-torch" +Requires-Dist: sigopt; extra == "dev-torch" +Requires-Dist: timm<=1.0.11; extra == "dev-torch" +Requires-Dist: torchvision; extra == "dev-torch" +Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "dev-torch" +Requires-Dist: codecarbon>=2.8.1; extra == "dev-torch" +Requires-Dist: datasets!=2.5.0; extra == "dev-torch" +Requires-Dist: isort>=5.5.4; extra == "dev-torch" +Requires-Dist: ruff==0.5.1; extra == "dev-torch" +Requires-Dist: GitPython<3.1.19; extra == "dev-torch" +Requires-Dist: urllib3<2.0.0; extra == "dev-torch" +Requires-Dist: libcst; extra == "dev-torch" +Requires-Dist: rich; extra == "dev-torch" +Requires-Dist: fugashi>=1.0; extra == "dev-torch" +Requires-Dist: ipadic<2.0,>=1.0.0; extra == "dev-torch" +Requires-Dist: unidic_lite>=1.0.7; extra == "dev-torch" +Requires-Dist: unidic>=1.0.2; extra == "dev-torch" +Requires-Dist: sudachipy>=0.6.6; extra == "dev-torch" +Requires-Dist: sudachidict_core>=20220729; extra == "dev-torch" +Requires-Dist: rhoknp<1.3.1,>=1.1.0; extra == "dev-torch" +Requires-Dist: scikit-learn; extra == "dev-torch" +Requires-Dist: cookiecutter==1.7.3; extra == "dev-torch" +Requires-Dist: onnxruntime>=1.4.0; extra == "dev-torch" +Requires-Dist: onnxruntime-tools>=1.4.2; extra == "dev-torch" +Provides-Extra: dev-tensorflow +Requires-Dist: pytest<8.0.0,>=7.2.0; extra == "dev-tensorflow" +Requires-Dist: pytest-asyncio; extra == "dev-tensorflow" +Requires-Dist: pytest-rich; extra == "dev-tensorflow" +Requires-Dist: pytest-xdist; extra == "dev-tensorflow" +Requires-Dist: 
timeout-decorator; extra == "dev-tensorflow" +Requires-Dist: parameterized; extra == "dev-tensorflow" +Requires-Dist: psutil; extra == "dev-tensorflow" +Requires-Dist: datasets!=2.5.0; extra == "dev-tensorflow" +Requires-Dist: dill<0.3.5; extra == "dev-tensorflow" +Requires-Dist: evaluate>=0.2.0; extra == "dev-tensorflow" +Requires-Dist: pytest-timeout; extra == "dev-tensorflow" +Requires-Dist: ruff==0.5.1; extra == "dev-tensorflow" +Requires-Dist: sacrebleu<2.0.0,>=1.4.12; extra == "dev-tensorflow" +Requires-Dist: rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1; extra == "dev-tensorflow" +Requires-Dist: nltk<=3.8.1; extra == "dev-tensorflow" +Requires-Dist: GitPython<3.1.19; extra == "dev-tensorflow" +Requires-Dist: sacremoses; extra == "dev-tensorflow" +Requires-Dist: rjieba; extra == "dev-tensorflow" +Requires-Dist: beautifulsoup4; extra == "dev-tensorflow" +Requires-Dist: tensorboard; extra == "dev-tensorflow" +Requires-Dist: pydantic; extra == "dev-tensorflow" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "dev-tensorflow" +Requires-Dist: faiss-cpu; extra == "dev-tensorflow" +Requires-Dist: datasets!=2.5.0; extra == "dev-tensorflow" +Requires-Dist: cookiecutter==1.7.3; extra == "dev-tensorflow" +Requires-Dist: tensorflow<2.16,>2.9; extra == "dev-tensorflow" +Requires-Dist: onnxconverter-common; extra == "dev-tensorflow" +Requires-Dist: tf2onnx; extra == "dev-tensorflow" +Requires-Dist: tensorflow-text<2.16; extra == "dev-tensorflow" +Requires-Dist: keras-nlp<0.14.0,>=0.3.1; extra == "dev-tensorflow" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "dev-tensorflow" +Requires-Dist: protobuf; extra == "dev-tensorflow" +Requires-Dist: tokenizers<0.22,>=0.21; extra == "dev-tensorflow" +Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "dev-tensorflow" +Requires-Dist: datasets!=2.5.0; extra == "dev-tensorflow" +Requires-Dist: isort>=5.5.4; extra == "dev-tensorflow" +Requires-Dist: ruff==0.5.1; extra == "dev-tensorflow" +Requires-Dist: GitPython<3.1.19; extra 
== "dev-tensorflow" +Requires-Dist: urllib3<2.0.0; extra == "dev-tensorflow" +Requires-Dist: libcst; extra == "dev-tensorflow" +Requires-Dist: rich; extra == "dev-tensorflow" +Requires-Dist: scikit-learn; extra == "dev-tensorflow" +Requires-Dist: cookiecutter==1.7.3; extra == "dev-tensorflow" +Requires-Dist: onnxconverter-common; extra == "dev-tensorflow" +Requires-Dist: tf2onnx; extra == "dev-tensorflow" +Requires-Dist: onnxruntime>=1.4.0; extra == "dev-tensorflow" +Requires-Dist: onnxruntime-tools>=1.4.2; extra == "dev-tensorflow" +Requires-Dist: librosa; extra == "dev-tensorflow" +Requires-Dist: pyctcdecode>=0.4.0; extra == "dev-tensorflow" +Requires-Dist: phonemizer; extra == "dev-tensorflow" +Requires-Dist: kenlm; extra == "dev-tensorflow" +Provides-Extra: dev +Requires-Dist: tensorflow<2.16,>2.9; extra == "dev" +Requires-Dist: onnxconverter-common; extra == "dev" +Requires-Dist: tf2onnx; extra == "dev" +Requires-Dist: tensorflow-text<2.16; extra == "dev" +Requires-Dist: keras-nlp<0.14.0,>=0.3.1; extra == "dev" +Requires-Dist: torch>=2.0; extra == "dev" +Requires-Dist: accelerate>=0.26.0; extra == "dev" +Requires-Dist: jax<=0.4.13,>=0.4.1; extra == "dev" +Requires-Dist: jaxlib<=0.4.13,>=0.4.1; extra == "dev" +Requires-Dist: flax<=0.7.0,>=0.4.1; extra == "dev" +Requires-Dist: optax<=0.1.4,>=0.0.8; extra == "dev" +Requires-Dist: scipy<1.13.0; extra == "dev" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "dev" +Requires-Dist: protobuf; extra == "dev" +Requires-Dist: tokenizers<0.22,>=0.21; extra == "dev" +Requires-Dist: torchaudio; extra == "dev" +Requires-Dist: librosa; extra == "dev" +Requires-Dist: pyctcdecode>=0.4.0; extra == "dev" +Requires-Dist: phonemizer; extra == "dev" +Requires-Dist: kenlm; extra == "dev" +Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "dev" +Requires-Dist: optuna; extra == "dev" +Requires-Dist: ray[tune]>=2.7.0; extra == "dev" +Requires-Dist: sigopt; extra == "dev" +Requires-Dist: timm<=1.0.11; extra == "dev" +Requires-Dist: 
torchvision; extra == "dev" +Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "dev" +Requires-Dist: codecarbon>=2.8.1; extra == "dev" +Requires-Dist: accelerate>=0.26.0; extra == "dev" +Requires-Dist: av==9.2.0; extra == "dev" +Requires-Dist: pytest<8.0.0,>=7.2.0; extra == "dev" +Requires-Dist: pytest-asyncio; extra == "dev" +Requires-Dist: pytest-rich; extra == "dev" +Requires-Dist: pytest-xdist; extra == "dev" +Requires-Dist: timeout-decorator; extra == "dev" +Requires-Dist: parameterized; extra == "dev" +Requires-Dist: psutil; extra == "dev" +Requires-Dist: datasets!=2.5.0; extra == "dev" +Requires-Dist: dill<0.3.5; extra == "dev" +Requires-Dist: evaluate>=0.2.0; extra == "dev" +Requires-Dist: pytest-timeout; extra == "dev" +Requires-Dist: ruff==0.5.1; extra == "dev" +Requires-Dist: sacrebleu<2.0.0,>=1.4.12; extra == "dev" +Requires-Dist: rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1; extra == "dev" +Requires-Dist: nltk<=3.8.1; extra == "dev" +Requires-Dist: GitPython<3.1.19; extra == "dev" +Requires-Dist: sacremoses; extra == "dev" +Requires-Dist: rjieba; extra == "dev" +Requires-Dist: beautifulsoup4; extra == "dev" +Requires-Dist: tensorboard; extra == "dev" +Requires-Dist: pydantic; extra == "dev" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "dev" +Requires-Dist: faiss-cpu; extra == "dev" +Requires-Dist: datasets!=2.5.0; extra == "dev" +Requires-Dist: cookiecutter==1.7.3; extra == "dev" +Requires-Dist: datasets!=2.5.0; extra == "dev" +Requires-Dist: isort>=5.5.4; extra == "dev" +Requires-Dist: ruff==0.5.1; extra == "dev" +Requires-Dist: GitPython<3.1.19; extra == "dev" +Requires-Dist: urllib3<2.0.0; extra == "dev" +Requires-Dist: libcst; extra == "dev" +Requires-Dist: rich; extra == "dev" +Requires-Dist: fugashi>=1.0; extra == "dev" +Requires-Dist: ipadic<2.0,>=1.0.0; extra == "dev" +Requires-Dist: unidic_lite>=1.0.7; extra == "dev" +Requires-Dist: unidic>=1.0.2; extra == "dev" +Requires-Dist: sudachipy>=0.6.6; extra == "dev" +Requires-Dist: 
sudachidict_core>=20220729; extra == "dev" +Requires-Dist: rhoknp<1.3.1,>=1.1.0; extra == "dev" +Requires-Dist: scikit-learn; extra == "dev" +Requires-Dist: cookiecutter==1.7.3; extra == "dev" +Provides-Extra: torchhub +Requires-Dist: filelock; extra == "torchhub" +Requires-Dist: huggingface-hub<1.0,>=0.24.0; extra == "torchhub" +Requires-Dist: importlib_metadata; extra == "torchhub" +Requires-Dist: numpy>=1.17; extra == "torchhub" +Requires-Dist: packaging>=20.0; extra == "torchhub" +Requires-Dist: protobuf; extra == "torchhub" +Requires-Dist: regex!=2019.12.17; extra == "torchhub" +Requires-Dist: requests; extra == "torchhub" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "torchhub" +Requires-Dist: torch>=2.0; extra == "torchhub" +Requires-Dist: tokenizers<0.22,>=0.21; extra == "torchhub" +Requires-Dist: tqdm>=4.27; extra == "torchhub" +Provides-Extra: agents +Requires-Dist: diffusers; extra == "agents" +Requires-Dist: accelerate>=0.26.0; extra == "agents" +Requires-Dist: datasets!=2.5.0; extra == "agents" +Requires-Dist: torch>=2.0; extra == "agents" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "agents" +Requires-Dist: opencv-python; extra == "agents" +Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "agents" +Provides-Extra: benchmark +Requires-Dist: optimum-benchmark>=0.3.0; extra == "benchmark" +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: description-content-type +Dynamic: home-page +Dynamic: keywords +Dynamic: license +Dynamic: license-file +Dynamic: provides-extra +Dynamic: requires-dist +Dynamic: requires-python +Dynamic: summary + + + +

+ + + + Hugging Face Transformers Library + +
+
+

+ +

+ Build + GitHub + Documentation + GitHub release + Contributor Covenant + DOI +

+ +

+

+ English | + 简体中文 | + 繁體中文 | + 한국어 | + Español | + 日本語 | + हिन्दी | + Русский | + Português | + తెలుగు | + Français | + Deutsch | + Tiếng Việt | + العربية | + اردو | +

+

+ +

+

State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow

+

+ +

+ +

+ +🤗 Transformers provides thousands of pretrained models to perform tasks on different modalities such as text, vision, and audio. + +These models can be applied on: + +* 📝 Text, for tasks like text classification, information extraction, question answering, summarization, translation, and text generation, in over 100 languages. +* 🖼️ Images, for tasks like image classification, object detection, and segmentation. +* 🗣️ Audio, for tasks like speech recognition and audio classification. + +Transformer models can also perform tasks on **several modalities combined**, such as table question answering, optical character recognition, information extraction from scanned documents, video classification, and visual question answering. + +🤗 Transformers provides APIs to quickly download and use those pretrained models on a given text, fine-tune them on your own datasets and then share them with the community on our [model hub](https://huggingface.co/models). At the same time, each python module defining an architecture is fully standalone and can be modified to enable quick research experiments. + +🤗 Transformers is backed by the three most popular deep learning libraries — [Jax](https://jax.readthedocs.io/en/latest/), [PyTorch](https://pytorch.org/) and [TensorFlow](https://www.tensorflow.org/) — with a seamless integration between them. It's straightforward to train your models with one before loading them for inference with the other. + +## Online demos + +You can test most of our models directly on their pages from the [model hub](https://huggingface.co/models). We also offer [private model hosting, versioning, & an inference API](https://huggingface.co/pricing) for public and private models. 
+ +Here are a few examples: + +In Natural Language Processing: +- [Masked word completion with BERT](https://huggingface.co/google-bert/bert-base-uncased?text=Paris+is+the+%5BMASK%5D+of+France) +- [Named Entity Recognition with Electra](https://huggingface.co/dbmdz/electra-large-discriminator-finetuned-conll03-english?text=My+name+is+Sarah+and+I+live+in+London+city) +- [Text generation with Mistral](https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2) +- [Natural Language Inference with RoBERTa](https://huggingface.co/FacebookAI/roberta-large-mnli?text=The+dog+was+lost.+Nobody+lost+any+animal) +- [Summarization with BART](https://huggingface.co/facebook/bart-large-cnn?text=The+tower+is+324+metres+%281%2C063+ft%29+tall%2C+about+the+same+height+as+an+81-storey+building%2C+and+the+tallest+structure+in+Paris.+Its+base+is+square%2C+measuring+125+metres+%28410+ft%29+on+each+side.+During+its+construction%2C+the+Eiffel+Tower+surpassed+the+Washington+Monument+to+become+the+tallest+man-made+structure+in+the+world%2C+a+title+it+held+for+41+years+until+the+Chrysler+Building+in+New+York+City+was+finished+in+1930.+It+was+the+first+structure+to+reach+a+height+of+300+metres.+Due+to+the+addition+of+a+broadcasting+aerial+at+the+top+of+the+tower+in+1957%2C+it+is+now+taller+than+the+Chrysler+Building+by+5.2+metres+%2817+ft%29.+Excluding+transmitters%2C+the+Eiffel+Tower+is+the+second+tallest+free-standing+structure+in+France+after+the+Millau+Viaduct) +- [Question answering with 
DistilBERT](https://huggingface.co/distilbert/distilbert-base-uncased-distilled-squad?text=Which+name+is+also+used+to+describe+the+Amazon+rainforest+in+English%3F&context=The+Amazon+rainforest+%28Portuguese%3A+Floresta+Amaz%C3%B4nica+or+Amaz%C3%B4nia%3B+Spanish%3A+Selva+Amaz%C3%B3nica%2C+Amazon%C3%ADa+or+usually+Amazonia%3B+French%3A+For%C3%AAt+amazonienne%3B+Dutch%3A+Amazoneregenwoud%29%2C+also+known+in+English+as+Amazonia+or+the+Amazon+Jungle%2C+is+a+moist+broadleaf+forest+that+covers+most+of+the+Amazon+basin+of+South+America.+This+basin+encompasses+7%2C000%2C000+square+kilometres+%282%2C700%2C000+sq+mi%29%2C+of+which+5%2C500%2C000+square+kilometres+%282%2C100%2C000+sq+mi%29+are+covered+by+the+rainforest.+This+region+includes+territory+belonging+to+nine+nations.+The+majority+of+the+forest+is+contained+within+Brazil%2C+with+60%25+of+the+rainforest%2C+followed+by+Peru+with+13%25%2C+Colombia+with+10%25%2C+and+with+minor+amounts+in+Venezuela%2C+Ecuador%2C+Bolivia%2C+Guyana%2C+Suriname+and+French+Guiana.+States+or+departments+in+four+nations+contain+%22Amazonas%22+in+their+names.+The+Amazon+represents+over+half+of+the+planet%27s+remaining+rainforests%2C+and+comprises+the+largest+and+most+biodiverse+tract+of+tropical+rainforest+in+the+world%2C+with+an+estimated+390+billion+individual+trees+divided+into+16%2C000+species) +- [Translation with T5](https://huggingface.co/google-t5/t5-base?text=My+name+is+Wolfgang+and+I+live+in+Berlin) + +In Computer Vision: +- [Image classification with ViT](https://huggingface.co/google/vit-base-patch16-224) +- [Object Detection with DETR](https://huggingface.co/facebook/detr-resnet-50) +- [Semantic Segmentation with SegFormer](https://huggingface.co/nvidia/segformer-b0-finetuned-ade-512-512) +- [Panoptic Segmentation with Mask2Former](https://huggingface.co/facebook/mask2former-swin-large-coco-panoptic) +- [Depth Estimation with Depth Anything](https://huggingface.co/docs/transformers/main/model_doc/depth_anything) +- [Video 
Classification with VideoMAE](https://huggingface.co/docs/transformers/model_doc/videomae) +- [Universal Segmentation with OneFormer](https://huggingface.co/shi-labs/oneformer_ade20k_dinat_large) + +In Audio: +- [Automatic Speech Recognition with Whisper](https://huggingface.co/openai/whisper-large-v3) +- [Keyword Spotting with Wav2Vec2](https://huggingface.co/superb/wav2vec2-base-superb-ks) +- [Audio Classification with Audio Spectrogram Transformer](https://huggingface.co/MIT/ast-finetuned-audioset-10-10-0.4593) + +In Multimodal tasks: +- [Table Question Answering with TAPAS](https://huggingface.co/google/tapas-base-finetuned-wtq) +- [Visual Question Answering with ViLT](https://huggingface.co/dandelin/vilt-b32-finetuned-vqa) +- [Image captioning with LLaVa](https://huggingface.co/llava-hf/llava-1.5-7b-hf) +- [Zero-shot Image Classification with SigLIP](https://huggingface.co/google/siglip-so400m-patch14-384) +- [Document Question Answering with LayoutLM](https://huggingface.co/impira/layoutlm-document-qa) +- [Zero-shot Video Classification with X-CLIP](https://huggingface.co/docs/transformers/model_doc/xclip) +- [Zero-shot Object Detection with OWLv2](https://huggingface.co/docs/transformers/en/model_doc/owlv2) +- [Zero-shot Image Segmentation with CLIPSeg](https://huggingface.co/docs/transformers/model_doc/clipseg) +- [Automatic Mask Generation with SAM](https://huggingface.co/docs/transformers/model_doc/sam) + + +## 100 projects using Transformers + +Transformers is more than a toolkit to use pretrained models: it's a community of projects built around it and the +Hugging Face Hub. We want Transformers to enable developers, researchers, students, professors, engineers, and anyone +else to build their dream projects. 
+ +In order to celebrate the 100,000 stars of transformers, we have decided to put the spotlight on the +community, and we have created the [awesome-transformers](./awesome-transformers.md) page which lists 100 +incredible projects built in the vicinity of transformers. + +If you own or use a project that you believe should be part of the list, please open a PR to add it! + +## Serious about AI in your organisation? Build faster with the Hugging Face Enterprise Hub. + + + Hugging Face Enterprise Hub +
+ +## Quick tour + +To immediately use a model on a given input (text, image, audio, ...), we provide the `pipeline` API. Pipelines group together a pretrained model with the preprocessing that was used during that model's training. Here is how to quickly use a pipeline to classify positive versus negative texts: + +```python +>>> from transformers import pipeline + +# Allocate a pipeline for sentiment-analysis +>>> classifier = pipeline('sentiment-analysis') +>>> classifier('We are very happy to introduce pipeline to the transformers repository.') +[{'label': 'POSITIVE', 'score': 0.9996980428695679}] +``` + +The second line of code downloads and caches the pretrained model used by the pipeline, while the third evaluates it on the given text. Here, the answer is "positive" with a confidence of 99.97%. + +Many tasks have a pre-trained `pipeline` ready to go, in NLP but also in computer vision and speech. For example, we can easily extract detected objects in an image: + +``` python +>>> import requests +>>> from PIL import Image +>>> from transformers import pipeline + +# Download an image with cute cats +>>> url = "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/coco_sample.png" +>>> image_data = requests.get(url, stream=True).raw +>>> image = Image.open(image_data) + +# Allocate a pipeline for object detection +>>> object_detector = pipeline('object-detection') +>>> object_detector(image) +[{'score': 0.9982201457023621, + 'label': 'remote', + 'box': {'xmin': 40, 'ymin': 70, 'xmax': 175, 'ymax': 117}}, + {'score': 0.9960021376609802, + 'label': 'remote', + 'box': {'xmin': 333, 'ymin': 72, 'xmax': 368, 'ymax': 187}}, + {'score': 0.9954745173454285, + 'label': 'couch', + 'box': {'xmin': 0, 'ymin': 1, 'xmax': 639, 'ymax': 473}}, + {'score': 0.9988006353378296, + 'label': 'cat', + 'box': {'xmin': 13, 'ymin': 52, 'xmax': 314, 'ymax': 470}}, + {'score': 0.9986783862113953, + 'label': 'cat', + 'box': {'xmin': 345, 'ymin': 23, 'xmax': 640, 
'ymax': 368}}] +``` + +Here, we get a list of objects detected in the image, with a box surrounding the object and a confidence score. Here is the original image on the left, with the predictions displayed on the right: + +

+ + +

+ +You can learn more about the tasks supported by the `pipeline` API in [this tutorial](https://huggingface.co/docs/transformers/task_summary). + +In addition to `pipeline`, to download and use any of the pretrained models on your given task, all it takes is three lines of code. Here is the PyTorch version: +```python +>>> from transformers import AutoTokenizer, AutoModel + +>>> tokenizer = AutoTokenizer.from_pretrained("google-bert/bert-base-uncased") +>>> model = AutoModel.from_pretrained("google-bert/bert-base-uncased") + +>>> inputs = tokenizer("Hello world!", return_tensors="pt") +>>> outputs = model(**inputs) +``` + +And here is the equivalent code for TensorFlow: +```python +>>> from transformers import AutoTokenizer, TFAutoModel + +>>> tokenizer = AutoTokenizer.from_pretrained("google-bert/bert-base-uncased") +>>> model = TFAutoModel.from_pretrained("google-bert/bert-base-uncased") + +>>> inputs = tokenizer("Hello world!", return_tensors="tf") +>>> outputs = model(**inputs) +``` + +The tokenizer is responsible for all the preprocessing the pretrained model expects and can be called directly on a single string (as in the above examples) or a list. It will output a dictionary that you can use in downstream code or simply directly pass to your model using the ** argument unpacking operator. + +The model itself is a regular [Pytorch `nn.Module`](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) or a [TensorFlow `tf.keras.Model`](https://www.tensorflow.org/api_docs/python/tf/keras/Model) (depending on your backend) which you can use as usual. [This tutorial](https://huggingface.co/docs/transformers/training) explains how to integrate such a model into a classic PyTorch or TensorFlow training loop, or how to use our `Trainer` API to quickly fine-tune on a new dataset. + +## Why should I use transformers? + +1. Easy-to-use state-of-the-art models: + - High performance on natural language understanding & generation, computer vision, and audio tasks. 
+ - Low barrier to entry for educators and practitioners. + - Few user-facing abstractions with just three classes to learn. + - A unified API for using all our pretrained models. + +1. Lower compute costs, smaller carbon footprint: + - Researchers can share trained models instead of always retraining. + - Practitioners can reduce compute time and production costs. + - Dozens of architectures with over 400,000 pretrained models across all modalities. + +1. Choose the right framework for every part of a model's lifetime: + - Train state-of-the-art models in 3 lines of code. + - Move a single model between TF2.0/PyTorch/JAX frameworks at will. + - Seamlessly pick the right framework for training, evaluation, and production. + +1. Easily customize a model or an example to your needs: + - We provide examples for each architecture to reproduce the results published by its original authors. + - Model internals are exposed as consistently as possible. + - Model files can be used independently of the library for quick experiments. + +## Why shouldn't I use transformers? + +- This library is not a modular toolbox of building blocks for neural nets. The code in the model files is not refactored with additional abstractions on purpose, so that researchers can quickly iterate on each of the models without diving into additional abstractions/files. +- The training API is not intended to work on any model but is optimized to work with the models provided by the library. For generic machine learning loops, you should use another library (possibly, [Accelerate](https://huggingface.co/docs/accelerate)). +- While we strive to present as many use cases as possible, the scripts in our [examples folder](https://github.com/huggingface/transformers/tree/main/examples) are just that: examples. It is expected that they won't work out-of-the-box on your specific problem and that you will be required to change a few lines of code to adapt them to your needs. 
+
+## Installation
+
+### With pip
+
+This repository is tested on Python 3.9+, Flax 0.4.1+, PyTorch 2.0+, and TensorFlow 2.6+.
+
+You should install 🤗 Transformers in a [virtual environment](https://docs.python.org/3/library/venv.html). If you're unfamiliar with Python virtual environments, check out the [user guide](https://packaging.python.org/guides/installing-using-pip-and-virtual-environments/).
+
+First, create a virtual environment with the version of Python you're going to use and activate it.
+
+**macOS/Linux**
+
+```bash
+python -m venv env
+source env/bin/activate
+```
+
+**Windows**
+
+```bash
+python -m venv env
+env\Scripts\activate
+```
+
+To use 🤗 Transformers, you must install at least one of Flax, PyTorch, or TensorFlow. Refer to the official installation guides for platform-specific commands:
+
+[TensorFlow installation page](https://www.tensorflow.org/install/),
+[PyTorch installation page](https://pytorch.org/get-started/locally/#start-locally) and/or [Flax](https://github.com/google/flax#quick-install) and [Jax](https://github.com/google/jax#installation)
+
+When one of those backends has been installed, 🤗 Transformers can be installed using pip as follows:
+
+```bash
+pip install transformers
+```
+
+If you'd like to play with the examples or need the bleeding edge of the code and can't wait for a new release, you must [install the library from source](https://huggingface.co/docs/transformers/installation#installing-from-source).
+
+```bash
+git clone https://github.com/huggingface/transformers.git
+cd transformers
+pip install .
+```
+
+### With conda
+
+🤗 Transformers can be installed using conda as follows:
+
+```shell script
+conda install conda-forge::transformers
+```
+
+> **_NOTE:_** Installing `transformers` from the `huggingface` channel is deprecated.
+
+Follow the installation pages of Flax, PyTorch or TensorFlow to see how to install them with conda.
+
+> **_NOTE:_** On Windows, you may be prompted to activate Developer Mode in order to benefit from caching. If this is not an option for you, please let us know in [this issue](https://github.com/huggingface/huggingface_hub/issues/1062).
+
+## Model architectures
+
+**[All the model checkpoints](https://huggingface.co/models)** provided by 🤗 Transformers are seamlessly integrated from the huggingface.co [model hub](https://huggingface.co/models), where they are uploaded directly by [users](https://huggingface.co/users) and [organizations](https://huggingface.co/organizations).
+
+Current number of checkpoints: ![](https://img.shields.io/endpoint?url=https://huggingface.co/api/shields/models&color=brightgreen)
+
+🤗 Transformers currently provides the following architectures: see [here](https://huggingface.co/docs/transformers/model_summary) for a high-level summary of each of them.
+
+To check if each model has an implementation in Flax, PyTorch or TensorFlow, or has an associated tokenizer backed by the 🤗 Tokenizers library, refer to [this table](https://huggingface.co/docs/transformers/index#supported-frameworks).
+
+These implementations have been tested on several datasets (see the example scripts) and should match the performance of the original implementations. You can find more details on performance in the Examples section of the [documentation](https://github.com/huggingface/transformers/tree/main/examples).
+ + +## Learn more + +| Section | Description | +|-|-| +| [Documentation](https://huggingface.co/docs/transformers/) | Full API documentation and tutorials | +| [Task summary](https://huggingface.co/docs/transformers/task_summary) | Tasks supported by 🤗 Transformers | +| [Preprocessing tutorial](https://huggingface.co/docs/transformers/preprocessing) | Using the `Tokenizer` class to prepare data for the models | +| [Training and fine-tuning](https://huggingface.co/docs/transformers/training) | Using the models provided by 🤗 Transformers in a PyTorch/TensorFlow training loop and the `Trainer` API | +| [Quick tour: Fine-tuning/usage scripts](https://github.com/huggingface/transformers/tree/main/examples) | Example scripts for fine-tuning models on a wide range of tasks | +| [Model sharing and uploading](https://huggingface.co/docs/transformers/model_sharing) | Upload and share your fine-tuned models with the community | + +## Citation + +We now have a [paper](https://www.aclweb.org/anthology/2020.emnlp-demos.6/) you can cite for the 🤗 Transformers library: +```bibtex +@inproceedings{wolf-etal-2020-transformers, + title = "Transformers: State-of-the-Art Natural Language Processing", + author = "Thomas Wolf and Lysandre Debut and Victor Sanh and Julien Chaumond and Clement Delangue and Anthony Moi and Pierric Cistac and Tim Rault and Rémi Louf and Morgan Funtowicz and Joe Davison and Sam Shleifer and Patrick von Platen and Clara Ma and Yacine Jernite and Julien Plu and Canwen Xu and Teven Le Scao and Sylvain Gugger and Mariama Drame and Quentin Lhoest and Alexander M. 
Rush", + booktitle = "Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing: System Demonstrations", + month = oct, + year = "2020", + address = "Online", + publisher = "Association for Computational Linguistics", + url = "https://www.aclweb.org/anthology/2020.emnlp-demos.6", + pages = "38--45" +} +``` diff --git a/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/RECORD b/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..2e11665a722d5c02f29404f6acae699fb6883efb --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/RECORD @@ -0,0 +1,3877 @@ +../../../bin/transformers-cli,sha256=ooAq_DAugjB6-6MmXTtD2x28M1hYnICFCibiw9LAW0s,251 +transformers-4.49.0.dev0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +transformers-4.49.0.dev0.dist-info/METADATA,sha256=xSisrh5vFqI36-4h4SBoYOeeb4tx2EwjFR4LY1jTrZU,45887 +transformers-4.49.0.dev0.dist-info/RECORD,, +transformers-4.49.0.dev0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +transformers-4.49.0.dev0.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91 +transformers-4.49.0.dev0.dist-info/direct_url.json,sha256=JX7TbxyQWWu1NqpBobBFcCpQM1tdJDWCaCsT3w9kG4U,209 +transformers-4.49.0.dev0.dist-info/entry_points.txt,sha256=kgdW_0F_tXNrWKSZXKWKeUD_LqVgcji9j7atGXve8z4,81 +transformers-4.49.0.dev0.dist-info/licenses/LICENSE,sha256=d_1HEN757DwPYiWADgI18VpCWr1KiwNVkSf814JhIEk,11418 +transformers-4.49.0.dev0.dist-info/top_level.txt,sha256=GLBaeTo_CSdhnHvbxQ0kzpEHdlLuA_33foIogaWxntI,13 +transformers/__init__.py,sha256=iEszDJC8vRhShbcQCcgdJQk5VoPD2NDpNgaNTSuX26c,315419 +transformers/__pycache__/__init__.cpython-310.pyc,, +transformers/__pycache__/activations.cpython-310.pyc,, +transformers/__pycache__/activations_tf.cpython-310.pyc,, 
+transformers/__pycache__/audio_utils.cpython-310.pyc,, +transformers/__pycache__/cache_utils.cpython-310.pyc,, +transformers/__pycache__/configuration_utils.cpython-310.pyc,, +transformers/__pycache__/convert_graph_to_onnx.cpython-310.pyc,, +transformers/__pycache__/convert_pytorch_checkpoint_to_tf2.cpython-310.pyc,, +transformers/__pycache__/convert_slow_tokenizer.cpython-310.pyc,, +transformers/__pycache__/convert_slow_tokenizers_checkpoints_to_fast.cpython-310.pyc,, +transformers/__pycache__/convert_tf_hub_seq_to_seq_bert_to_pytorch.cpython-310.pyc,, +transformers/__pycache__/debug_utils.cpython-310.pyc,, +transformers/__pycache__/dependency_versions_check.cpython-310.pyc,, +transformers/__pycache__/dependency_versions_table.cpython-310.pyc,, +transformers/__pycache__/dynamic_module_utils.cpython-310.pyc,, +transformers/__pycache__/feature_extraction_sequence_utils.cpython-310.pyc,, +transformers/__pycache__/feature_extraction_utils.cpython-310.pyc,, +transformers/__pycache__/file_utils.cpython-310.pyc,, +transformers/__pycache__/hf_argparser.cpython-310.pyc,, +transformers/__pycache__/hyperparameter_search.cpython-310.pyc,, +transformers/__pycache__/image_processing_base.cpython-310.pyc,, +transformers/__pycache__/image_processing_utils.cpython-310.pyc,, +transformers/__pycache__/image_processing_utils_fast.cpython-310.pyc,, +transformers/__pycache__/image_transforms.cpython-310.pyc,, +transformers/__pycache__/image_utils.cpython-310.pyc,, +transformers/__pycache__/keras_callbacks.cpython-310.pyc,, +transformers/__pycache__/modelcard.cpython-310.pyc,, +transformers/__pycache__/modeling_attn_mask_utils.cpython-310.pyc,, +transformers/__pycache__/modeling_flash_attention_utils.cpython-310.pyc,, +transformers/__pycache__/modeling_flax_outputs.cpython-310.pyc,, +transformers/__pycache__/modeling_flax_pytorch_utils.cpython-310.pyc,, +transformers/__pycache__/modeling_flax_utils.cpython-310.pyc,, 
+transformers/__pycache__/modeling_gguf_pytorch_utils.cpython-310.pyc,, +transformers/__pycache__/modeling_outputs.cpython-310.pyc,, +transformers/__pycache__/modeling_rope_utils.cpython-310.pyc,, +transformers/__pycache__/modeling_tf_outputs.cpython-310.pyc,, +transformers/__pycache__/modeling_tf_pytorch_utils.cpython-310.pyc,, +transformers/__pycache__/modeling_tf_utils.cpython-310.pyc,, +transformers/__pycache__/modeling_utils.cpython-310.pyc,, +transformers/__pycache__/optimization.cpython-310.pyc,, +transformers/__pycache__/optimization_tf.cpython-310.pyc,, +transformers/__pycache__/processing_utils.cpython-310.pyc,, +transformers/__pycache__/pytorch_utils.cpython-310.pyc,, +transformers/__pycache__/safetensors_conversion.cpython-310.pyc,, +transformers/__pycache__/testing_utils.cpython-310.pyc,, +transformers/__pycache__/tf_utils.cpython-310.pyc,, +transformers/__pycache__/time_series_utils.cpython-310.pyc,, +transformers/__pycache__/tokenization_utils.cpython-310.pyc,, +transformers/__pycache__/tokenization_utils_base.cpython-310.pyc,, +transformers/__pycache__/tokenization_utils_fast.cpython-310.pyc,, +transformers/__pycache__/trainer.cpython-310.pyc,, +transformers/__pycache__/trainer_callback.cpython-310.pyc,, +transformers/__pycache__/trainer_pt_utils.cpython-310.pyc,, +transformers/__pycache__/trainer_seq2seq.cpython-310.pyc,, +transformers/__pycache__/trainer_utils.cpython-310.pyc,, +transformers/__pycache__/training_args.cpython-310.pyc,, +transformers/__pycache__/training_args_seq2seq.cpython-310.pyc,, +transformers/__pycache__/training_args_tf.cpython-310.pyc,, +transformers/activations.py,sha256=EMN-kVzitS1TmltS7Kr2ROKwxW0oLbAHeAmNdDQuvu4,8177 +transformers/activations_tf.py,sha256=u2Y9dgDRgW-YbN_J-xmd05EK4p24rV8ZkzrQzpz4lCI,4689 +transformers/agents/__init__.py,sha256=wVjvkIafUIotTIDrniFMuqMc_iyq4MGAG9aim8VYFMU,2859 +transformers/agents/__pycache__/__init__.cpython-310.pyc,, +transformers/agents/__pycache__/agent_types.cpython-310.pyc,, 
+transformers/agents/__pycache__/agents.cpython-310.pyc,, +transformers/agents/__pycache__/default_tools.cpython-310.pyc,, +transformers/agents/__pycache__/document_question_answering.cpython-310.pyc,, +transformers/agents/__pycache__/evaluate_agent.cpython-310.pyc,, +transformers/agents/__pycache__/image_question_answering.cpython-310.pyc,, +transformers/agents/__pycache__/llm_engine.cpython-310.pyc,, +transformers/agents/__pycache__/monitoring.cpython-310.pyc,, +transformers/agents/__pycache__/prompts.cpython-310.pyc,, +transformers/agents/__pycache__/python_interpreter.cpython-310.pyc,, +transformers/agents/__pycache__/search.cpython-310.pyc,, +transformers/agents/__pycache__/speech_to_text.cpython-310.pyc,, +transformers/agents/__pycache__/text_to_speech.cpython-310.pyc,, +transformers/agents/__pycache__/tools.cpython-310.pyc,, +transformers/agents/__pycache__/translation.cpython-310.pyc,, +transformers/agents/agent_types.py,sha256=yjhBd2nxYkMYLqCfo4lbMUnlo7yT_wbZ_a1nHTSM7jc,8373 +transformers/agents/agents.py,sha256=7kuR4HM0jpjYS25ePrhrAF1X993De2d-Emb2XNs0Czg,52066 +transformers/agents/default_tools.py,sha256=kY4Ae9yn1l_hs1E-wQe3zT48WBU9hitIsBPiCvZBFVU,5252 +transformers/agents/document_question_answering.py,sha256=-xI5v_R5MATPdra5VMHAEi1Zt3oiRv_fJu4N16Expkk,3634 +transformers/agents/evaluate_agent.py,sha256=-q4K6vyCFIbPx8uYtf85jh8i7a6wFY_u5XOiRas58UQ,14929 +transformers/agents/image_question_answering.py,sha256=yk7pGDeL0MVoRj56H11qt7JPwNTR-8q5dD80kbaHFW8,2004 +transformers/agents/llm_engine.py,sha256=Uw2fsrqj92cGatLMOhWm7AWqSzbs9spidldbiriCumo,9233 +transformers/agents/monitoring.py,sha256=l8M3Gk3h0l7RYhDTqKfv318ma9S0xxal2SO25J-sD9g,4684 +transformers/agents/prompts.py,sha256=qNRNH562oGvoAh_lOEluo6jaTpl_WAtc6tQaIyHTJWE,31416 +transformers/agents/python_interpreter.py,sha256=yT_a2h2a6lMj4QCCTSbal8jFT2n49FZyzCjzKJe5l8Q,38358 +transformers/agents/search.py,sha256=Ai0qXG6ytcUW67VrrM40TVY-uGoJtWvp0LL4jleYfIs,2777 
+transformers/agents/speech_to_text.py,sha256=9VNfM56c-H9bL0g84hzGkDCT1_EWGwBtF0A8mQYpJ88,1496 +transformers/agents/text_to_speech.py,sha256=FhVbSolKvMlSB454HF_RqtAULygdQ8xMas_B2G8gIyw,2468 +transformers/agents/tools.py,sha256=YWAKzQnK2PzHocwOQGpb4icGTngbv_UXBWnH8P538-c,39570 +transformers/agents/translation.py,sha256=P-Dnk3rUA3NPrJKiveRKptCwJ_M-3k1JEPyNze7VAAc,8671 +transformers/audio_utils.py,sha256=N7DTa_uesPyXpelZAMq-VLQ6S6D6mLTp4U1MuMSPMC0,50302 +transformers/benchmark/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +transformers/benchmark/__pycache__/__init__.cpython-310.pyc,, +transformers/benchmark/__pycache__/benchmark.cpython-310.pyc,, +transformers/benchmark/__pycache__/benchmark_args.cpython-310.pyc,, +transformers/benchmark/__pycache__/benchmark_args_tf.cpython-310.pyc,, +transformers/benchmark/__pycache__/benchmark_args_utils.cpython-310.pyc,, +transformers/benchmark/__pycache__/benchmark_tf.cpython-310.pyc,, +transformers/benchmark/__pycache__/benchmark_utils.cpython-310.pyc,, +transformers/benchmark/benchmark.py,sha256=efSeIH606x5u9IA6CkKXL7t7CoH9kORKSrXypR8gfHk,10747 +transformers/benchmark/benchmark_args.py,sha256=djFAjBC11MnI-auxByCWSVVAqRqXGV650Leosd60VmA,4050 +transformers/benchmark/benchmark_args_tf.py,sha256=bAcsgf7bOUyoo8AGFSiQhciR8S5wMJqnL5iVlvbQzow,4735 +transformers/benchmark/benchmark_args_utils.py,sha256=pkgvor3IuC5v9BubOCFVuwbgGHsoGkNp1CDdgJlyBi4,6499 +transformers/benchmark/benchmark_tf.py,sha256=wSXldL0TVlNzBB0YOUTQ9REia8Eib6nz4WsNg6UXBsU,13246 +transformers/benchmark/benchmark_utils.py,sha256=ZzHCNnPKtygQpdq3vD1q_p5Y2rwo4ctmKjOc5e8sca8,37599 +transformers/cache_utils.py,sha256=6H3QGJPHBOE5p4AF3wa02F2hn522rceOCuJ7vz3n_q0,100343 +transformers/commands/__init__.py,sha256=aFO3I7C6G9OLA9JZSc_yMaZl0glOQtjNPjqMFfu9wfQ,923 +transformers/commands/__pycache__/__init__.cpython-310.pyc,, +transformers/commands/__pycache__/add_new_model_like.cpython-310.pyc,, +transformers/commands/__pycache__/convert.cpython-310.pyc,, 
+transformers/commands/__pycache__/download.cpython-310.pyc,, +transformers/commands/__pycache__/env.cpython-310.pyc,, +transformers/commands/__pycache__/lfs.cpython-310.pyc,, +transformers/commands/__pycache__/run.cpython-310.pyc,, +transformers/commands/__pycache__/serving.cpython-310.pyc,, +transformers/commands/__pycache__/train.cpython-310.pyc,, +transformers/commands/__pycache__/transformers_cli.cpython-310.pyc,, +transformers/commands/__pycache__/user.cpython-310.pyc,, +transformers/commands/add_new_model_like.py,sha256=Nl8vdHyP1it2FieiiymuEmyjURPytbBNNhgSW0QsrgU,71130 +transformers/commands/convert.py,sha256=lHz2sQti9HubMNwObLCc_sw9Y7L-IPcaYJMSJR_AVWM,7068 +transformers/commands/download.py,sha256=GKPadx-YGBL7dHJSEcUp-QNOP3R2L71-gPGP0z6NNQI,2395 +transformers/commands/env.py,sha256=g54Hwtyp3ZpYq782cKwvLrnliLg2PhUrZ7yOQsOgZ5c,5756 +transformers/commands/lfs.py,sha256=4QDGBbJxBcRpgmhHXvigZQUsXuTPwrRY60t1qGjzfWU,8001 +transformers/commands/run.py,sha256=nyEe2lOoj6e0EOxjKeF08hdW9WVWa101r9hWXl9v3Jo,4249 +transformers/commands/serving.py,sha256=CnNHFVM_SK_-aNxEJnq7vJK5dBqDBw7bxxQiv5truEU,8027 +transformers/commands/train.py,sha256=FKlH-IYr3mVc7_mS5ObCyJaHs9JincYLg3Zt6WQz1ag,6341 +transformers/commands/transformers_cli.py,sha256=DH1WTp42yt4oIZo67ldo1qV_IuFgrhQ3hDKEYenZU1U,1850 +transformers/commands/user.py,sha256=UxHTw7L2qU8Svdj6v2Nb8OB8nGI5dRXawrXt398DkXQ,7091 +transformers/configuration_utils.py,sha256=f3lPLvLnMjtrkGoevHq6lGmdJ2qoF4f0iSWRDGlujG0,57581 +transformers/convert_graph_to_onnx.py,sha256=eoA4_4LmxwK-dirCgB0A75dAIMn_v9BoYEoJ_HaJc1Q,20151 +transformers/convert_pytorch_checkpoint_to_tf2.py,sha256=oV_wzdVHbtpztPGQC_msuHh4zudzoj3WDABDUiDzKkE,14539 +transformers/convert_slow_tokenizer.py,sha256=1GM1B5a2T8AtWqTlSXl4AT_-W6_1mXPQr_9Brq5YuOA,63233 +transformers/convert_slow_tokenizers_checkpoints_to_fast.py,sha256=5FiWOleZOLTCtR7T8h2l1-XaryCz8VPbOfT8lK46vMQ,5076 
+transformers/convert_tf_hub_seq_to_seq_bert_to_pytorch.py,sha256=dy9yjETinWJl2MeQ-wv1J5HtmGm3j6Ki3r65optejRg,2910 +transformers/data/__init__.py,sha256=Ey608EH-9P9qajKIrBxopGKH4FwyEL5tB8fQ3SfzSJk,1455 +transformers/data/__pycache__/__init__.cpython-310.pyc,, +transformers/data/__pycache__/data_collator.cpython-310.pyc,, +transformers/data/data_collator.py,sha256=w7Ke_7StXhEuIqVMC6Fn0klaPnnDQj7lPOjXQaxYygE,86356 +transformers/data/datasets/__init__.py,sha256=PGzUJjdmTPOPMyjV4-Tj3sNrmmh-lspjyxrVbrfJoX8,909 +transformers/data/datasets/__pycache__/__init__.cpython-310.pyc,, +transformers/data/datasets/__pycache__/glue.cpython-310.pyc,, +transformers/data/datasets/__pycache__/language_modeling.cpython-310.pyc,, +transformers/data/datasets/__pycache__/squad.cpython-310.pyc,, +transformers/data/datasets/glue.py,sha256=K3h2KxjIg0kWegPCw6ikbOL-lCFbKoQewb7R8wLZoIc,6163 +transformers/data/datasets/language_modeling.py,sha256=E-VGwuyb09J4KmV8v37bNH5in90wDPuZHCYsqGdT7W0,23721 +transformers/data/datasets/squad.py,sha256=OUTQDd687SQns7HRWDCgAjnuo_ZXihifLS6jF2bhUhc,9219 +transformers/data/metrics/__init__.py,sha256=o9t_VTQtqU3lEhqvocDzFMm7OvAKD-uxrjPWy0r74BI,3632 +transformers/data/metrics/__pycache__/__init__.cpython-310.pyc,, +transformers/data/metrics/__pycache__/squad_metrics.cpython-310.pyc,, +transformers/data/metrics/squad_metrics.py,sha256=mP6eaDcGTLsS4EhnvnD3U_Yyvcua_LVgElCkuxy2XJE,29697 +transformers/data/processors/__init__.py,sha256=lvN5mp9mdrr5v6QvZT6VcoZ78zZUvXiumTm6Gdvlgvo,1014 +transformers/data/processors/__pycache__/__init__.cpython-310.pyc,, +transformers/data/processors/__pycache__/glue.cpython-310.pyc,, +transformers/data/processors/__pycache__/squad.cpython-310.pyc,, +transformers/data/processors/__pycache__/utils.cpython-310.pyc,, +transformers/data/processors/__pycache__/xnli.cpython-310.pyc,, +transformers/data/processors/glue.py,sha256=1sHcfSWbl-ooNIEu3emKmDlpW-95UZT1JfDlGYx5TFA,23218 
+transformers/data/processors/squad.py,sha256=_4WNLcZA6TAy7uNZO46948tmL5ngVF0LSB0y8nUn6rs,33153 +transformers/data/processors/utils.py,sha256=GSaZbJ--XYq57vqyRVx_5LHSR4tklzFyR7ZKHGWsTAs,13829 +transformers/data/processors/xnli.py,sha256=sgcYz9YSfHY9NS0LO_YeFRRjq-nJFsDhFUP4NJeu-Q4,3481 +transformers/debug_utils.py,sha256=6q8ArB104GdcIC2qfBQzKLxO7PfXmHEKdYtfL2FOK2w,12907 +transformers/dependency_versions_check.py,sha256=6HbgtT2Wp-QZGOAdyUOklHvNA4rOVITGHrX34dtMOqg,2115 +transformers/dependency_versions_table.py,sha256=2hnV1vVMM-uMroIBJbnxId9QeftOOL6W6pon7EOrIjM,3573 +transformers/dynamic_module_utils.py,sha256=DhCKKFRZkyVpqGYj-9dk8SZqvDHq1KW37plnUHvWhNI,29689 +transformers/feature_extraction_sequence_utils.py,sha256=xE5f0cSWWodEjCwNDsG0Dl9kL3B9KPs-SsF4YTWNh0M,18307 +transformers/feature_extraction_utils.py,sha256=Db51L6rXokWQ3Zu7HNp2IWG-_W2MKoBceoa2QMcv6JU,30508 +transformers/file_utils.py,sha256=P86AFIAgEjfSmlyPCK-GFndYLCI6OJm_UmehBJb5diU,3745 +transformers/generation/__init__.py,sha256=rw0kiEMJVs2VTKFI1Td2gPASLQNqZUJTJ_52Kgc1lRM,12303 +transformers/generation/__pycache__/__init__.cpython-310.pyc,, +transformers/generation/__pycache__/beam_constraints.cpython-310.pyc,, +transformers/generation/__pycache__/beam_search.cpython-310.pyc,, +transformers/generation/__pycache__/candidate_generator.cpython-310.pyc,, +transformers/generation/__pycache__/configuration_utils.cpython-310.pyc,, +transformers/generation/__pycache__/flax_logits_process.cpython-310.pyc,, +transformers/generation/__pycache__/flax_utils.cpython-310.pyc,, +transformers/generation/__pycache__/logits_process.cpython-310.pyc,, +transformers/generation/__pycache__/stopping_criteria.cpython-310.pyc,, +transformers/generation/__pycache__/streamers.cpython-310.pyc,, +transformers/generation/__pycache__/tf_logits_process.cpython-310.pyc,, +transformers/generation/__pycache__/tf_utils.cpython-310.pyc,, +transformers/generation/__pycache__/utils.cpython-310.pyc,, 
+transformers/generation/__pycache__/watermarking.cpython-310.pyc,, +transformers/generation/beam_constraints.py,sha256=Yt9dtQARVET_WaC26Sil49DO-b2YfosyhP2Rx5ILReI,19274 +transformers/generation/beam_search.py,sha256=rGjKNmKPa61POCkEh5VmZn9oUaqM3eIJ1jXhkn7CPYU,49536 +transformers/generation/candidate_generator.py,sha256=ApX5CsOYKLg2ObF8Z32J4O-aHtqj2LnMu1KYQUxGOrc,43885 +transformers/generation/configuration_utils.py,sha256=reKqRS93DPE7rG21N0P2zUzsz-B4NaS5jx0AiDU2-gM,84725 +transformers/generation/flax_logits_process.py,sha256=w5WaWXrp6QmD4aqLYrXgjqpb21rpPV3rzL_etJ1RBhI,23007 +transformers/generation/flax_utils.py,sha256=hhIx5hgfMpfcpCY0uV4P3pjZvOjRXt5RheDUZHYZVZU,50469 +transformers/generation/logits_process.py,sha256=nDL5ElM52rBze2pfgEyn2JJSsm66o8bUTwmRepQjCHs,137793 +transformers/generation/stopping_criteria.py,sha256=U0vcGHAuBAxuSbV2Jb7nFd2n0AkxdMSeI_8rvtG5R4I,28583 +transformers/generation/streamers.py,sha256=Tn1mP3Tb6jTSoJxG0wf6Dbf0SoHC15_r_wJLOYJUGkw,13026 +transformers/generation/tf_logits_process.py,sha256=qLtG-EBD4SgK6QaSYApwpegJ4FrlR-rKdxF0FMjhAG8,28714 +transformers/generation/tf_utils.py,sha256=TjWN_pKijyZs0oDz8w7Jff_vmVz8RE6srcB2ZtA0mZI,175496 +transformers/generation/utils.py,sha256=H_qFtLejgiggPeDEENfkN5LwpJ24YRHdeLBb2ZVFmLY,254382 +transformers/generation/watermarking.py,sha256=8d4AFs9CRnGsguiqLKy8Lz6g2pg0wLAaZtAwA7qsIxA,24416 +transformers/hf_argparser.py,sha256=hNp_XsFe5vKwn6FTZ-bckyGDk0RVqMBv0WUenYHfJtM,20378 +transformers/hyperparameter_search.py,sha256=wmfAWk_NTUQj3MezO_6CaDaJyUt9pbARcs-tbo_BdeM,4171 +transformers/image_processing_base.py,sha256=Jl2W4htov0a32MNSoAr5NTQCXFUUabteSFKBxuS9yLU,25430 +transformers/image_processing_utils.py,sha256=EDSL4O-qWBofy1a5sZLNtSG9b-PvFpteAmP7U9_ai0I,12742 +transformers/image_processing_utils_fast.py,sha256=Zy7-028ExRQEUazYRR5VPMyfWfUr29_d30YSGV9r9Tk,4141 +transformers/image_transforms.py,sha256=Ge6l2FQo2vDaX0kD-q0G5zth3FUBWrC6oNHUbt_R_mM,36166 
+transformers/image_utils.py,sha256=uSmEJIHng_bAUzg-mKiX8ebWSK_YvWxneSt7T5wItDQ,41035 +transformers/integrations/__init__.py,sha256=n51rpbBkB6SDge46rfn14ehOK-Ny5iDcafJNZhgNpO4,7034 +transformers/integrations/__pycache__/__init__.cpython-310.pyc,, +transformers/integrations/__pycache__/aqlm.cpython-310.pyc,, +transformers/integrations/__pycache__/awq.cpython-310.pyc,, +transformers/integrations/__pycache__/bitnet.cpython-310.pyc,, +transformers/integrations/__pycache__/bitsandbytes.cpython-310.pyc,, +transformers/integrations/__pycache__/deepspeed.cpython-310.pyc,, +transformers/integrations/__pycache__/eetq.cpython-310.pyc,, +transformers/integrations/__pycache__/executorch.cpython-310.pyc,, +transformers/integrations/__pycache__/fbgemm_fp8.cpython-310.pyc,, +transformers/integrations/__pycache__/flash_attention.cpython-310.pyc,, +transformers/integrations/__pycache__/flex_attention.cpython-310.pyc,, +transformers/integrations/__pycache__/fsdp.cpython-310.pyc,, +transformers/integrations/__pycache__/ggml.cpython-310.pyc,, +transformers/integrations/__pycache__/higgs.cpython-310.pyc,, +transformers/integrations/__pycache__/hqq.cpython-310.pyc,, +transformers/integrations/__pycache__/integration_utils.cpython-310.pyc,, +transformers/integrations/__pycache__/peft.cpython-310.pyc,, +transformers/integrations/__pycache__/quanto.cpython-310.pyc,, +transformers/integrations/__pycache__/sdpa_attention.cpython-310.pyc,, +transformers/integrations/__pycache__/tiktoken.cpython-310.pyc,, +transformers/integrations/__pycache__/tpu.cpython-310.pyc,, +transformers/integrations/__pycache__/vptq.cpython-310.pyc,, +transformers/integrations/aqlm.py,sha256=g4ujHmqCr66ugoP93lemtIaMQLLDMDZ1TeeR0U1MIOw,4536 +transformers/integrations/awq.py,sha256=gZjVCxWhVErViMh1AHtCWw3bjtHRGwlu5ick6Qy2pFY,20580 +transformers/integrations/bitnet.py,sha256=6GAtWEDsaelz79kPx2mHXcz273r6gDl0nrO9RaW-8EU,10706 +transformers/integrations/bitsandbytes.py,sha256=BXP6UURMb8YTQGcDRyCVnbfzIhvizjgcISOWCebrz-E,24797 
+transformers/integrations/deepspeed.py,sha256=Ui0mjL7i7sn6kI1pmYoj0bovrFvogV6xyW6qpbHnPqM,19136 +transformers/integrations/eetq.py,sha256=ZDxYk6vfjGsjQoQeFOQ2odn58hc6xrYbwdKtfLdDp-Q,5365 +transformers/integrations/executorch.py,sha256=Kle1GRkyPrbNw4GoMIA5dXPVI-uOLYH5GOn2yn6iAg0,9445 +transformers/integrations/fbgemm_fp8.py,sha256=g322A5Hhz7agAACHYgeEvh6P4gWcnRt1BxkUBwYbMLA,7431 +transformers/integrations/flash_attention.py,sha256=Y3uV1y5mPEix0ygglQNHqvkIrWDdvxH8amPMxbNJdL4,2293 +transformers/integrations/flex_attention.py,sha256=3V3sBD3RJI2ME3zMs4UEhkPVLVxajk5iadZkmxwX0sY,1487 +transformers/integrations/fsdp.py,sha256=_jkV54fIk9TIz8ZmU_51KIJUzc3yEMi_nE-dUdCXwFw,1053 +transformers/integrations/ggml.py,sha256=OsOEzqlXhpqrp7Pnfsa3Wtwhie1CWOHnR5JxMWukhfw,26303 +transformers/integrations/higgs.py,sha256=UMo-sh6tkTU13yKhnS1mIwMiSh_DPUMNEqNwOaUfQVw,31349 +transformers/integrations/hqq.py,sha256=meBkmmy99NgsTG0Oir9P_DMEifRgpbuCvbmgu73KTTM,5088 +transformers/integrations/integration_utils.py,sha256=QNFXaCXnMzrdoC546xWM5TnHZjzww-sVhrK-PDoF5MI,97000 +transformers/integrations/peft.py,sha256=B6QezThvFBaXD6r3QhEySY8YVSsulp7ZyXx5FAp3xWM,27464 +transformers/integrations/quanto.py,sha256=UfGtKGRFB8eQiUtTBiyENo4B5roY1DlmlCCWrPbjl5g,4378 +transformers/integrations/sdpa_attention.py,sha256=n4UJqBG0DGDENV5n242ypaD8aPx3XxVFjeQayt8mFJM,2255 +transformers/integrations/tiktoken.py,sha256=9m5PrDbp7F6vpPMQnD5zVACi3aMlRTibjbwpV_C3_bE,1652 +transformers/integrations/tpu.py,sha256=Y8YMwIrEgh1s-OCNbOQZFD1_3Tvqpo3e1H6eECTceSU,1392 +transformers/integrations/vptq.py,sha256=JxwcJsdKbU_Hhi0W9urWXuInRFOEuAVOU1BQoT-IyuA,4545 +transformers/keras_callbacks.py,sha256=i95nrEd_QsEo10x3T9RqZf3xGzfPiMOhmU1Ef_HvnGE,20675 +transformers/kernels/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +transformers/kernels/__pycache__/__init__.cpython-310.pyc,, +transformers/kernels/deformable_detr/cpu/ms_deform_attn_cpu.cpp,sha256=VcCGm9IrvgVvmyZt0KyP16Q-ONmbeg6bKwccP6KadL0,1255 
+transformers/kernels/deformable_detr/cpu/ms_deform_attn_cpu.h,sha256=nvVsKj9nabQ7IaNY4di5xVx6u-0lIifQvLg2JCoxiik,1138 +transformers/kernels/deformable_detr/cuda/ms_deform_attn_cuda.cu,sha256=9X92GS3siNseh_ToGfwQm-_N_o_ZQBmgm0rAa6tpCzk,7553 +transformers/kernels/deformable_detr/cuda/ms_deform_attn_cuda.cuh,sha256=HD7bMWLoGrDKw7XUPPgILCAdOSo1IC8RIv_KyKAnLb0,61539 +transformers/kernels/deformable_detr/cuda/ms_deform_attn_cuda.h,sha256=xxP17aer-SiU9J5ASLHdtLIyhFmHC5iLcPIPNW2xkrg,1694 +transformers/kernels/deformable_detr/cuda/ms_deform_im2col_cuda.cuh,sha256=BRN8-yfSHY8ChLij8jFl2_z2LL0LEFKuVF6Byi-YLAY,54695 +transformers/kernels/deformable_detr/ms_deform_attn.h,sha256=H2bBXGyl0R-v2DqGVz11asoRvxbjZ9iWB9djomZTpgY,1837 +transformers/kernels/deformable_detr/vision.cpp,sha256=8RvZy7P_MMx5QEszo_MwNODddJLQ8mKcmmMfgLYC_HA,798 +transformers/kernels/deta/cpu/ms_deform_attn_cpu.cpp,sha256=VcCGm9IrvgVvmyZt0KyP16Q-ONmbeg6bKwccP6KadL0,1255 +transformers/kernels/deta/cpu/ms_deform_attn_cpu.h,sha256=nvVsKj9nabQ7IaNY4di5xVx6u-0lIifQvLg2JCoxiik,1138 +transformers/kernels/deta/cuda/ms_deform_attn_cuda.cu,sha256=M5-bW9g5z-upTFMNPIfnyLAqKTxGMCjAPqBr0GmWHX8,7360 +transformers/kernels/deta/cuda/ms_deform_attn_cuda.cuh,sha256=hygB20Vh3RttOSdCuTFz8V0d3CXNp-Q89x22rYmD258,61433 +transformers/kernels/deta/cuda/ms_deform_attn_cuda.h,sha256=rPWOOMo3QyFdB5kMiexpApLFZ4dnRtx4CluEAGwsfO8,1139 +transformers/kernels/deta/cuda/ms_deform_im2col_cuda.cuh,sha256=BRN8-yfSHY8ChLij8jFl2_z2LL0LEFKuVF6Byi-YLAY,54695 +transformers/kernels/deta/ms_deform_attn.h,sha256=H2bBXGyl0R-v2DqGVz11asoRvxbjZ9iWB9djomZTpgY,1837 +transformers/kernels/deta/vision.cpp,sha256=8RvZy7P_MMx5QEszo_MwNODddJLQ8mKcmmMfgLYC_HA,798 +transformers/kernels/falcon_mamba/__init__.py,sha256=bt0j851F1uuH7flSsTvIqdh9zdKVTOVKWt3datb15SI,721 +transformers/kernels/falcon_mamba/__pycache__/__init__.cpython-310.pyc,, +transformers/kernels/falcon_mamba/__pycache__/selective_scan_with_ln_interface.cpython-310.pyc,, 
+transformers/kernels/falcon_mamba/selective_scan_with_ln_interface.py,sha256=649oJD0sox1I-TCkZuRMjYm3tWQkQ3VoPXLNeOcN_ss,19731 +transformers/kernels/mra/cuda_kernel.cu,sha256=LxxRYTymSoBEQpWXHA0PMzwZwpolcwX7mFAjwU8-ZMc,11678 +transformers/kernels/mra/cuda_kernel.h,sha256=UJvYq_MDzhcp07bZpYcOBn8ZGFcf_Ax1dynuiVTBvmA,1682 +transformers/kernels/mra/cuda_launch.cu,sha256=Ox5MTACriC30CGyn-g1Kb5EgQSMAZSaN6fpit3xLFWc,4072 +transformers/kernels/mra/cuda_launch.h,sha256=RVCkN_euasvgPK0zADNRvRYGWd4ah5l9X-7UG_AcdH8,707 +transformers/kernels/mra/torch_extension.cpp,sha256=N0YdBLVX0lZabckJzV_RYTHS2atCNvn13E4Ivobt25g,1405 +transformers/kernels/rwkv/wkv_cuda.cu,sha256=EvaUrEnw_qr2EjMKP-Pq7VPzFfGlMJnFhdHNLtn1fPU,6219 +transformers/kernels/rwkv/wkv_cuda_bf16.cu,sha256=DG9hTtOAlrnpDFahjt-MmnOxjMuhGU55GPsmV21HtrQ,6633 +transformers/kernels/rwkv/wkv_op.cpp,sha256=qSExhKdT6p3hyaTv5SypCnH_c7EmaX6HbhTcCntvZWg,4022 +transformers/kernels/yoso/common.h,sha256=Tq2rOUtE8Y4DRAUrRISvwIwVI3u8JBf21WgWSAYiDlQ,273 +transformers/kernels/yoso/common_cuda.h,sha256=Sji70AuVcuZSotLF7Gotmun9MJuOHo8wEkxizKXLRtc,258 +transformers/kernels/yoso/common_cuda_device.h,sha256=y6WUgAiapnMKqthRMS5s-DMSWNVkar_i8g4KPFvqiuk,2063 +transformers/kernels/yoso/fast_lsh_cumulation.cu,sha256=LA4LGNgyXT3osIyQtFBcRanSyNQWm8yqmpz7AeLP7cw,19061 +transformers/kernels/yoso/fast_lsh_cumulation.h,sha256=1cTWZjOm751HGiEB5P-UPJ8SE1VO7XRyXmBgyxYDyjI,1575 +transformers/kernels/yoso/fast_lsh_cumulation_cuda.cu,sha256=HKGLWl-WFz5BXjaAPHTNTbG6IUkJjhBdvFf2K7hrDVQ,32870 +transformers/kernels/yoso/fast_lsh_cumulation_cuda.h,sha256=_KGI8HQbVFtCN5KAcSGpyiJ2foGi26RKen138CUc2fY,5490 +transformers/kernels/yoso/fast_lsh_cumulation_torch.cpp,sha256=-Rh7o39Z3rtOPwNnEM-c51TCqywpVdK0WVaA7VRrXbQ,3154 +transformers/loss/__init__.py,sha256=qETsqCwayu6Ymj_J4_A_eiwiaMRHQ0noWKM35naanzc,606 +transformers/loss/__pycache__/__init__.cpython-310.pyc,, +transformers/loss/__pycache__/loss_deformable_detr.cpython-310.pyc,, 
+transformers/loss/__pycache__/loss_for_object_detection.cpython-310.pyc,, +transformers/loss/__pycache__/loss_rt_detr.cpython-310.pyc,, +transformers/loss/__pycache__/loss_utils.cpython-310.pyc,, +transformers/loss/loss_deformable_detr.py,sha256=6nybwni_dj2_H8UEe3e2o3kymMVHcbFhPpjPyx87Kqc,7335 +transformers/loss/loss_for_object_detection.py,sha256=__Siy5m1zEQPZk0JbUOcA3ZtoJ0jRt0qq7dwGR6MGtA,24594 +transformers/loss/loss_rt_detr.py,sha256=04kG6ucpCbsuim9etCZpoWLS2ZKI3Ui0PzDdhGPFqFY,21734 +transformers/loss/loss_utils.py,sha256=9E4KOruEwVAvLx5WKlXWxRgrnWqWHjCs2eJ7gPTwyrk,5795 +transformers/modelcard.py,sha256=7oXrCHfeWDdwK5dSjuNDUrs-rqrjCmsfY0s2jXsVfjM,35555 +transformers/modeling_attn_mask_utils.py,sha256=FXKojWtrHCZ3B68Lvrfd8e4berJl3z5UR_e3mdUl688,21234 +transformers/modeling_flash_attention_utils.py,sha256=4sJMMZa5wi-S92LqzoPcT_NHb79dn3Z1yGUglVxZAxE,17046 +transformers/modeling_flax_outputs.py,sha256=wXse1g9VyQyVOZ9DrbPALeoZBdS45fsBA9fNrGnwaZc,41961 +transformers/modeling_flax_pytorch_utils.py,sha256=ua9Y5tMv5AevLU0CUS3GLqcEueCupW_ozL77FH1AG7U,21634 +transformers/modeling_flax_utils.py,sha256=koz6c6GgLrK2YpJdLUQr_WsBPKl9QE9J_G_o_7FbkOo,61486 +transformers/modeling_gguf_pytorch_utils.py,sha256=DWjdeE3FqkrKsLlK3TxAOs6nxST3hvnxrnhiY4DEjZM,19772 +transformers/modeling_outputs.py,sha256=CYpjijqZNOVUc-kixDLI-jMFru9MhpDQvnncSfp0wb4,112567 +transformers/modeling_rope_utils.py,sha256=KQP_e3ltxrC4tnYpi4w4DMMZT-IVEkHf-TN1Z4VwvTQ,28557 +transformers/modeling_tf_outputs.py,sha256=nXCMOmFZ7IZFVuiQr7EU2ciV9QqwOYPYld_r2jBxVpE,56074 +transformers/modeling_tf_pytorch_utils.py,sha256=eJHbaqN6Orz3fGEeSyP213OMCjTWt3I53Ha2gStR9zo,27773 +transformers/modeling_tf_utils.py,sha256=WlNkvHIMXllppL1zJ0z5JWsq6Yzi8ifxqzZIEfGtzA4,166959 +transformers/modeling_utils.py,sha256=nTTof84QZ4rW48gRC4B-P68Ehbri3JtrPESJdQdB8a4,281878 +transformers/models/__init__.py,sha256=37GCati6C2V1a5uRo1DGaPi1vNExvaLIkZgFnCjjHng,4664 +transformers/models/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/albert/__init__.py,sha256=WjQ4NtNxKNj7Hvk9lA2OXmdgD_SNFp1wLS2eeL3-WoE,1154 +transformers/models/albert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/albert/__pycache__/configuration_albert.cpython-310.pyc,, +transformers/models/albert/__pycache__/convert_albert_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/albert/__pycache__/modeling_albert.cpython-310.pyc,, +transformers/models/albert/__pycache__/modeling_flax_albert.cpython-310.pyc,, +transformers/models/albert/__pycache__/modeling_tf_albert.cpython-310.pyc,, +transformers/models/albert/__pycache__/tokenization_albert.cpython-310.pyc,, +transformers/models/albert/__pycache__/tokenization_albert_fast.cpython-310.pyc,, +transformers/models/albert/configuration_albert.py,sha256=nwBi1Gg1MRw_z-9Pwr7kjtJykLOhj5_zJC9zGzggrfQ,8137 +transformers/models/albert/convert_albert_original_tf_checkpoint_to_pytorch.py,sha256=sj2zzT7a1poQRuEpKB_9RUHaIPV6uc0P5zhgvSBfZV0,2161 +transformers/models/albert/modeling_albert.py,sha256=rbM9k-zdZfH_qqKQCwkgxAaJKZQU2yM2aIJheQaTsSo,64790 +transformers/models/albert/modeling_flax_albert.py,sha256=_oJ81lgLHwfYC-9DQq96Isrlj99jI02X5mol4eyu_j4,41027 +transformers/models/albert/modeling_tf_albert.py,sha256=l4cknO2O9IvaGgQgHD5drqAWkEjfOvMjdc6oBooSoYc,69145 +transformers/models/albert/tokenization_albert.py,sha256=y71xBZjcpGMdpWk6PC8ljQ1O4V4FLPDHHk6QF2e-LbY,14531 +transformers/models/albert/tokenization_albert_fast.py,sha256=2I7-4DbfP_xh3kib611aL7FgFY3TE3R3jrCCY0KUquw,8866 +transformers/models/align/__init__.py,sha256=QqTKk-Z4BylY6EkBSlYvKXVhT2te-m2Al626OUAz-r4,1027 +transformers/models/align/__pycache__/__init__.cpython-310.pyc,, +transformers/models/align/__pycache__/configuration_align.cpython-310.pyc,, +transformers/models/align/__pycache__/convert_align_tf_to_hf.cpython-310.pyc,, +transformers/models/align/__pycache__/modeling_align.cpython-310.pyc,, +transformers/models/align/__pycache__/processing_align.cpython-310.pyc,, 
+transformers/models/align/configuration_align.py,sha256=O8IZG-4Rcc2vwQdfO_9yXtMh69EWabsQpcU0Zy7DrpY,16538 +transformers/models/align/convert_align_tf_to_hf.py,sha256=tzPoEMyLV_ckVngYdvJ6uAFZ6RgsuX55JYjEkIMtPTg,15536 +transformers/models/align/modeling_align.py,sha256=nBuKM89UxX3vfBsGky5y2Om_TikGIJ8yhKiUW-AaWC4,71972 +transformers/models/align/processing_align.py,sha256=BrpIj1U9nYwxnuOQM9NmsMwuxWJxK-2gyHxqhp-b9Ik,7311 +transformers/models/altclip/__init__.py,sha256=405IijUCYr1EGvOqg1xzds_GHOlxCl0HCsf1rI0wtPY,1033 +transformers/models/altclip/__pycache__/__init__.cpython-310.pyc,, +transformers/models/altclip/__pycache__/configuration_altclip.cpython-310.pyc,, +transformers/models/altclip/__pycache__/modeling_altclip.cpython-310.pyc,, +transformers/models/altclip/__pycache__/processing_altclip.cpython-310.pyc,, +transformers/models/altclip/configuration_altclip.py,sha256=Gnib9SMogm262A56roRiT7ITaVSHOv5skI_LwHRH0TU,18980 +transformers/models/altclip/modeling_altclip.py,sha256=b2hEJSG_wtTq1EBF1XbvB1zVr0JJPkULJqUEgjui_VQ,81014 +transformers/models/altclip/processing_altclip.py,sha256=ak5ALJowiGiUcIrgMOIK1OaBYGqneQQuwcxoEL_1qSI,6904 +transformers/models/aria/__init__.py,sha256=I3vYPjV-sDl0OAILLADGZ7hUkk9ZsmyZ8CEf9tie_dY,1066 +transformers/models/aria/__pycache__/__init__.cpython-310.pyc,, +transformers/models/aria/__pycache__/configuration_aria.cpython-310.pyc,, +transformers/models/aria/__pycache__/convert_aria_weights_to_hf.cpython-310.pyc,, +transformers/models/aria/__pycache__/image_processing_aria.cpython-310.pyc,, +transformers/models/aria/__pycache__/modeling_aria.cpython-310.pyc,, +transformers/models/aria/__pycache__/modular_aria.cpython-310.pyc,, +transformers/models/aria/__pycache__/processing_aria.cpython-310.pyc,, +transformers/models/aria/configuration_aria.py,sha256=Br2vw2B22bEXKORXuaWes-5g2M1iK7IKfwsWFFHnBTA,16105 +transformers/models/aria/convert_aria_weights_to_hf.py,sha256=MDnRUOYBeJdvWLYY3iKyC7wXE7QZktSsZzN9DQYWKeg,5931 
+transformers/models/aria/image_processing_aria.py,sha256=IBiNHSXzQ2ge7R3rOcB0FfdwZbaebPKByMnrEXLoaPw,22933 +transformers/models/aria/modeling_aria.py,sha256=7uBb7yVl65WelinVPPNA_RUuf-qsiUiZRmHEuQVylg0,70549 +transformers/models/aria/modular_aria.py,sha256=Fg2MtUwEXaNWL412ugpCt-D6nvF5vgpIGkgr0GCvd-Y,69611 +transformers/models/aria/processing_aria.py,sha256=xA809SduSc8gretUNUDrKrSgzjt8o8eQIcN5_tXvwPc,7659 +transformers/models/audio_spectrogram_transformer/__init__.py,sha256=a_YVwB1p4_PPeqPFWqFsGSGSQVTaSUXY0xsOd_Gflqs,1107 +transformers/models/audio_spectrogram_transformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/audio_spectrogram_transformer/__pycache__/configuration_audio_spectrogram_transformer.cpython-310.pyc,, +transformers/models/audio_spectrogram_transformer/__pycache__/convert_audio_spectrogram_transformer_original_to_pytorch.cpython-310.pyc,, +transformers/models/audio_spectrogram_transformer/__pycache__/feature_extraction_audio_spectrogram_transformer.cpython-310.pyc,, +transformers/models/audio_spectrogram_transformer/__pycache__/modeling_audio_spectrogram_transformer.cpython-310.pyc,, +transformers/models/audio_spectrogram_transformer/configuration_audio_spectrogram_transformer.py,sha256=m7jyBXJRTnsq7WLvGObn16eS-2QEvJ-yYyn4DTMhgis,5907 +transformers/models/audio_spectrogram_transformer/convert_audio_spectrogram_transformer_original_to_pytorch.py,sha256=clecd7YFd7_RqtMhbHrCjnC7KCiPZSRcJ7Ee7Eg_9Fw,11125 +transformers/models/audio_spectrogram_transformer/feature_extraction_audio_spectrogram_transformer.py,sha256=7azuno1_d7rZRetXaQ5M-gbPdMXmfQ9qQZsrUc4TGKE,9944 +transformers/models/audio_spectrogram_transformer/modeling_audio_spectrogram_transformer.py,sha256=94f9FarljO4S0XnRap5nO-yTluHOimHykkuKq3PbWHg,28425 +transformers/models/auto/__init__.py,sha256=b70YsNl9fYHKgw7tHRmDnlx9BBUHFrHGC1IPHWjD20s,17026 +transformers/models/auto/__pycache__/__init__.cpython-310.pyc,, +transformers/models/auto/__pycache__/auto_factory.cpython-310.pyc,, 
+transformers/models/auto/__pycache__/configuration_auto.cpython-310.pyc,, +transformers/models/auto/__pycache__/feature_extraction_auto.cpython-310.pyc,, +transformers/models/auto/__pycache__/image_processing_auto.cpython-310.pyc,, +transformers/models/auto/__pycache__/modeling_auto.cpython-310.pyc,, +transformers/models/auto/__pycache__/modeling_flax_auto.cpython-310.pyc,, +transformers/models/auto/__pycache__/modeling_tf_auto.cpython-310.pyc,, +transformers/models/auto/__pycache__/processing_auto.cpython-310.pyc,, +transformers/models/auto/__pycache__/tokenization_auto.cpython-310.pyc,, +transformers/models/auto/auto_factory.py,sha256=wBc5SzZKV1CxyR3RzocS9Vr4kCHCqM0XBTqthpjngb0,44542 +transformers/models/auto/configuration_auto.py,sha256=P5bDXAWsXqeUX1LoRGYWegLA-sS5g8sQTdA07Xm_dxk,43230 +transformers/models/auto/feature_extraction_auto.py,sha256=RS2QMs6iH-IMQDTmGKkGBp-6b70Pwcp9z-X8k7lPiI0,19725 +transformers/models/auto/image_processing_auto.py,sha256=v4SyS63ho2tW3Zt1CYzAaQ-boKsud_5UjUcPm0E4aMw,32769 +transformers/models/auto/modeling_auto.py,sha256=X3SkGsR6Y1rCevbvaVM-Yo99z-tSOk3u9SqWVhZigbw,77123 +transformers/models/auto/modeling_flax_auto.py,sha256=UTBFXa5sZxdO_663zWvhcJGpdt1bhpLCl337eZLB7H0,14568 +transformers/models/auto/modeling_tf_auto.py,sha256=hgO8Re6rW2lPm1r9etcjQLmgF2aiIVL5m1aWyyZ5szE,28420 +transformers/models/auto/processing_auto.py,sha256=FBqcOa4dTvB0GlD3vJ4GFV9POchMpuS24662Perj0bM,17794 +transformers/models/auto/tokenization_auto.py,sha256=B0lO1-KxcT-suHO1ZNV0CeF3hvvDj5kx3wQKEx8kXkg,51298 +transformers/models/autoformer/__init__.py,sha256=5xv9eb6R-4PmNJ4v-ogeo7pgobDRokFl4nWqqjnWII0,1691 +transformers/models/autoformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/autoformer/__pycache__/configuration_autoformer.cpython-310.pyc,, +transformers/models/autoformer/__pycache__/modeling_autoformer.cpython-310.pyc,, +transformers/models/autoformer/configuration_autoformer.py,sha256=IqjxujPU316HLjmljrBarAH8rnTsbJrlmrwyR7oTLPU,12165 
+transformers/models/autoformer/modeling_autoformer.py,sha256=nH0M5AHGv6ft8hcVU2PpGPZOBfh0gGnNM2QZ3oZx0Ms,108745 +transformers/models/bamba/__init__.py,sha256=gtebRUrAdiwq-rJmlM5qpbtbGEg-xxA3pjivOHJvaRs,1040 +transformers/models/bamba/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bamba/__pycache__/configuration_bamba.cpython-310.pyc,, +transformers/models/bamba/__pycache__/convert_mamba_ssm_checkpoint.cpython-310.pyc,, +transformers/models/bamba/__pycache__/modeling_bamba.cpython-310.pyc,, +transformers/models/bamba/__pycache__/modular_bamba.cpython-310.pyc,, +transformers/models/bamba/configuration_bamba.py,sha256=q2UmQtJL21N4eLJpPOZilkbKWvULgCluFwYGPQJFiYE,9886 +transformers/models/bamba/convert_mamba_ssm_checkpoint.py,sha256=UMzySc3J_dm5ZH_ARInYLK3DI2eyGpGl5QO4K7R6Frw,9752 +transformers/models/bamba/modeling_bamba.py,sha256=kvDM9lmxsGh0XVAWQmTa_Opp2Yb-oeWKeOIKMtU4Wyc,75653 +transformers/models/bamba/modular_bamba.py,sha256=YIGXx1iVDoq20I2W5Kk1_AUyQsOGIHGa5AREwdlFCX8,59936 +transformers/models/bark/__init__.py,sha256=fIlOQ6RPBARVhUKdjNx2Nvf09azEI6AiPv3lyWjk0Gc,1024 +transformers/models/bark/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bark/__pycache__/configuration_bark.cpython-310.pyc,, +transformers/models/bark/__pycache__/convert_suno_to_hf.cpython-310.pyc,, +transformers/models/bark/__pycache__/generation_configuration_bark.cpython-310.pyc,, +transformers/models/bark/__pycache__/modeling_bark.cpython-310.pyc,, +transformers/models/bark/__pycache__/processing_bark.cpython-310.pyc,, +transformers/models/bark/configuration_bark.py,sha256=Wv8Q0u_bFuHAag_jiS07alb9tZ3gf_V_RMOKamDQeAs,11895 +transformers/models/bark/convert_suno_to_hf.py,sha256=9Al9mccdhIX1pT3k_bECmGOXii25gx6D1A7dSs94U4Y,9374 +transformers/models/bark/generation_configuration_bark.py,sha256=6YiZkHuloUVbfqzR36xmvhKm5SZ2-b2MWMHSwMvRWTA,14947 +transformers/models/bark/modeling_bark.py,sha256=qNN-WsfVAk7Cqnhq5irlLo4aSdNk8RLlSXysf5ZI0FI,82602 
+transformers/models/bark/processing_bark.py,sha256=xeyTkcRTU8-Wgmp79Q3nRbmYL-VeEVsb_CPyT3iYcVU,13340 +transformers/models/bart/__init__.py,sha256=1_kCOlvj4hcCbNiAsAhH0PYAK4zopuVKAYKZ_64O3_c,1142 +transformers/models/bart/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bart/__pycache__/configuration_bart.cpython-310.pyc,, +transformers/models/bart/__pycache__/convert_bart_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/bart/__pycache__/modeling_bart.cpython-310.pyc,, +transformers/models/bart/__pycache__/modeling_flax_bart.cpython-310.pyc,, +transformers/models/bart/__pycache__/modeling_tf_bart.cpython-310.pyc,, +transformers/models/bart/__pycache__/tokenization_bart.cpython-310.pyc,, +transformers/models/bart/__pycache__/tokenization_bart_fast.cpython-310.pyc,, +transformers/models/bart/configuration_bart.py,sha256=a7p9WvNZTjkz9OcqS4Yc7tmMiRUyHMHfsrwdp7lwBeo,18828 +transformers/models/bart/convert_bart_original_pytorch_checkpoint_to_pytorch.py,sha256=kJ1H5FbOt_p3YqeR8ezBKaOV0Po2UcOHJRiRdUcJ90c,6054 +transformers/models/bart/modeling_bart.py,sha256=_QLOlFE1OPrQLfDndPENZmYM-OrzXt7ErKtrZ2d4uJQ,102636 +transformers/models/bart/modeling_flax_bart.py,sha256=pUatshruENz2rbOZSZPy_axyfb0XwK50AfBVJY4KG4s,82964 +transformers/models/bart/modeling_tf_bart.py,sha256=qh1prHYMgGuDiVIVmXPkyo55T8tFP33WtXEvaWbu5CU,80893 +transformers/models/bart/tokenization_bart.py,sha256=I46HzuT0apIw2JMFyS5Iy4gMbmLNHqt7wxE_MqVXCmM,16280 +transformers/models/bart/tokenization_bart_fast.py,sha256=Yp5AGUVsIuitpZF7Sy-NL5qcyb6eRpD3VraIAXwrMhk,11288 +transformers/models/barthez/__init__.py,sha256=21WBGVafx-0kV-K_2jBdpBg0NBWsRKJqJowo03g2S9A,1003 +transformers/models/barthez/__pycache__/__init__.cpython-310.pyc,, +transformers/models/barthez/__pycache__/tokenization_barthez.cpython-310.pyc,, +transformers/models/barthez/__pycache__/tokenization_barthez_fast.cpython-310.pyc,, 
+transformers/models/barthez/tokenization_barthez.py,sha256=rUYGiy59C87gLqA7Nih1oZAuHBNgwwn6DAk5t327h6w,12097 +transformers/models/barthez/tokenization_barthez_fast.py,sha256=kJR7nOw1ogVLi9oTbRo9jbX6T1zeSOXIpws9WgJb_Ls,7873 +transformers/models/bartpho/__init__.py,sha256=DN0zgU4dM841Kqqo6wN8FpWFeWYHCBxIq3lxrg5vUoU,958 +transformers/models/bartpho/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bartpho/__pycache__/tokenization_bartpho.cpython-310.pyc,, +transformers/models/bartpho/tokenization_bartpho.py,sha256=nrzFWM36nFdiK33XQ6hkW2dY9XKRJD2VzsrXXzA7BtM,13556 +transformers/models/beit/__init__.py,sha256=1YMzI3uPQsE2JABveFTVSkuHE8DJkB_4RBWtyCkiHUI,1111 +transformers/models/beit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/beit/__pycache__/configuration_beit.cpython-310.pyc,, +transformers/models/beit/__pycache__/convert_beit_unilm_to_pytorch.cpython-310.pyc,, +transformers/models/beit/__pycache__/feature_extraction_beit.cpython-310.pyc,, +transformers/models/beit/__pycache__/image_processing_beit.cpython-310.pyc,, +transformers/models/beit/__pycache__/modeling_beit.cpython-310.pyc,, +transformers/models/beit/__pycache__/modeling_flax_beit.cpython-310.pyc,, +transformers/models/beit/configuration_beit.py,sha256=L9kQi9U7uHwvUieA8VhbjrHCi7PgdPmu1dHypyFy3x8,11593 +transformers/models/beit/convert_beit_unilm_to_pytorch.py,sha256=Pdqslow71bJYQioU21U5rWEjXSQC4z_dOnVgKfPxWYI,16601 +transformers/models/beit/feature_extraction_beit.py,sha256=_dd_Cd95PPiQQDd5lT9pAybrUhdiqHZPA6I2c54y8S4,1209 +transformers/models/beit/image_processing_beit.py,sha256=SVy2cgWTwZMReR8sEmiFs5t_e3os9MS_UAzX6zOraxs,24451 +transformers/models/beit/modeling_beit.py,sha256=xuDCjF-wUmdJ4yaNVVcmvtv7c0SEkZf2JeTE_qeZpHE,69874 +transformers/models/beit/modeling_flax_beit.py,sha256=P2tedtc4boQ-MRDzBBvi3gjbX621Q_x_J1ohU3icUMU,37140 +transformers/models/bert/__init__.py,sha256=8IqoRT5cO4DU3GmQHsJgW-n6MclOZTmho5VYkKDMbnU,1182 
+transformers/models/bert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bert/__pycache__/configuration_bert.cpython-310.pyc,, +transformers/models/bert/__pycache__/convert_bert_original_tf2_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/bert/__pycache__/convert_bert_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/bert/__pycache__/convert_bert_pytorch_checkpoint_to_original_tf.cpython-310.pyc,, +transformers/models/bert/__pycache__/convert_bert_token_dropping_original_tf2_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/bert/__pycache__/modeling_bert.cpython-310.pyc,, +transformers/models/bert/__pycache__/modeling_flax_bert.cpython-310.pyc,, +transformers/models/bert/__pycache__/modeling_tf_bert.cpython-310.pyc,, +transformers/models/bert/__pycache__/tokenization_bert.cpython-310.pyc,, +transformers/models/bert/__pycache__/tokenization_bert_fast.cpython-310.pyc,, +transformers/models/bert/__pycache__/tokenization_bert_tf.cpython-310.pyc,, +transformers/models/bert/configuration_bert.py,sha256=YoCFuobD5d-xaBesGmhyTrS_2Po5DIdpEBGXEWQZ1nw,7289 +transformers/models/bert/convert_bert_original_tf2_checkpoint_to_pytorch.py,sha256=q7RA_4S9OkAxFdmmGUV_Lf9qKYoRshJJbQMAHxscsRU,10491 +transformers/models/bert/convert_bert_original_tf_checkpoint_to_pytorch.py,sha256=eSJs7TBQPBfZBPAmBJ7L8JKqKWBewvWKsnVbklmHkNc,2158 +transformers/models/bert/convert_bert_pytorch_checkpoint_to_original_tf.py,sha256=6nISsCdgO_sJFFiLpnkGGsmTqC9Yp-gzDPDM-EafVXA,4112 +transformers/models/bert/convert_bert_token_dropping_original_tf2_checkpoint_to_pytorch.py,sha256=ksipaccVHXMHrGrNJp_zAHemziQvYMMcG7fzIo00DQw,7607 +transformers/models/bert/modeling_bert.py,sha256=FN_oiUWPabpepo5ko-N39kwVAQg6Rl9j8T108AASqzo,90053 +transformers/models/bert/modeling_flax_bert.py,sha256=d7XmXxyiccYVK5Ya-3yWmdYZ9uCNISWRgZVJW-CoU68,64013 +transformers/models/bert/modeling_tf_bert.py,sha256=5NgXMUeMaXoAP0Yrb7oA23zsAUJm5uKLc8x5SKfGABc,94661 
+transformers/models/bert/tokenization_bert.py,sha256=IXUVFnhEH-lSdEpV0wm9regp3K5Ja_H_2pDpPp-NCI8,20915 +transformers/models/bert/tokenization_bert_fast.py,sha256=sxlyN7qodKx3BoKxkmrhKnySNtlc04FQM1OcHp7S4sA,7686 +transformers/models/bert/tokenization_bert_tf.py,sha256=p6LfPErF6Hm2kVa6OBqNkWkkwuuCFje-WqX4TaVdICw,11927 +transformers/models/bert_generation/__init__.py,sha256=sLEyyFf2yI6QflP1lTI9LXUF5PvWBvu-fsaFbjund5I,1059 +transformers/models/bert_generation/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bert_generation/__pycache__/configuration_bert_generation.cpython-310.pyc,, +transformers/models/bert_generation/__pycache__/modeling_bert_generation.cpython-310.pyc,, +transformers/models/bert_generation/__pycache__/tokenization_bert_generation.cpython-310.pyc,, +transformers/models/bert_generation/configuration_bert_generation.py,sha256=OknGKh0MkhqzzbPRXJO_-CNMVURP3OnRmUOa5Y0NIFw,6377 +transformers/models/bert_generation/modeling_bert_generation.py,sha256=8HhxcpP8j1E6GdIm_17T-I5BH6Q1PGuzf86CxPFKQVU,47635 +transformers/models/bert_generation/tokenization_bert_generation.py,sha256=JJXKV2CzPIxkKm1WErhm1LF4DA-_UPZ1-g3rLz5L7ak,7116 +transformers/models/bert_japanese/__init__.py,sha256=94xfgVPnIQuHQxvmc55_EedJlJQTnHiL4va6Ry6x3LE,964 +transformers/models/bert_japanese/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bert_japanese/__pycache__/tokenization_bert_japanese.cpython-310.pyc,, +transformers/models/bert_japanese/tokenization_bert_japanese.py,sha256=3b-_Oyf0_1Fw6N7lJEcZ6w0p6Qse4JqRyIETR4vLqaw,39086 +transformers/models/bertweet/__init__.py,sha256=EZegs0rWTTCiOC_eY-M8eV7bCcwU60dB0HsM1S1VDzQ,959 +transformers/models/bertweet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bertweet/__pycache__/tokenization_bertweet.cpython-310.pyc,, +transformers/models/bertweet/tokenization_bertweet.py,sha256=xysk7JjQ_Q1faNzHYNy6kHqYZiR3gMgG1VeknOYTZCk,27020 
+transformers/models/big_bird/__init__.py,sha256=3rloOuQNKURURWgk5Td4OBQBAzBdTJ2_fM_CI6yPrV0,1126 +transformers/models/big_bird/__pycache__/__init__.cpython-310.pyc,, +transformers/models/big_bird/__pycache__/configuration_big_bird.cpython-310.pyc,, +transformers/models/big_bird/__pycache__/convert_bigbird_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/big_bird/__pycache__/modeling_big_bird.cpython-310.pyc,, +transformers/models/big_bird/__pycache__/modeling_flax_big_bird.cpython-310.pyc,, +transformers/models/big_bird/__pycache__/tokenization_big_bird.cpython-310.pyc,, +transformers/models/big_bird/__pycache__/tokenization_big_bird_fast.cpython-310.pyc,, +transformers/models/big_bird/configuration_big_bird.py,sha256=e2xRL0lOIxVdUY3bXVcRJEXBM2bWWIavrx9-BXYKyoc,7883 +transformers/models/big_bird/convert_bigbird_original_tf_checkpoint_to_pytorch.py,sha256=qWXzJVxALd_0AYy1OETdK7LuVZFICtSiqv2vtv04zE0,2492 +transformers/models/big_bird/modeling_big_bird.py,sha256=7RcKPeVmrV6feIrl6ap9vFuKJCMM2yOP7SQXopAe79A,141859 +transformers/models/big_bird/modeling_flax_big_bird.py,sha256=9UqJX4gIWm4vYPQTA4ZwMZHyMRQ2EO5y3AeAMUJ2vFk,109837 +transformers/models/big_bird/tokenization_big_bird.py,sha256=dcrTNu1L4weScAFig7pY1MlQx8ctDF-RBdaUt2NJ55U,14250 +transformers/models/big_bird/tokenization_big_bird_fast.py,sha256=ZXRIpdmUAEcjUZgsC9PBOutjKdrtI23Tg4T_gtMhvkM,10203 +transformers/models/bigbird_pegasus/__init__.py,sha256=7zOl1EhO8W2S9jE0FsyEoW8kV6yn5bLA0dspGFM1mLQ,1011 +transformers/models/bigbird_pegasus/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bigbird_pegasus/__pycache__/configuration_bigbird_pegasus.cpython-310.pyc,, +transformers/models/bigbird_pegasus/__pycache__/convert_bigbird_pegasus_tf_to_pytorch.cpython-310.pyc,, +transformers/models/bigbird_pegasus/__pycache__/modeling_bigbird_pegasus.cpython-310.pyc,, +transformers/models/bigbird_pegasus/configuration_bigbird_pegasus.py,sha256=7BzPOXq8tnl_F4NpElUo3RRoA8nKFS-V7KKbjzFrf3U,19280 
+transformers/models/bigbird_pegasus/convert_bigbird_pegasus_tf_to_pytorch.py,sha256=Wc7aoNvtzxt-DPi655Kl30CgDgq_hp08psISb8dWpLU,6288 +transformers/models/bigbird_pegasus/modeling_bigbird_pegasus.py,sha256=gPD4JOlWfm7_KUOmNDScyoYL0smk0u7oYtwJUDpYA6w,144738 +transformers/models/biogpt/__init__.py,sha256=pZxVjmVzt7FXlkMO_5fMg01eyPvvHYXmDA33MKhp6Yk,1032 +transformers/models/biogpt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/biogpt/__pycache__/configuration_biogpt.cpython-310.pyc,, +transformers/models/biogpt/__pycache__/convert_biogpt_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/biogpt/__pycache__/modeling_biogpt.cpython-310.pyc,, +transformers/models/biogpt/__pycache__/tokenization_biogpt.cpython-310.pyc,, +transformers/models/biogpt/configuration_biogpt.py,sha256=Kgvu5gVwfYih2d9UWbreENitXZoobGrXobqKq5zYVI0,6207 +transformers/models/biogpt/convert_biogpt_original_pytorch_checkpoint_to_pytorch.py,sha256=5zNYzaEy7QPc99LCHTcofXSCI3tr0pzlIpFpwT1ZgN0,10578 +transformers/models/biogpt/modeling_biogpt.py,sha256=eBdqtu_Ee7l4YvYwfQMId_TKuIQkbkzIoQ7AhABYKCc,47374 +transformers/models/biogpt/tokenization_biogpt.py,sha256=GZvyhnQtqOEJCRk7yac2nu6aGRA8f7fpOXFFgzF4-Xk,13289 +transformers/models/bit/__init__.py,sha256=q_q1ZLJq0Jzh1ahQSktKHMESHkXB-DPw4f9Z0YCFfAs,1027 +transformers/models/bit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bit/__pycache__/configuration_bit.cpython-310.pyc,, +transformers/models/bit/__pycache__/convert_bit_to_pytorch.cpython-310.pyc,, +transformers/models/bit/__pycache__/image_processing_bit.cpython-310.pyc,, +transformers/models/bit/__pycache__/modeling_bit.cpython-310.pyc,, +transformers/models/bit/configuration_bit.py,sha256=W-9-GGA3bGMTLvOYI9qdqqBt1tUuxEIVw6LhsMcacKI,6295 +transformers/models/bit/convert_bit_to_pytorch.py,sha256=fheumYRIVQwZoZCWcl0btZh7LhandaEeAoS6itUmkwk,5954 +transformers/models/bit/image_processing_bit.py,sha256=tHE_vmyw1b9xN4k9O36lSWlflYlPT0fuUooZNt00z-c,15824 
+transformers/models/bit/modeling_bit.py,sha256=7J0dnulFgWTp_VJHQOvJdCXZeOBxcu5FqFu7aFW5xWE,32290 +transformers/models/blenderbot/__init__.py,sha256=kdNRND4x54J18VhDVLH6usun5IblSN_9NYaLZfvaysc,1178 +transformers/models/blenderbot/__pycache__/__init__.cpython-310.pyc,, +transformers/models/blenderbot/__pycache__/configuration_blenderbot.cpython-310.pyc,, +transformers/models/blenderbot/__pycache__/convert_blenderbot_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/blenderbot/__pycache__/modeling_blenderbot.cpython-310.pyc,, +transformers/models/blenderbot/__pycache__/modeling_flax_blenderbot.cpython-310.pyc,, +transformers/models/blenderbot/__pycache__/modeling_tf_blenderbot.cpython-310.pyc,, +transformers/models/blenderbot/__pycache__/tokenization_blenderbot.cpython-310.pyc,, +transformers/models/blenderbot/__pycache__/tokenization_blenderbot_fast.cpython-310.pyc,, +transformers/models/blenderbot/configuration_blenderbot.py,sha256=GydvRzNQyXl0A3PjvK-AFwX2WMO0Isf-toZRgxzlrIs,18838 +transformers/models/blenderbot/convert_blenderbot_original_pytorch_checkpoint_to_pytorch.py,sha256=86QBWYTeyJvxMUOfxqmGHwpDneadfqbEGSujMYw3yuU,3702 +transformers/models/blenderbot/modeling_blenderbot.py,sha256=ms5XrPHXeEw0VJvjFjdFZLFz9rgRL04am___k_1Iq2o,74088 +transformers/models/blenderbot/modeling_flax_blenderbot.py,sha256=2x9zUXQBozM1Uezutghd6DNO8pOkpRAHoZjWpPUo8bM,65095 +transformers/models/blenderbot/modeling_tf_blenderbot.py,sha256=hi6rHin1N8aosd9Vdeg5dfG511vCWDNLN1NJDUBa1Qc,72799 +transformers/models/blenderbot/tokenization_blenderbot.py,sha256=B_znhMwieYtuw13YiNMULh-wD6Pxjz7J0FuVNiWV804,18238 +transformers/models/blenderbot/tokenization_blenderbot_fast.py,sha256=ggPHz4tIgPQ5lhuWljVgv3vjaCqXioR7wSrL4YFypG4,12461 +transformers/models/blenderbot_small/__init__.py,sha256=QsmmBSPdTC43EIyYBwo-xTyJjLLVqm4Cx-KFJ9O2mfE,1214 +transformers/models/blenderbot_small/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/blenderbot_small/__pycache__/configuration_blenderbot_small.cpython-310.pyc,, +transformers/models/blenderbot_small/__pycache__/modeling_blenderbot_small.cpython-310.pyc,, +transformers/models/blenderbot_small/__pycache__/modeling_flax_blenderbot_small.cpython-310.pyc,, +transformers/models/blenderbot_small/__pycache__/modeling_tf_blenderbot_small.cpython-310.pyc,, +transformers/models/blenderbot_small/__pycache__/tokenization_blenderbot_small.cpython-310.pyc,, +transformers/models/blenderbot_small/__pycache__/tokenization_blenderbot_small_fast.cpython-310.pyc,, +transformers/models/blenderbot_small/configuration_blenderbot_small.py,sha256=H-kA5YzspG0lpH3P3Gcjf3oVQG1VsFlh4jJsIFWwf04,18280 +transformers/models/blenderbot_small/modeling_blenderbot_small.py,sha256=6iec_zpplz9mydtEJws6U8y49xmHalPZ4UPDAR48XCE,72155 +transformers/models/blenderbot_small/modeling_flax_blenderbot_small.py,sha256=iJretreLSzUWtKtjBcl6tuELIPe2xLUHc9jJDBdXnxA,66085 +transformers/models/blenderbot_small/modeling_tf_blenderbot_small.py,sha256=gq6dvrnakKHtQ9UaWiOcRy5hWi5INvcLcrlwEUISw9k,71726 +transformers/models/blenderbot_small/tokenization_blenderbot_small.py,sha256=IiM31KtIzetAqyZ1wa835RqWUK0hDQpuuequqEvnv_4,7964 +transformers/models/blenderbot_small/tokenization_blenderbot_small_fast.py,sha256=yAG-4jJSkeQ_UPpSZhAN-20564BHMVWaNyELOx-HvNc,3367 +transformers/models/blip/__init__.py,sha256=TIMM52D2RAem0Uqsajw2yfIw92yyCsNaP2EQcf_F634,1101 +transformers/models/blip/__pycache__/__init__.cpython-310.pyc,, +transformers/models/blip/__pycache__/configuration_blip.cpython-310.pyc,, +transformers/models/blip/__pycache__/convert_blip_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/blip/__pycache__/image_processing_blip.cpython-310.pyc,, +transformers/models/blip/__pycache__/modeling_blip.cpython-310.pyc,, +transformers/models/blip/__pycache__/modeling_blip_text.cpython-310.pyc,, +transformers/models/blip/__pycache__/modeling_tf_blip.cpython-310.pyc,, 
+transformers/models/blip/__pycache__/modeling_tf_blip_text.cpython-310.pyc,, +transformers/models/blip/__pycache__/processing_blip.cpython-310.pyc,, +transformers/models/blip/configuration_blip.py,sha256=hUGGYwHl_oBTMlU8GiQJTPpuEHewx9fLSKzWdTRz7oI,14894 +transformers/models/blip/convert_blip_original_pytorch_to_hf.py,sha256=7vL8HN4EsJMkUJt74p2nrFVvwRLmKmd43KJAxmEeGi0,6970 +transformers/models/blip/image_processing_blip.py,sha256=0ZqSWMiV4z_gWFSml_jLrLziOMohV853gY1GJ4nI4RM,15260 +transformers/models/blip/modeling_blip.py,sha256=IBM8R4QDyy87E9XpYmpYWfug6JlqHo46XDOjqxyPYEk,68848 +transformers/models/blip/modeling_blip_text.py,sha256=4eNmYG-mZBIZBbIsjKQrCgMzt2u-XuzP1E__MwPKdZ4,44175 +transformers/models/blip/modeling_tf_blip.py,sha256=qC3slLAjZ04QzFdU7Jb0ZS0kCVWbB78Doy5_odfuJFg,71531 +transformers/models/blip/modeling_tf_blip_text.py,sha256=iJiYcnZpqJhoNrfUcxPxtokT_qMJGgLyz1hAcAWZ-t4,49972 +transformers/models/blip/processing_blip.py,sha256=kUE-NeeIu3K2XB349QXD868os29SshunVUFg0_QfuGg,5897 +transformers/models/blip_2/__init__.py,sha256=kj_6H0rQ7dLoQk-COIb06LlDRnbORu3GLU3m4EdMkAM,1030 +transformers/models/blip_2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/blip_2/__pycache__/configuration_blip_2.cpython-310.pyc,, +transformers/models/blip_2/__pycache__/convert_blip_2_original_to_pytorch.cpython-310.pyc,, +transformers/models/blip_2/__pycache__/modeling_blip_2.cpython-310.pyc,, +transformers/models/blip_2/__pycache__/processing_blip_2.cpython-310.pyc,, +transformers/models/blip_2/configuration_blip_2.py,sha256=e3gM_SZBkZpQ9tsKEmD2cI7FiFPXzEzz0_K_K3TS2Js,16012 +transformers/models/blip_2/convert_blip_2_original_to_pytorch.py,sha256=ryGFvGN2KlEJZXOzASltPNfhHzO_5EpbaphgBoYRsE4,16781 +transformers/models/blip_2/modeling_blip_2.py,sha256=00yosdp8Pf0mfw_1tT2-dtdbA--dth_ANabmAuimhnA,114516 +transformers/models/blip_2/processing_blip_2.py,sha256=3NlrZ6tBUOr76PLvYGS-OQGmpIViqPOx9HF6jGfEjnk,8901 
+transformers/models/bloom/__init__.py,sha256=lcq09Py2vSezUf26aaBG4yp2DpLZ-mAPt-fybvY_C-Q,1073 +transformers/models/bloom/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bloom/__pycache__/configuration_bloom.cpython-310.pyc,, +transformers/models/bloom/__pycache__/convert_bloom_original_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/bloom/__pycache__/modeling_bloom.cpython-310.pyc,, +transformers/models/bloom/__pycache__/modeling_flax_bloom.cpython-310.pyc,, +transformers/models/bloom/__pycache__/tokenization_bloom_fast.cpython-310.pyc,, +transformers/models/bloom/configuration_bloom.py,sha256=owJZi9R6fqbCRNhKkQrhLRJaSwoV7hmUzgf6FMtsGIQ,10185 +transformers/models/bloom/convert_bloom_original_checkpoint_to_pytorch.py,sha256=WtPFsgC47dhGDW6dHm5PC4SaZgZ6tF3umTFd8Qiw-1Q,10301 +transformers/models/bloom/modeling_bloom.py,sha256=3wGC_JG8mPuzR41bGMOdk-x5-u0STVwdJigth2GcDD8,61998 +transformers/models/bloom/modeling_flax_bloom.py,sha256=Qch_LuXgy8nefmlUShUQqmDsEI3L6YlZwmLZYq7zKRg,30175 +transformers/models/bloom/tokenization_bloom_fast.py,sha256=csCeZyW8locaAry8KnXq88_TDX9LXiJkDYyXq2vt_C4,6284 +transformers/models/bridgetower/__init__.py,sha256=hZiDmqf2UAFUr0EY0b0ldkNXZvNx9YTLW64qTi2tIf8,1093 +transformers/models/bridgetower/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bridgetower/__pycache__/configuration_bridgetower.cpython-310.pyc,, +transformers/models/bridgetower/__pycache__/image_processing_bridgetower.cpython-310.pyc,, +transformers/models/bridgetower/__pycache__/modeling_bridgetower.cpython-310.pyc,, +transformers/models/bridgetower/__pycache__/processing_bridgetower.cpython-310.pyc,, +transformers/models/bridgetower/configuration_bridgetower.py,sha256=06gk7a1fWLsQgAC29FRmFypnRGq1dPHZm6cF2lU8hEE,14876 +transformers/models/bridgetower/image_processing_bridgetower.py,sha256=u9uq96e0-CLXCEbXiXcR2V92xquNbKm4w1risyPXCcc,26333 
+transformers/models/bridgetower/modeling_bridgetower.py,sha256=sBQ0s08HA8K6fDFqFnPjoo4ojsFTCX_VAk34VBdp8Go,91575 +transformers/models/bridgetower/processing_bridgetower.py,sha256=4vAYym1IHDu91HR_6wfwjhCjgQW0XAHGwnGoO9au5wo,4437 +transformers/models/bros/__init__.py,sha256=wT0avJ_J50-WK6jOB-6UbgN5kjHiBwG-NNT_iefMXr8,1024 +transformers/models/bros/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bros/__pycache__/configuration_bros.cpython-310.pyc,, +transformers/models/bros/__pycache__/convert_bros_to_pytorch.cpython-310.pyc,, +transformers/models/bros/__pycache__/modeling_bros.cpython-310.pyc,, +transformers/models/bros/__pycache__/processing_bros.cpython-310.pyc,, +transformers/models/bros/configuration_bros.py,sha256=9Vgmvk3hZ-VccsOGhB8OlUPjM5ojPufSIBHa2oY4I5I,6418 +transformers/models/bros/convert_bros_to_pytorch.py,sha256=kxZDGzvIYxz9hbIzzJOfOj5tixji5efb2884rqwoY6A,4871 +transformers/models/bros/modeling_bros.py,sha256=ZkjTI0jW2EwEaP5Ifb3QCnvPol4KYk9daaMcOZ0LC60,58014 +transformers/models/bros/processing_bros.py,sha256=QnQHmepnVnGsg4lsL681-uU6LOIV1R7gzCUE49_MmKY,4223 +transformers/models/byt5/__init__.py,sha256=O7yXvHyqMZ7stkKX67knnddmJ81pPHoKrY_7NCAauU4,955 +transformers/models/byt5/__pycache__/__init__.cpython-310.pyc,, +transformers/models/byt5/__pycache__/convert_byt5_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/byt5/__pycache__/tokenization_byt5.cpython-310.pyc,, +transformers/models/byt5/convert_byt5_original_tf_checkpoint_to_pytorch.py,sha256=LEibHPdlDdKdyB6XHB5s7pHRsqB5qQxUWN93H8G_q5k,2119 +transformers/models/byt5/tokenization_byt5.py,sha256=y8G5Y-aBmTHkXoEsRcUjtmcatuOqW8ekfXagoyGU9Jg,10059 +transformers/models/camembert/__init__.py,sha256=hfxYgJYchvXLwio03yWsATGmrU2hgKOoiw7gaNoVgj8,1129 +transformers/models/camembert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/camembert/__pycache__/configuration_camembert.cpython-310.pyc,, 
+transformers/models/camembert/__pycache__/modeling_camembert.cpython-310.pyc,, +transformers/models/camembert/__pycache__/modeling_tf_camembert.cpython-310.pyc,, +transformers/models/camembert/__pycache__/tokenization_camembert.cpython-310.pyc,, +transformers/models/camembert/__pycache__/tokenization_camembert_fast.cpython-310.pyc,, +transformers/models/camembert/configuration_camembert.py,sha256=pOCkwK5-87daixnYS6NMfjWCcTXGvKDGdUE2rpJlrQ0,7404 +transformers/models/camembert/modeling_camembert.py,sha256=4CqUdopM5wBrHabKBACDcb_gOK9geuOMkkxsatKlFaA,79443 +transformers/models/camembert/modeling_tf_camembert.py,sha256=2_KMeWITCbmVZwwPGNEzHtwK5CA3UGZkiE18r5TUWpg,81824 +transformers/models/camembert/tokenization_camembert.py,sha256=juhJAgm9-qC_ajVPIQ5EErOmx35Up2sTtacuPFrsFI0,14011 +transformers/models/camembert/tokenization_camembert_fast.py,sha256=eYU24_qNnpU4cNGjHe6okOFAR_wDux3CTMNaSDXxybg,8311 +transformers/models/canine/__init__.py,sha256=ThkEqO6wPzWCnAplx0EWCUqVaKKsNYQKXQhWfTblEBU,1032 +transformers/models/canine/__pycache__/__init__.cpython-310.pyc,, +transformers/models/canine/__pycache__/configuration_canine.cpython-310.pyc,, +transformers/models/canine/__pycache__/convert_canine_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/canine/__pycache__/modeling_canine.cpython-310.pyc,, +transformers/models/canine/__pycache__/tokenization_canine.cpython-310.pyc,, +transformers/models/canine/configuration_canine.py,sha256=8Rlt-y-lkY4Jwzi4Aa7NXN4TJtDoQylbogUOjt_q9IA,6584 +transformers/models/canine/convert_canine_original_tf_checkpoint_to_pytorch.py,sha256=zJ6VDpE58I4-ntOXDUCKPYXPnnhWkuYXCfejDchr9jY,2116 +transformers/models/canine/modeling_canine.py,sha256=sW1c8QHN_nTGNGXvnvpecZLisHKZ3KXqyV_Z5kWLz9E,73650 +transformers/models/canine/tokenization_canine.py,sha256=pGRq1iGZxZxLfSibFmxa7sHqZlb8YGcS0UgiD3us9qc,9319 +transformers/models/chameleon/__init__.py,sha256=5XR1fyLUHtxc-PLFlPnqT7pSsaihK9f4mBOJn-YhjY8,1085 
+transformers/models/chameleon/__pycache__/__init__.cpython-310.pyc,, +transformers/models/chameleon/__pycache__/configuration_chameleon.cpython-310.pyc,, +transformers/models/chameleon/__pycache__/convert_chameleon_weights_to_hf.cpython-310.pyc,, +transformers/models/chameleon/__pycache__/image_processing_chameleon.cpython-310.pyc,, +transformers/models/chameleon/__pycache__/modeling_chameleon.cpython-310.pyc,, +transformers/models/chameleon/__pycache__/processing_chameleon.cpython-310.pyc,, +transformers/models/chameleon/configuration_chameleon.py,sha256=NYDmrkm3lunyWGuGEY7KkuGkhPRYgOgfCKdpOfECQNs,13293 +transformers/models/chameleon/convert_chameleon_weights_to_hf.py,sha256=Rh9cpW_iOQgTxdhVGBVvMgNRRn4eVMdXCljz9W9bv5Y,20301 +transformers/models/chameleon/image_processing_chameleon.py,sha256=7EvJ_aHkNkDbNIJkCHyDmvfj7gKcDfDVQViz7XYP8Ws,17541 +transformers/models/chameleon/modeling_chameleon.py,sha256=Zy_PEyWAIqzKxqWh9IFl_nr6WWrM0KaHRBznGuePEKw,77745 +transformers/models/chameleon/processing_chameleon.py,sha256=ooHM30q477U_IM5bqwboISA3UmE6ebue2UBBwbfYHaE,8497 +transformers/models/chinese_clip/__init__.py,sha256=HGJ0ZhGZrwe9RNBwbe0R66zbZJ-XwT6swCYjpFSYtOQ,1148 +transformers/models/chinese_clip/__pycache__/__init__.cpython-310.pyc,, +transformers/models/chinese_clip/__pycache__/configuration_chinese_clip.cpython-310.pyc,, +transformers/models/chinese_clip/__pycache__/convert_chinese_clip_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/chinese_clip/__pycache__/feature_extraction_chinese_clip.cpython-310.pyc,, +transformers/models/chinese_clip/__pycache__/image_processing_chinese_clip.cpython-310.pyc,, +transformers/models/chinese_clip/__pycache__/modeling_chinese_clip.cpython-310.pyc,, +transformers/models/chinese_clip/__pycache__/processing_chinese_clip.cpython-310.pyc,, +transformers/models/chinese_clip/configuration_chinese_clip.py,sha256=CGTZWWAMK93lLOJ3VzhRk8uiN3xUso_BWY8aVqmAyCo,20796 
+transformers/models/chinese_clip/convert_chinese_clip_original_pytorch_to_hf.py,sha256=-0bnVcdXxStmygkyj6S1hIGCVbpEbe3cM7AoshHH5ZE,5069 +transformers/models/chinese_clip/feature_extraction_chinese_clip.py,sha256=37XWUOby8N6UWwE-t5tMiasS7V2q7cKyc-Jk1YfGqQU,1291 +transformers/models/chinese_clip/image_processing_chinese_clip.py,sha256=PJZgsxuxbhH0sXInjngESylhnf-IjRcT_mwKgzvFCxI,15383 +transformers/models/chinese_clip/modeling_chinese_clip.py,sha256=juVnpn5avrepmqr311YBdP6NBK4JGDeB5jJd93lhY1A,76467 +transformers/models/chinese_clip/processing_chinese_clip.py,sha256=__De516UXM9Mu2Yk2EvIvqVWTzYZ-Jy07psSl3cGDYw,7529 +transformers/models/clap/__init__.py,sha256=751udHbsD7FBLGAByjx_8Z4XPLly1MaQQ4wKN_9vbOY,1067 +transformers/models/clap/__pycache__/__init__.cpython-310.pyc,, +transformers/models/clap/__pycache__/configuration_clap.cpython-310.pyc,, +transformers/models/clap/__pycache__/convert_clap_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/clap/__pycache__/feature_extraction_clap.cpython-310.pyc,, +transformers/models/clap/__pycache__/modeling_clap.cpython-310.pyc,, +transformers/models/clap/__pycache__/processing_clap.cpython-310.pyc,, +transformers/models/clap/configuration_clap.py,sha256=cdJWJmdCsuoPDy-adWEkUt0OQB1-9oECdumFJptkpxg,18801 +transformers/models/clap/convert_clap_original_pytorch_to_hf.py,sha256=FqHoVAYXIzfUY9342azwlm9zfSP7QdS8p-u9Q6RE_K4,5149 +transformers/models/clap/feature_extraction_clap.py,sha256=h6FhovQGYHyXRrQ8GQQpkNBnphtlapawb4Be7A1dG3w,18728 +transformers/models/clap/modeling_clap.py,sha256=G21l3dtz78_mUM6gHlXgzKzwfDAvLMJvpb139J0TSfk,105025 +transformers/models/clap/processing_clap.py,sha256=YrxLfhmLXQGtSuj97x4TcROlSKJFIx5stdsV2G8xJkw,5708 +transformers/models/clip/__init__.py,sha256=djnvRECpVJwILtbE6Fr3UmCmc7wEqf8lDg739rz-10w,1261 +transformers/models/clip/__pycache__/__init__.cpython-310.pyc,, +transformers/models/clip/__pycache__/configuration_clip.cpython-310.pyc,, 
+transformers/models/clip/__pycache__/convert_clip_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/clip/__pycache__/feature_extraction_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/image_processing_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/modeling_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/modeling_flax_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/modeling_tf_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/processing_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/tokenization_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/tokenization_clip_fast.cpython-310.pyc,, +transformers/models/clip/configuration_clip.py,sha256=5Zk61KYs-OBHziSHh3IxVh6NY4GSCrIcmRXWlE85SgU,19353 +transformers/models/clip/convert_clip_original_pytorch_to_hf.py,sha256=tp5I6ZlOQgHl0SWoADVzYSBSdydSWmlquxtAkaXvaZU,5569 +transformers/models/clip/feature_extraction_clip.py,sha256=GoTgAxYidfx91DYnfLHHBPvx6oUt6LUNXwxlC5wkc8U,1209 +transformers/models/clip/image_processing_clip.py,sha256=xq6CbTauLhEy74dta2AcArSE3qcOHo2pbNjPXQsTlUk,16803 +transformers/models/clip/modeling_clip.py,sha256=OCrLIIvatF3X6Z1b4JsyXVaW_u36PFEbjHuVYaDAe_k,74256 +transformers/models/clip/modeling_flax_clip.py,sha256=oAXX_FiZ4bFKRWBAeaR-oJrKoUC4HEeUatMQFEo81v8,50748 +transformers/models/clip/modeling_tf_clip.py,sha256=GyQ_1ruFue6ygbJqXQR5SPKJoz_G86bVmlEnt8Bz7OA,60451 +transformers/models/clip/processing_clip.py,sha256=6_9lBf31K-vvgfLuwyJDb713lkmkxJw-MAJCqlFu1bg,7178 +transformers/models/clip/tokenization_clip.py,sha256=J1EFe-UhQPD7kgSfbDmjzfp-qaqc_pSr-hF0PsgQWR4,20606 +transformers/models/clip/tokenization_clip_fast.py,sha256=gTYUkNU1cHb_rIZo1OT0LVycdzzQ5BZWgvEoS0UJIZM,6780 +transformers/models/clipseg/__init__.py,sha256=12Y-b3sRDKM3Hy8-6rK4GUF2a91V1S3nLUF7559AALw,1033 +transformers/models/clipseg/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/clipseg/__pycache__/configuration_clipseg.cpython-310.pyc,, +transformers/models/clipseg/__pycache__/convert_clipseg_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/clipseg/__pycache__/modeling_clipseg.cpython-310.pyc,, +transformers/models/clipseg/__pycache__/processing_clipseg.cpython-310.pyc,, +transformers/models/clipseg/configuration_clipseg.py,sha256=leEXNq_YElCYMljja9STypU87jVv6plsvDM1seyGoqI,19353 +transformers/models/clipseg/convert_clipseg_original_pytorch_to_hf.py,sha256=kYyPxdpdtt6nSxD65tXUTMbN0xPyyzjfTOOMbQ8OL0Y,11114 +transformers/models/clipseg/modeling_clipseg.py,sha256=BMEHEIkzckeEOJjWIUyu6kFQkYCLOBsR0QCjS2TKSAU,66924 +transformers/models/clipseg/processing_clipseg.py,sha256=MHt8JRKlVNDm0f-LQ6OVpjl29BP-OmyYG-VC1SsYurA,7823 +transformers/models/clvp/__init__.py,sha256=RRnPofxkr_llgSxCP9tcAhu3xCR7E_m1PkrHv7KLMzo,1104 +transformers/models/clvp/__pycache__/__init__.cpython-310.pyc,, +transformers/models/clvp/__pycache__/configuration_clvp.cpython-310.pyc,, +transformers/models/clvp/__pycache__/convert_clvp_to_hf.cpython-310.pyc,, +transformers/models/clvp/__pycache__/feature_extraction_clvp.cpython-310.pyc,, +transformers/models/clvp/__pycache__/modeling_clvp.cpython-310.pyc,, +transformers/models/clvp/__pycache__/number_normalizer.cpython-310.pyc,, +transformers/models/clvp/__pycache__/processing_clvp.cpython-310.pyc,, +transformers/models/clvp/__pycache__/tokenization_clvp.cpython-310.pyc,, +transformers/models/clvp/configuration_clvp.py,sha256=EfqfAknfrnwO-zFl1GvVXKRfgInH1K3Sb15-GHwDQT0,20327 +transformers/models/clvp/convert_clvp_to_hf.py,sha256=1WYf_vwj1CeQ_VU9iMqu7Grr_MmlAsaKEK1Lojk6yM4,9326 +transformers/models/clvp/feature_extraction_clvp.py,sha256=vEVLOy3-m2GdU0nogQFKKjOhxnN5t-XWrq165_sLZ4c,10984 +transformers/models/clvp/modeling_clvp.py,sha256=8h0O00Po_MUHv2Qf6cvV0JsPkvEqGCGfr7z7np5_34c,91466 +transformers/models/clvp/number_normalizer.py,sha256=lW1MjRY8PDAWjWLA-S2Fk-LVWaqkmBVCACmF2765Vps,8856 
+transformers/models/clvp/processing_clvp.py,sha256=r1KiLMS0NBJiL7v9qV02oMAL-uj2xugGQBGDMRMhYk8,3634 +transformers/models/clvp/tokenization_clvp.py,sha256=6cWue08VkLPT4Gi5BNHbMHB8IwZ1-17ApeDLa2QTi74,14830 +transformers/models/code_llama/__init__.py,sha256=aZJA9qTifG-RGtJKMzfspfxuQkaBryVva7Ah_uGNMoM,1009 +transformers/models/code_llama/__pycache__/__init__.cpython-310.pyc,, +transformers/models/code_llama/__pycache__/tokenization_code_llama.cpython-310.pyc,, +transformers/models/code_llama/__pycache__/tokenization_code_llama_fast.cpython-310.pyc,, +transformers/models/code_llama/tokenization_code_llama.py,sha256=O0UlJtd4021xMUo-dZ4PsXkjCKDeoddW_r5RKvhARa8,19251 +transformers/models/code_llama/tokenization_code_llama_fast.py,sha256=AyExiCYeWK2A3stxBwN-fWltMH5V3auxCRN-Ha9MFmo,16054 +transformers/models/codegen/__init__.py,sha256=NeUIbS8szfu5R9-7CX_G6730RHOODzTfmrapJH2ApMk,1080 +transformers/models/codegen/__pycache__/__init__.cpython-310.pyc,, +transformers/models/codegen/__pycache__/configuration_codegen.cpython-310.pyc,, +transformers/models/codegen/__pycache__/modeling_codegen.cpython-310.pyc,, +transformers/models/codegen/__pycache__/tokenization_codegen.cpython-310.pyc,, +transformers/models/codegen/__pycache__/tokenization_codegen_fast.cpython-310.pyc,, +transformers/models/codegen/configuration_codegen.py,sha256=ntWOJE0XkxKxFY2eR95JCN5QlO-xVXbBksmLxx-FKiw,9543 +transformers/models/codegen/modeling_codegen.py,sha256=nBNUcSKq4NL4mVQ27PJzdMgg7ffYRuAUDxz-d0BIjGw,36727 +transformers/models/codegen/tokenization_codegen.py,sha256=5w0lSfqPVuixyH3Z71hvToC2ueFb0MPloGW_nVl1QMM,16563 +transformers/models/codegen/tokenization_codegen_fast.py,sha256=mthH0R-VI8IohKpVhHZNarxpoPnWKUwjY5HO4efWJ4o,10966 +transformers/models/cohere/__init__.py,sha256=1Tg-6WGc5wgGduSR__N-jGZvPje9kNs92DW78vN0Auo,1037 +transformers/models/cohere/__pycache__/__init__.cpython-310.pyc,, +transformers/models/cohere/__pycache__/configuration_cohere.cpython-310.pyc,, 
+transformers/models/cohere/__pycache__/modeling_cohere.cpython-310.pyc,, +transformers/models/cohere/__pycache__/modular_cohere.cpython-310.pyc,, +transformers/models/cohere/__pycache__/tokenization_cohere_fast.cpython-310.pyc,, +transformers/models/cohere/configuration_cohere.py,sha256=dfUudDYlI23YXb_io5qUtioJWPquG-lf5uZjbULzCYQ,10573 +transformers/models/cohere/modeling_cohere.py,sha256=67gO6EUFnRiAgVwogF092-7yB1mDoowAoKnELqruSFc,41961 +transformers/models/cohere/modular_cohere.py,sha256=s8XdcvXMeY3ZCg5R_OAHNByRwY1dtPWr5yqOW2VLYb0,17769 +transformers/models/cohere/tokenization_cohere_fast.py,sha256=MZ6rH4mYvCcyjq-JkP9weuqMOiNn4t_xcdX6fkdwLzE,28901 +transformers/models/cohere2/__init__.py,sha256=6Cx_c-uTSNopbO3NLWCgMmEB2-5hzkrunUWmMrb8YSU,1011 +transformers/models/cohere2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/cohere2/__pycache__/configuration_cohere2.cpython-310.pyc,, +transformers/models/cohere2/__pycache__/modeling_cohere2.cpython-310.pyc,, +transformers/models/cohere2/__pycache__/modular_cohere2.cpython-310.pyc,, +transformers/models/cohere2/configuration_cohere2.py,sha256=dGtmibUSwhMgmmDwUMs4VK3PtO1WF9vhUAe4s945Go0,11628 +transformers/models/cohere2/modeling_cohere2.py,sha256=3bsi9EzflVKy0idLhgI4sv0YaOfqQg5r61kd1G_8p4M,44517 +transformers/models/cohere2/modular_cohere2.py,sha256=bCQbzAFqyAXE2-7qPcj6aU0v1CmcWHHlQ5eT1i7aW_U,28613 +transformers/models/colpali/__init__.py,sha256=eG-nOojo-DPkgZJACn6hbJqqfnGE97uKmLkpWVin66A,1033 +transformers/models/colpali/__pycache__/__init__.cpython-310.pyc,, +transformers/models/colpali/__pycache__/configuration_colpali.cpython-310.pyc,, +transformers/models/colpali/__pycache__/convert_colpali_weights_to_hf.cpython-310.pyc,, +transformers/models/colpali/__pycache__/modeling_colpali.cpython-310.pyc,, +transformers/models/colpali/__pycache__/modular_colpali.cpython-310.pyc,, +transformers/models/colpali/__pycache__/processing_colpali.cpython-310.pyc,, 
+transformers/models/colpali/configuration_colpali.py,sha256=Opz4MSjq2v5n81qocw4zEdWsHFQky5zZAXbUu9g0ES8,4517 +transformers/models/colpali/convert_colpali_weights_to_hf.py,sha256=HKzFnpjMh29l1exjW3FCgWir2I9W-eHq_Yiz5k3FVcU,7737 +transformers/models/colpali/modeling_colpali.py,sha256=cLFKQ3Vyj_PSUZx1bAfogHnXJ63DmDR-StMzhDKJd3s,13501 +transformers/models/colpali/modular_colpali.py,sha256=AxJYOYBfow5ALn0okaTP7FdfSOM76ZHWp9qN9OaVnQ8,15887 +transformers/models/colpali/processing_colpali.py,sha256=Tn92XJdEkJRUbsfbZ4_CkKjXDzgIcs3uKseiyNCtt80,20396 +transformers/models/conditional_detr/__init__.py,sha256=hJC-1k8x4oSJK6HtMlMOIubpqnRRg1LCUsrwW9ta61g,1121 +transformers/models/conditional_detr/__pycache__/__init__.cpython-310.pyc,, +transformers/models/conditional_detr/__pycache__/configuration_conditional_detr.cpython-310.pyc,, +transformers/models/conditional_detr/__pycache__/convert_conditional_detr_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/conditional_detr/__pycache__/feature_extraction_conditional_detr.cpython-310.pyc,, +transformers/models/conditional_detr/__pycache__/image_processing_conditional_detr.cpython-310.pyc,, +transformers/models/conditional_detr/__pycache__/modeling_conditional_detr.cpython-310.pyc,, +transformers/models/conditional_detr/configuration_conditional_detr.py,sha256=NxpaHN0GK_PgtwOQFDNqrkiGKKrR0CoeIXtO94EYRuU,13352 +transformers/models/conditional_detr/convert_conditional_detr_original_pytorch_checkpoint_to_pytorch.py,sha256=pECO3PVooqZicWn5ycbfTg69C0oicbbSigv55fVCwIM,15929 +transformers/models/conditional_detr/feature_extraction_conditional_detr.py,sha256=K0Zp0cIuPo_oL56ltJ22I7K8EMeM9TQIkYG189WeP4c,1601 +transformers/models/conditional_detr/image_processing_conditional_detr.py,sha256=_-r-dxXfSd4wfbmIWq_-S6tdJb0ta8nVz3Rd-VTFeVA,85774 +transformers/models/conditional_detr/modeling_conditional_detr.py,sha256=kqmb5Qa-kONHxc40CqfkeJdEkm9wnR28sj-aZvkV0mg,103377 
+transformers/models/convbert/__init__.py,sha256=x1Rv5-rurTKFifp3w8N_CNcZ3sHvuFwqpw_Zn1BAenw,1124 +transformers/models/convbert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/convbert/__pycache__/configuration_convbert.cpython-310.pyc,, +transformers/models/convbert/__pycache__/convert_convbert_original_tf1_checkpoint_to_pytorch_and_tf2.cpython-310.pyc,, +transformers/models/convbert/__pycache__/modeling_convbert.cpython-310.pyc,, +transformers/models/convbert/__pycache__/modeling_tf_convbert.cpython-310.pyc,, +transformers/models/convbert/__pycache__/tokenization_convbert.cpython-310.pyc,, +transformers/models/convbert/__pycache__/tokenization_convbert_fast.cpython-310.pyc,, +transformers/models/convbert/configuration_convbert.py,sha256=hsAfVzzvceAyeDelaOZWOIF1yZqEWfVD_KzVXAXplWA,6886 +transformers/models/convbert/convert_convbert_original_tf1_checkpoint_to_pytorch_and_tf2.py,sha256=vTZyGhG9v7o4rDuP9-xM26gX1EzlCda7Sn_ELT9n3Gk,2108 +transformers/models/convbert/modeling_convbert.py,sha256=sHzeIBomOQzgR9CWllf3S54YiiJnz6hom5c6wWvqNJc,58621 +transformers/models/convbert/modeling_tf_convbert.py,sha256=8eSAaqs2q7nzaibq7ArjnCNhijbmADBlZ9ptUneNzqE,61643 +transformers/models/convbert/tokenization_convbert.py,sha256=oFj7oW5yiGmxrIyA3T3ll4-eidb_mADIrTFZTCfWGBU,21323 +transformers/models/convbert/tokenization_convbert_fast.py,sha256=z38ic66_ArTz-TRaBtp0UZbz9freg8d9bytqgGZBHuQ,7819 +transformers/models/convnext/__init__.py,sha256=7lOqhxn0YkWn7gpYjUTgnWBEy3gEwxcVcQioXQhTy_Y,1129 +transformers/models/convnext/__pycache__/__init__.cpython-310.pyc,, +transformers/models/convnext/__pycache__/configuration_convnext.cpython-310.pyc,, +transformers/models/convnext/__pycache__/convert_convnext_to_pytorch.cpython-310.pyc,, +transformers/models/convnext/__pycache__/feature_extraction_convnext.cpython-310.pyc,, +transformers/models/convnext/__pycache__/image_processing_convnext.cpython-310.pyc,, +transformers/models/convnext/__pycache__/modeling_convnext.cpython-310.pyc,, 
+transformers/models/convnext/__pycache__/modeling_tf_convnext.cpython-310.pyc,, +transformers/models/convnext/configuration_convnext.py,sha256=L57JCbIzUNBC8XbTMEmnlrcd7Y64lIK45X84cLRncYs,6183 +transformers/models/convnext/convert_convnext_to_pytorch.py,sha256=h8WJeh02GFWMYkq-9MdxyiBbsmWQJFSMhNzCHrxUI8o,10219 +transformers/models/convnext/feature_extraction_convnext.py,sha256=7C9es8Qn6F_Vx95pnKP7nCwNQeLqd9dxILAfV9pLA0g,1241 +transformers/models/convnext/image_processing_convnext.py,sha256=mimUMQR-4zVpU22HO6CHv2UZ73_LeBtH-XaQphydX-8,15866 +transformers/models/convnext/modeling_convnext.py,sha256=Bspvf_MN5Ws9nx2BTgzdnTFdZ7dZ1J1xMMXArTlV0b8,21934 +transformers/models/convnext/modeling_tf_convnext.py,sha256=kPoHoYN2P6O6aOOBXCz027B4k-7AakvdEBvA_84wfdQ,27290 +transformers/models/convnextv2/__init__.py,sha256=kOl9JbYIk9ioImF_hd0BS_mGDC8SG2k5LvO0-7WroRo,1043 +transformers/models/convnextv2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/convnextv2/__pycache__/configuration_convnextv2.cpython-310.pyc,, +transformers/models/convnextv2/__pycache__/convert_convnextv2_to_pytorch.cpython-310.pyc,, +transformers/models/convnextv2/__pycache__/modeling_convnextv2.cpython-310.pyc,, +transformers/models/convnextv2/__pycache__/modeling_tf_convnextv2.cpython-310.pyc,, +transformers/models/convnextv2/configuration_convnextv2.py,sha256=wHvC-d6TiQR2v1D5bdRi7sNoRTN5IURCCcek64yVvIc,5564 +transformers/models/convnextv2/convert_convnextv2_to_pytorch.py,sha256=Yswl5UwLP0t0tC8O2b8wix2beNaMtPy7areKFCuEccg,12473 +transformers/models/convnextv2/modeling_convnextv2.py,sha256=n-JJKLyvQoC6mX0sCvCRFFTYv4ijVFNZVHXzpGEER-8,23714 +transformers/models/convnextv2/modeling_tf_convnextv2.py,sha256=tP0OnIL0hrRCOLoxgP9QpdlcVKiBcNsGI0a8e7Sd3bI,27708 +transformers/models/cpm/__init__.py,sha256=5Oz79wRruzXHciBLUAOGeo6PIH70Vs4ta8ffsMyT1Yg,995 +transformers/models/cpm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/cpm/__pycache__/tokenization_cpm.cpython-310.pyc,, 
+transformers/models/cpm/__pycache__/tokenization_cpm_fast.cpython-310.pyc,, +transformers/models/cpm/tokenization_cpm.py,sha256=IeKGjhlQ9EHDJGIVQDSg_c6Y4G4RKP2xqt_4Mnyl9c4,15056 +transformers/models/cpm/tokenization_cpm_fast.py,sha256=aqUUhXsYaqB7yglDU7NaaJp0HqvbdoSU9Uacp-Ye-dc,10459 +transformers/models/cpmant/__init__.py,sha256=RfkbbhNqdbioJ5XVaTtxBLnZRt1GFnXugS3UFXHYV0c,1032 +transformers/models/cpmant/__pycache__/__init__.cpython-310.pyc,, +transformers/models/cpmant/__pycache__/configuration_cpmant.cpython-310.pyc,, +transformers/models/cpmant/__pycache__/modeling_cpmant.cpython-310.pyc,, +transformers/models/cpmant/__pycache__/tokenization_cpmant.cpython-310.pyc,, +transformers/models/cpmant/configuration_cpmant.py,sha256=RvgmQH8lQazRopzpfK5-Hf4eePtXXfvMJ3ar1VQC2vE,5145 +transformers/models/cpmant/modeling_cpmant.py,sha256=CbnELAjgIeS773YJDYlZW-RZJgbZi8o-bnIaB7ALMPQ,37112 +transformers/models/cpmant/tokenization_cpmant.py,sha256=3q3TlMkAMfVook26V--_0QO1IYjvEeVWzDEbNNxQDqw,9736 +transformers/models/ctrl/__init__.py,sha256=bVtGijL4n9ewNyhcJt7lpsRhXU8yo4nY0xIlRbpismk,1062 +transformers/models/ctrl/__pycache__/__init__.cpython-310.pyc,, +transformers/models/ctrl/__pycache__/configuration_ctrl.cpython-310.pyc,, +transformers/models/ctrl/__pycache__/modeling_ctrl.cpython-310.pyc,, +transformers/models/ctrl/__pycache__/modeling_tf_ctrl.cpython-310.pyc,, +transformers/models/ctrl/__pycache__/tokenization_ctrl.cpython-310.pyc,, +transformers/models/ctrl/configuration_ctrl.py,sha256=Vg6ZFqal5MCr-t2K5pp5mtN2TJSeojgKL8IgbZkd81k,4684 +transformers/models/ctrl/modeling_ctrl.py,sha256=4CDuChySmb9G75vIkd8DqJ_UBKotmhN7mJ851ZOLN2I,35826 +transformers/models/ctrl/modeling_tf_ctrl.py,sha256=NhFelt1xczUgJybt3VZ90gBWsVno1BPNHj_jNFmcMHk,39744 +transformers/models/ctrl/tokenization_ctrl.py,sha256=4kWB5UEE197eRrbKqqnFkmPweDb1QiF_WY3z2Vl4y3s,8087 +transformers/models/cvt/__init__.py,sha256=i1847SsjrXEIbrXsDEAiUlrtgLZRHtCSVG0rvCPXE9I,1022 
+transformers/models/cvt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/cvt/__pycache__/configuration_cvt.cpython-310.pyc,, +transformers/models/cvt/__pycache__/convert_cvt_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/cvt/__pycache__/modeling_cvt.cpython-310.pyc,, +transformers/models/cvt/__pycache__/modeling_tf_cvt.cpython-310.pyc,, +transformers/models/cvt/configuration_cvt.py,sha256=OdBupwTQpaCO1R-0anjvmPWvEjo1R7fl3lhNlrKJMz0,6684 +transformers/models/cvt/convert_cvt_original_pytorch_checkpoint_to_pytorch.py,sha256=zoed0S0LFkqKv3Or-8O512mjeVBo4dZ7bgnOCaqOU4E,13578 +transformers/models/cvt/modeling_cvt.py,sha256=lSUZ-JsNP-1HauKLW_m_lZuLS4xB4TkGB_s9C6zCuOA,28780 +transformers/models/cvt/modeling_tf_cvt.py,sha256=ce4UP4iJ0AHH_Lih8J-uYn8uvUB_yHDQx8GiwEn8-ms,43545 +transformers/models/dac/__init__.py,sha256=UpwXPmSOQOwvbIvklM21-y5HKY7MEIInmTt65xMX6Hw,1029 +transformers/models/dac/__pycache__/__init__.cpython-310.pyc,, +transformers/models/dac/__pycache__/configuration_dac.cpython-310.pyc,, +transformers/models/dac/__pycache__/convert_dac_checkpoint.cpython-310.pyc,, +transformers/models/dac/__pycache__/feature_extraction_dac.cpython-310.pyc,, +transformers/models/dac/__pycache__/modeling_dac.cpython-310.pyc,, +transformers/models/dac/configuration_dac.py,sha256=B-m2cUJBQe3AvbMH4hWxLpY2HdLIXrxXpJb6CEvA7XA,4581 +transformers/models/dac/convert_dac_checkpoint.py,sha256=ab2XoJEE5VsurcClyiZqQ6T57SJc9v25DEU8e0BV70Q,9435 +transformers/models/dac/feature_extraction_dac.py,sha256=_2tp_1K2ZGX4gdYtP-LwIwgHUQxD1XnwTSs1OjrMxVU,7947 +transformers/models/dac/modeling_dac.py,sha256=nTu7ghFPAQFASaJMlSYWGfDLBAgwfJ_tuRYu9W03tiU,30313 +transformers/models/data2vec/__init__.py,sha256=-2iFF1Rb8eF9cccBNLA29zgeFV1ADYaSLoQgf6K6KB8,1238 +transformers/models/data2vec/__pycache__/__init__.cpython-310.pyc,, +transformers/models/data2vec/__pycache__/configuration_data2vec_audio.cpython-310.pyc,, 
+transformers/models/data2vec/__pycache__/configuration_data2vec_text.cpython-310.pyc,, +transformers/models/data2vec/__pycache__/configuration_data2vec_vision.cpython-310.pyc,, +transformers/models/data2vec/__pycache__/convert_data2vec_audio_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/data2vec/__pycache__/convert_data2vec_text_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/data2vec/__pycache__/convert_data2vec_vision_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/data2vec/__pycache__/modeling_data2vec_audio.cpython-310.pyc,, +transformers/models/data2vec/__pycache__/modeling_data2vec_text.cpython-310.pyc,, +transformers/models/data2vec/__pycache__/modeling_data2vec_vision.cpython-310.pyc,, +transformers/models/data2vec/__pycache__/modeling_tf_data2vec_vision.cpython-310.pyc,, +transformers/models/data2vec/configuration_data2vec_audio.py,sha256=S2S_uz3AgSPJLOSXEOoP6a_GxDPzMGLGRMGENJPdK5Q,16357 +transformers/models/data2vec/configuration_data2vec_text.py,sha256=Ylj-Vb1EoeJ2_N7UhYl5nY1ynCFDUwT6u0HjFzaHP-o,7336 +transformers/models/data2vec/configuration_data2vec_vision.py,sha256=knPaO-3WZ78J6m6oM-IzP2UIIpd1OxcMZ_-qmuZwTHI,9305 +transformers/models/data2vec/convert_data2vec_audio_original_pytorch_checkpoint_to_pytorch.py,sha256=dvcTq8C9Zl4axc0gYcqYaTWTqUxgwve1O7xhXMeWu8c,10881 +transformers/models/data2vec/convert_data2vec_text_original_pytorch_checkpoint_to_pytorch.py,sha256=eryzP47_SwQ2keZGhuTodpoNS4WtFVU34XoG8dBafSw,9579 +transformers/models/data2vec/convert_data2vec_vision_original_pytorch_checkpoint_to_pytorch.py,sha256=qKjV-jqIgL-6i17m4yQLW_93SbPpGxQnvHjuy1xVxQU,15340 +transformers/models/data2vec/modeling_data2vec_audio.py,sha256=9FNetgqH89wIgtxU3t2_qxfiymkSGxozqdVgsU5ELNo,79045 +transformers/models/data2vec/modeling_data2vec_text.py,sha256=kdcQwBDe7Y7emvrNy48al-IkvPPrPinrxF536Sb3j8I,70696 
+transformers/models/data2vec/modeling_data2vec_vision.py,sha256=sPgIXfz_Tj5PeDSE5csTv61Xxrq6ES--nnIb0Dx9U_Y,63810 +transformers/models/data2vec/modeling_tf_data2vec_vision.py,sha256=oPTndzxpOq1tgaFq7hn5ItUFotXv8XVdLGYrhlBY-i4,73525 +transformers/models/dbrx/__init__.py,sha256=Kzn3gm0QHW9RKEmog_IfdCGam5TXSCzkOs_WHC43sgM,989 +transformers/models/dbrx/__pycache__/__init__.cpython-310.pyc,, +transformers/models/dbrx/__pycache__/configuration_dbrx.cpython-310.pyc,, +transformers/models/dbrx/__pycache__/modeling_dbrx.cpython-310.pyc,, +transformers/models/dbrx/configuration_dbrx.py,sha256=xQAZE62JKpCA3uFt_vq7FzYnxCho-iqMZz8aiCBv0bc,9888 +transformers/models/dbrx/modeling_dbrx.py,sha256=zh4dmKyLGHAuw9L8ENkwTzk-hunG173g0rSXjziijUo,62561 +transformers/models/deberta/__init__.py,sha256=diL764eL8gu80XkBDQU9nI6Zy39ArO0d85MtcZ4_NPw,1119 +transformers/models/deberta/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deberta/__pycache__/configuration_deberta.cpython-310.pyc,, +transformers/models/deberta/__pycache__/modeling_deberta.cpython-310.pyc,, +transformers/models/deberta/__pycache__/modeling_tf_deberta.cpython-310.pyc,, +transformers/models/deberta/__pycache__/tokenization_deberta.cpython-310.pyc,, +transformers/models/deberta/__pycache__/tokenization_deberta_fast.cpython-310.pyc,, +transformers/models/deberta/configuration_deberta.py,sha256=dld6vou_Gs_Ap_AW4YlpY7RHx_NneY-TEtwTkBf5KaU,8997 +transformers/models/deberta/modeling_deberta.py,sha256=EWYq2ts6kzMN_bPOcSp6rtsFufdM4KA9x029a6dOwIE,55906 +transformers/models/deberta/modeling_tf_deberta.py,sha256=fgNzpzCP-eXy2qpf8Sn1qRdVJEccnl4zxJxOem652IM,69243 +transformers/models/deberta/tokenization_deberta.py,sha256=V0VwuI-q1GH-1f1HhiuC_KmCcR-HQdwRibcmsPcvXcw,17084 +transformers/models/deberta/tokenization_deberta_fast.py,sha256=18QhuVxccQUZbP5-D51-hAcNRGuiIiTXOi5LxC4GGv8,10254 +transformers/models/deberta_v2/__init__.py,sha256=N6wcSGakSmmHDW_QelFsn58zuDFTuvbctgkyC0OfQ5Y,1134 
+transformers/models/deberta_v2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deberta_v2/__pycache__/configuration_deberta_v2.cpython-310.pyc,, +transformers/models/deberta_v2/__pycache__/modeling_deberta_v2.cpython-310.pyc,, +transformers/models/deberta_v2/__pycache__/modeling_tf_deberta_v2.cpython-310.pyc,, +transformers/models/deberta_v2/__pycache__/tokenization_deberta_v2.cpython-310.pyc,, +transformers/models/deberta_v2/__pycache__/tokenization_deberta_v2_fast.cpython-310.pyc,, +transformers/models/deberta_v2/configuration_deberta_v2.py,sha256=t2xyEazeOSgC_3wzv1PHeumpMWrX9bx6pzyQOdFdVQc,8937 +transformers/models/deberta_v2/modeling_deberta_v2.py,sha256=KhgUX6y4oSD-tTX_6E5wvHQAcaDy6YqpzoVPKmgpTDw,64080 +transformers/models/deberta_v2/modeling_tf_deberta_v2.py,sha256=2ApxmWBRP2jY7A2FLny8Jv_88blDpQfPUK7Y-6P8j14,81668 +transformers/models/deberta_v2/tokenization_deberta_v2.py,sha256=ULTMnycvccuaijJoh0i87-TiYUonQc23SgewVtlp1ns,20737 +transformers/models/deberta_v2/tokenization_deberta_v2_fast.py,sha256=fKuOjE2SxFSvXwNe1m9oLO_BblM3v4c3r4gzIbywteU,9797 +transformers/models/decision_transformer/__init__.py,sha256=8XAHnFrFv8IFz495cQLTeaAk2G1AVRT7roauVHCGoJs,1021 +transformers/models/decision_transformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/decision_transformer/__pycache__/configuration_decision_transformer.cpython-310.pyc,, +transformers/models/decision_transformer/__pycache__/modeling_decision_transformer.cpython-310.pyc,, +transformers/models/decision_transformer/configuration_decision_transformer.py,sha256=2Dsh1_qQB9oZ_cSs6W-MGfiBFRyaNzgXVXvHiLE3f1Q,7028 +transformers/models/decision_transformer/modeling_decision_transformer.py,sha256=rzwqcU8nbVqJPFmbIrBrYW_A79bvajc83Wc442231Jk,44414 +transformers/models/deformable_detr/__init__.py,sha256=_ae-sABBY17hOT28SN_d0GLeRVjya0W4aqniH8u8Bcw,1176 +transformers/models/deformable_detr/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/deformable_detr/__pycache__/configuration_deformable_detr.cpython-310.pyc,, +transformers/models/deformable_detr/__pycache__/convert_deformable_detr_to_pytorch.cpython-310.pyc,, +transformers/models/deformable_detr/__pycache__/feature_extraction_deformable_detr.cpython-310.pyc,, +transformers/models/deformable_detr/__pycache__/image_processing_deformable_detr.cpython-310.pyc,, +transformers/models/deformable_detr/__pycache__/image_processing_deformable_detr_fast.cpython-310.pyc,, +transformers/models/deformable_detr/__pycache__/load_custom.cpython-310.pyc,, +transformers/models/deformable_detr/__pycache__/modeling_deformable_detr.cpython-310.pyc,, +transformers/models/deformable_detr/__pycache__/modular_deformable_detr.cpython-310.pyc,, +transformers/models/deformable_detr/configuration_deformable_detr.py,sha256=cNBfcSAQgMas4J6kIaAVm3WnWCdJNIhr9O7ZXomQ6mQ,14571 +transformers/models/deformable_detr/convert_deformable_detr_to_pytorch.py,sha256=ytsMFj4rqS-8fxDL9wojqFN-PscjWJ79rVcVQQk_1s8,9460 +transformers/models/deformable_detr/feature_extraction_deformable_detr.py,sha256=G398FdbrUYFlnOCmSF1yie1yKOvz0tlCjxxLblqvRqc,1593 +transformers/models/deformable_detr/image_processing_deformable_detr.py,sha256=-DwGZNPzBZbJdLZLzqJ4m8saLfUJrw1jxsZTgLPL_sA,73232 +transformers/models/deformable_detr/image_processing_deformable_detr_fast.py,sha256=kBV4WMrJC2B4KlXne8kdlVARmX7Rp0gwc3er0FInwKk,49149 +transformers/models/deformable_detr/load_custom.py,sha256=GvDeH883HST8-vH5Xl5jcR9VS_e0GSzbDoImSLug9rA,1559 +transformers/models/deformable_detr/modeling_deformable_detr.py,sha256=EN6Ssah2ppve2bXcOrSy6ZAmDtj3_Cng90mevH_2RcE,100706 +transformers/models/deformable_detr/modular_deformable_detr.py,sha256=wMvZzwSeNzGezxYzLPYWVy3mQQB0tymXaryEl060I1U,6586 +transformers/models/deit/__init__.py,sha256=_O6kYkQk57vwOspMLu4LH6c6GRBCQb4h4Ugc4h_ngPc,1109 +transformers/models/deit/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/deit/__pycache__/configuration_deit.cpython-310.pyc,, +transformers/models/deit/__pycache__/convert_deit_timm_to_pytorch.cpython-310.pyc,, +transformers/models/deit/__pycache__/feature_extraction_deit.cpython-310.pyc,, +transformers/models/deit/__pycache__/image_processing_deit.cpython-310.pyc,, +transformers/models/deit/__pycache__/modeling_deit.cpython-310.pyc,, +transformers/models/deit/__pycache__/modeling_tf_deit.cpython-310.pyc,, +transformers/models/deit/configuration_deit.py,sha256=jiP2Ah3GbSLrtdmdaAXcv_o2akAd4LFZgRsuXddGJQA,5740 +transformers/models/deit/convert_deit_timm_to_pytorch.py,sha256=7wWjhmCpS972rYukIRZsfhVfklvbdl8nclFqiRwb82Y,9216 +transformers/models/deit/feature_extraction_deit.py,sha256=D6sKbJwtaJphyzpoXiDbbU3eanJeA8WbuPkvOZdTySg,1209 +transformers/models/deit/image_processing_deit.py,sha256=0eqlPgfKXZqZUCIwlSNuHGTsdAZVoiI5dmHnlESp2hQ,15179 +transformers/models/deit/modeling_deit.py,sha256=sZ8LsDmD1W_gozwPiTGYJbSmZKdlCbeUZkPcGv-uD5g,43386 +transformers/models/deit/modeling_tf_deit.py,sha256=KF8t1-VfvdN2qZsz5dV0R5_KPa1P-ZVzK434rqrOi9o,51752 +transformers/models/deprecated/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +transformers/models/deprecated/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/bort/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +transformers/models/deprecated/bort/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/bort/__pycache__/convert_bort_original_gluonnlp_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/deprecated/bort/convert_bort_original_gluonnlp_checkpoint_to_pytorch.py,sha256=9aY4nB-A4-WeVVH6CzE9kxoHr9uqL5f5NW2_OirzWxo,14067 +transformers/models/deprecated/deta/__init__.py,sha256=sRdhN6pSfT1G8VY04s6jNnZBKgyZrB4DsrBsAPs8Rw8,2038 +transformers/models/deprecated/deta/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/deta/__pycache__/configuration_deta.cpython-310.pyc,, 
+transformers/models/deprecated/deta/__pycache__/convert_deta_resnet_to_pytorch.cpython-310.pyc,, +transformers/models/deprecated/deta/__pycache__/convert_deta_swin_to_pytorch.cpython-310.pyc,, +transformers/models/deprecated/deta/__pycache__/image_processing_deta.cpython-310.pyc,, +transformers/models/deprecated/deta/__pycache__/modeling_deta.cpython-310.pyc,, +transformers/models/deprecated/deta/configuration_deta.py,sha256=GTfTPOaP2JzdNKQa9zg3CQ52QPuQOkoxU9VLc-uZt2s,13948 +transformers/models/deprecated/deta/convert_deta_resnet_to_pytorch.py,sha256=Eu3xpsuFwp-vkyOPHhAjWYIlN1zvm9TirpGMTq5GzGw,16799 +transformers/models/deprecated/deta/convert_deta_swin_to_pytorch.py,sha256=RWx7DMt5PMBoOhQM3poOILmIAJPHEWhH9mY7DeZTVDg,18997 +transformers/models/deprecated/deta/image_processing_deta.py,sha256=0_ZAQjb4G-5XUFsXmDW61zf-TOg5-xUqmxnPb8R8WGw,54891 +transformers/models/deprecated/deta/modeling_deta.py,sha256=6Rd6LUDmEB5N1T-KnIL6fuIj5f0MpqH8fuUjK47V3VI,135663 +transformers/models/deprecated/efficientformer/__init__.py,sha256=u4KA4byDgoRkQ9uuNGgkA2PtpIh0BpZVTObA0Vgil-E,3188 +transformers/models/deprecated/efficientformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/efficientformer/__pycache__/configuration_efficientformer.cpython-310.pyc,, +transformers/models/deprecated/efficientformer/__pycache__/convert_efficientformer_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/deprecated/efficientformer/__pycache__/image_processing_efficientformer.cpython-310.pyc,, +transformers/models/deprecated/efficientformer/__pycache__/modeling_efficientformer.cpython-310.pyc,, +transformers/models/deprecated/efficientformer/__pycache__/modeling_tf_efficientformer.cpython-310.pyc,, +transformers/models/deprecated/efficientformer/configuration_efficientformer.py,sha256=QFiBTmFQU6P8VllghZ5jQpR1Dthnm9uylTgf7z3uHMc,7719 
+transformers/models/deprecated/efficientformer/convert_efficientformer_original_pytorch_checkpoint_to_pytorch.py,sha256=1ni0wyhRjTbF8U4BZ_FXU-_9Jzy43HMLKI3vGlyPjFc,9381 +transformers/models/deprecated/efficientformer/image_processing_efficientformer.py,sha256=Pd7PjkkXF4uQk0fJBku18OKRi7hMoYsPmf3uXhcOc3M,15698 +transformers/models/deprecated/efficientformer/modeling_efficientformer.py,sha256=029SXmbp69FUn13m9OdoTHivIAe-x-RkFUtk62yYLWk,33580 +transformers/models/deprecated/efficientformer/modeling_tf_efficientformer.py,sha256=tRs9Ljxf8bf1B-6MwpUPuUinzWik24kzQH50Ke58cjw,49194 +transformers/models/deprecated/ernie_m/__init__.py,sha256=V6C21iE8AKYSpDNW4Ffn3IGiKp69T4ro2LFcRXc0mq4,2458 +transformers/models/deprecated/ernie_m/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/ernie_m/__pycache__/configuration_ernie_m.cpython-310.pyc,, +transformers/models/deprecated/ernie_m/__pycache__/modeling_ernie_m.cpython-310.pyc,, +transformers/models/deprecated/ernie_m/__pycache__/tokenization_ernie_m.cpython-310.pyc,, +transformers/models/deprecated/ernie_m/configuration_ernie_m.py,sha256=bGRUXTL8NdJEevZNBmDBh_aB_RwRNL8G1rfdNYMW69s,5885 +transformers/models/deprecated/ernie_m/modeling_ernie_m.py,sha256=flc9_1HuMrChGoHcx4au3IlEbt9VaWDYY29yaSmG3Dc,47028 +transformers/models/deprecated/ernie_m/tokenization_ernie_m.py,sha256=oGKdPntR5sjU3XrxbaRNySX76bQaSLlFWNTgLJfmXBI,16169 +transformers/models/deprecated/gptsan_japanese/__init__.py,sha256=8a1T_PBkN2MKzJDSTVJan5kSknwon3cRtUicoKVt2SY,2083 +transformers/models/deprecated/gptsan_japanese/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/gptsan_japanese/__pycache__/configuration_gptsan_japanese.cpython-310.pyc,, +transformers/models/deprecated/gptsan_japanese/__pycache__/convert_gptsan_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/deprecated/gptsan_japanese/__pycache__/modeling_gptsan_japanese.cpython-310.pyc,, 
+transformers/models/deprecated/gptsan_japanese/__pycache__/tokenization_gptsan_japanese.cpython-310.pyc,, +transformers/models/deprecated/gptsan_japanese/configuration_gptsan_japanese.py,sha256=T8buHMjH3XFnx7BXXis6M5aTvWLwwnleTf-YDyySwNM,7124 +transformers/models/deprecated/gptsan_japanese/convert_gptsan_tf_checkpoint_to_pytorch.py,sha256=syF4TCbLQByZhm5VqIFgXfzQ4zImmCua8UNjCYJP5t8,9793 +transformers/models/deprecated/gptsan_japanese/modeling_gptsan_japanese.py,sha256=JbBnTAclyfHQHkWyRMfEVxlmK8I5IemxcPidnSMRyc8,64953 +transformers/models/deprecated/gptsan_japanese/tokenization_gptsan_japanese.py,sha256=zI356SLqne5ZLBkp1sZBjp8MCOP3VZ__zVVsl5iyDbU,22619 +transformers/models/deprecated/graphormer/__init__.py,sha256=ltRElMWou0jRd50T50NHoJoSUbvM5IrcT41EcFQ7mV0,1682 +transformers/models/deprecated/graphormer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/graphormer/__pycache__/collating_graphormer.cpython-310.pyc,, +transformers/models/deprecated/graphormer/__pycache__/configuration_graphormer.cpython-310.pyc,, +transformers/models/deprecated/graphormer/__pycache__/modeling_graphormer.cpython-310.pyc,, +transformers/models/deprecated/graphormer/algos_graphormer.pyx,sha256=b_Qlm1hKCHnAqx6oOLGC9LkivAV0K_AZRGgXT9MmBas,3635 +transformers/models/deprecated/graphormer/collating_graphormer.py,sha256=KRew-2p9_7heLTflAYA6dObor_Hxy47yIP8HFEgaj1U,6087 +transformers/models/deprecated/graphormer/configuration_graphormer.py,sha256=ZzNCBEZj_G1S1lg3MouwutiSeO9G47yFob14WGXXN9g,10380 +transformers/models/deprecated/graphormer/modeling_graphormer.py,sha256=Y3aYbgX5vIYB7FfM8jRkv2xZRLdoHZuS5aBtesLXqX8,37006 +transformers/models/deprecated/jukebox/__init__.py,sha256=96yLuu-yOBcAHaz1zhvc4RWwIvqkjycikMa-GXFcWm8,1889 +transformers/models/deprecated/jukebox/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/jukebox/__pycache__/configuration_jukebox.cpython-310.pyc,, +transformers/models/deprecated/jukebox/__pycache__/convert_jukebox.cpython-310.pyc,, 
+transformers/models/deprecated/jukebox/__pycache__/modeling_jukebox.cpython-310.pyc,, +transformers/models/deprecated/jukebox/__pycache__/tokenization_jukebox.cpython-310.pyc,, +transformers/models/deprecated/jukebox/configuration_jukebox.py,sha256=-gLq4uKdqdjCWuV9ZbChsUiFGEI0a58st5oapPTixGI,26749 +transformers/models/deprecated/jukebox/convert_jukebox.py,sha256=RBgOPbwIMv_42mUFJYxRv4IAGZn4cAzjTqjrMI7HtVg,11789 +transformers/models/deprecated/jukebox/modeling_jukebox.py,sha256=O0xBJi3UyMF8Aj0TyXYbN_2wtouHjy9pIdMbUUdkiZQ,119471 +transformers/models/deprecated/jukebox/tokenization_jukebox.py,sha256=r1YcKG2OkPWAKdriQ2BXgX-MBsQHbeyccoc5aKLCpac,17352 +transformers/models/deprecated/mctct/__init__.py,sha256=aaM-CVsMyEUWqGHH5xAgnqUu6B5D730X_lTo7CMpo7o,1732 +transformers/models/deprecated/mctct/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/mctct/__pycache__/configuration_mctct.cpython-310.pyc,, +transformers/models/deprecated/mctct/__pycache__/feature_extraction_mctct.cpython-310.pyc,, +transformers/models/deprecated/mctct/__pycache__/modeling_mctct.cpython-310.pyc,, +transformers/models/deprecated/mctct/__pycache__/processing_mctct.cpython-310.pyc,, +transformers/models/deprecated/mctct/configuration_mctct.py,sha256=OmrxkatPuycQORmuIQWznAHsi20nF9CM-HHtHWyh1gM,9073 +transformers/models/deprecated/mctct/feature_extraction_mctct.py,sha256=JsaSE20NeqBX8Uw-07Y5HdUcQtbYZqCrTN18Wu2B4rI,13460 +transformers/models/deprecated/mctct/modeling_mctct.py,sha256=YiUE1VOTKMH6oGk9vlqKf4Q8YxkQFJBQud9bJXgF6ug,32874 +transformers/models/deprecated/mctct/processing_mctct.py,sha256=EkokdjeJPgzsSxriPNmAthZ6WgO_iQyFMpQKXDeS7Uo,5931 +transformers/models/deprecated/mega/__init__.py,sha256=i_9dHqDl6RZJ1zebhj8pn3zPlgxRqEynwsM_mj9eWMs,1973 +transformers/models/deprecated/mega/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/mega/__pycache__/configuration_mega.cpython-310.pyc,, 
+transformers/models/deprecated/mega/__pycache__/convert_mega_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/deprecated/mega/__pycache__/modeling_mega.cpython-310.pyc,, +transformers/models/deprecated/mega/configuration_mega.py,sha256=0m3Fsv9KqcZECi7Dbgjdz7nidKqf8MQbdfMsYMlMF_4,12588 +transformers/models/deprecated/mega/convert_mega_original_pytorch_checkpoint_to_pytorch.py,sha256=RqYrXvQNCa-mSlF9L0ayNvdrdaAayIsEIXpJ_j8c7FE,13155 +transformers/models/deprecated/mega/modeling_mega.py,sha256=-fLQbigFtoljvUS0VisbT_Y7_q9qiyYEkXsM-mdFQbc,109519 +transformers/models/deprecated/mmbt/__init__.py,sha256=0CCmesCwGIMNFlf2oDsL0gYaCSpsfAC1_bMOXRcAgF4,1480 +transformers/models/deprecated/mmbt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/mmbt/__pycache__/configuration_mmbt.cpython-310.pyc,, +transformers/models/deprecated/mmbt/__pycache__/modeling_mmbt.cpython-310.pyc,, +transformers/models/deprecated/mmbt/configuration_mmbt.py,sha256=mVkSYHpXNnKbvGiJ_0MOF8V_lqwu0l4rdhwIDTWFu7o,1597 +transformers/models/deprecated/mmbt/modeling_mmbt.py,sha256=ms_fa8G6Ww3kyk7jqLeAdba6k2E6VMBq82zMz5GvFKQ,18913 +transformers/models/deprecated/nat/__init__.py,sha256=1KgeUYAs8Ypq1rZgA1tS_cq0GNjTINvjycdQR-m0P7s,1613 +transformers/models/deprecated/nat/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/nat/__pycache__/configuration_nat.cpython-310.pyc,, +transformers/models/deprecated/nat/__pycache__/modeling_nat.cpython-310.pyc,, +transformers/models/deprecated/nat/configuration_nat.py,sha256=o-nifDP9IvftvMzVeoXGGUycwUJ-wox1K6QVd4kpaik,6975 +transformers/models/deprecated/nat/modeling_nat.py,sha256=G2ggfJ_RJYBIIAbjvJu2DaAdgEzf74chqVvaNenvcSQ,39728 +transformers/models/deprecated/nezha/__init__.py,sha256=p4YuR6FmvGSeCniAaJaTWtL_9kqzMfnGeKAYskaWyeM,2062 +transformers/models/deprecated/nezha/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/nezha/__pycache__/configuration_nezha.cpython-310.pyc,, 
+transformers/models/deprecated/nezha/__pycache__/modeling_nezha.cpython-310.pyc,, +transformers/models/deprecated/nezha/configuration_nezha.py,sha256=hfpG7tYqEHfddMFZ4Ni6h0DRAuG6UwNEsmlxmK562ew,4817 +transformers/models/deprecated/nezha/modeling_nezha.py,sha256=37Yuhy26qkonrj7djUw9tgJTaQp5yF1fVx8ctByXSo4,73924 +transformers/models/deprecated/open_llama/__init__.py,sha256=KJ11JLm0-ytx2jZjEVggJMC-BxjklPJqLVF2Fm1Okjw,2702 +transformers/models/deprecated/open_llama/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/open_llama/__pycache__/configuration_open_llama.cpython-310.pyc,, +transformers/models/deprecated/open_llama/__pycache__/modeling_open_llama.cpython-310.pyc,, +transformers/models/deprecated/open_llama/configuration_open_llama.py,sha256=5O8r3FXbzYE4gHGqnhYj7LtSYVujAXKR4ZKkAsTGKLM,7771 +transformers/models/deprecated/open_llama/modeling_open_llama.py,sha256=ULLDJbRArLXssjlf_NZO4dtSKClF87u4fm082KpRW0U,43380 +transformers/models/deprecated/qdqbert/__init__.py,sha256=xDttpfygkbkttNuKo3pW6e-Z0_MwTbj5uICeiXWgppw,2223 +transformers/models/deprecated/qdqbert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/qdqbert/__pycache__/configuration_qdqbert.cpython-310.pyc,, +transformers/models/deprecated/qdqbert/__pycache__/modeling_qdqbert.cpython-310.pyc,, +transformers/models/deprecated/qdqbert/configuration_qdqbert.py,sha256=qYx_V85qk4g_o3L-OxiLObqndD834k0j0bDjQUNcfT8,5689 +transformers/models/deprecated/qdqbert/modeling_qdqbert.py,sha256=XjGtF8iB8I6lfvL0PQK6cVWRbY2-76Lp_RLi8F-y7Z8,77002 +transformers/models/deprecated/realm/__init__.py,sha256=_xkblqSgmTTryPK_c0N_ugcMVYc871UxI2hOskvo_pw,2504 +transformers/models/deprecated/realm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/realm/__pycache__/configuration_realm.cpython-310.pyc,, +transformers/models/deprecated/realm/__pycache__/modeling_realm.cpython-310.pyc,, +transformers/models/deprecated/realm/__pycache__/retrieval_realm.cpython-310.pyc,, 
+transformers/models/deprecated/realm/__pycache__/tokenization_realm.cpython-310.pyc,, +transformers/models/deprecated/realm/__pycache__/tokenization_realm_fast.cpython-310.pyc,, +transformers/models/deprecated/realm/configuration_realm.py,sha256=kUxwVQ0A99hr2wEFALWfvgoDJKp0OpxGjls42Q-yVZU,7557 +transformers/models/deprecated/realm/modeling_realm.py,sha256=NtHppLE8iK6rMwGjqeFRRMmGFLkm-Kc-54h1ijrvuGk,83476 +transformers/models/deprecated/realm/retrieval_realm.py,sha256=cebNTe43Mb5VN1xUzR13ewbvkGnlZ5nlJjGSj0ewoWc,6372 +transformers/models/deprecated/realm/tokenization_realm.py,sha256=sHOR4tnLFrZaiZXhXhqnibiZAaAQvtpwL18bJvWrj-c,23114 +transformers/models/deprecated/realm/tokenization_realm_fast.py,sha256=eZ76_VPhjsqdEEHRr0_4Yt3HK25y0AfOlDohgPf7790,10953 +transformers/models/deprecated/retribert/__init__.py,sha256=I5aMwp2FJw4zoL69qlsk2krQjpJMYIh436X47YxUMu8,2163 +transformers/models/deprecated/retribert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/retribert/__pycache__/configuration_retribert.cpython-310.pyc,, +transformers/models/deprecated/retribert/__pycache__/modeling_retribert.cpython-310.pyc,, +transformers/models/deprecated/retribert/__pycache__/tokenization_retribert.cpython-310.pyc,, +transformers/models/deprecated/retribert/__pycache__/tokenization_retribert_fast.cpython-310.pyc,, +transformers/models/deprecated/retribert/configuration_retribert.py,sha256=7liSa4MonQVeLEz2VlxuapZglk6Z_CzyG5i8Nxi2MTM,5200 +transformers/models/deprecated/retribert/modeling_retribert.py,sha256=tF4Sd2lY3_h0-DNqWQoiFXKxxbjfTqqsxPtJnKLEnd0,9297 +transformers/models/deprecated/retribert/tokenization_retribert.py,sha256=NnQiqNw0brmkZojd8gc6uudCYSMjsDaiTni-PUXhsd8,20650 +transformers/models/deprecated/retribert/tokenization_retribert_fast.py,sha256=hIkbxCjKbkfblfYAyEE6VOf-l7aEmgXO3myRQ917gho,7820 +transformers/models/deprecated/speech_to_text_2/__init__.py,sha256=FrO5Wtn6Uznx5DzVHyfLpdo7iDkeBpFgXC4bHXXgxxo,1951 
+transformers/models/deprecated/speech_to_text_2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/speech_to_text_2/__pycache__/configuration_speech_to_text_2.cpython-310.pyc,, +transformers/models/deprecated/speech_to_text_2/__pycache__/modeling_speech_to_text_2.cpython-310.pyc,, +transformers/models/deprecated/speech_to_text_2/__pycache__/processing_speech_to_text_2.cpython-310.pyc,, +transformers/models/deprecated/speech_to_text_2/__pycache__/tokenization_speech_to_text_2.cpython-310.pyc,, +transformers/models/deprecated/speech_to_text_2/configuration_speech_to_text_2.py,sha256=HpJVunbFUp23kPZBCr9K5sIXKVp238icRyv8_YGcmCI,6001 +transformers/models/deprecated/speech_to_text_2/modeling_speech_to_text_2.py,sha256=asJ2UlO6N-HsLjO1eg6rFCWtf2UbH-5NgB7lsPGB0u4,43880 +transformers/models/deprecated/speech_to_text_2/processing_speech_to_text_2.py,sha256=7AWU3_OegyHwNxluEMSHjLzBGfYcg3m-TNHq9VHYJTo,4792 +transformers/models/deprecated/speech_to_text_2/tokenization_speech_to_text_2.py,sha256=S7biDmProh43S6iAbA0cIJyCVqb6fG6itYkbsI2Ccfc,8405 +transformers/models/deprecated/tapex/__init__.py,sha256=lQutKYtwbU8ztPva0tyRnnV-zOWw6rxkGyoOUSuvnUo,926 +transformers/models/deprecated/tapex/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/tapex/__pycache__/tokenization_tapex.cpython-310.pyc,, +transformers/models/deprecated/tapex/tokenization_tapex.py,sha256=MPuB1JknrO9WY_j-Hgy8JWGNKvcowBDrjhFi-bCGALw,64347 +transformers/models/deprecated/trajectory_transformer/__init__.py,sha256=XnXDCm4ePannQqnQnMn1Fpqvmq9-1L0_mTeoqObM8-0,1806 +transformers/models/deprecated/trajectory_transformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/trajectory_transformer/__pycache__/configuration_trajectory_transformer.cpython-310.pyc,, +transformers/models/deprecated/trajectory_transformer/__pycache__/convert_trajectory_transformer_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, 
+transformers/models/deprecated/trajectory_transformer/__pycache__/modeling_trajectory_transformer.cpython-310.pyc,, +transformers/models/deprecated/trajectory_transformer/configuration_trajectory_transformer.py,sha256=qH3gf0InhrlutKUQNA4-OrqWp72n_Ha4B6jA_kZy55U,7061 +transformers/models/deprecated/trajectory_transformer/convert_trajectory_transformer_original_pytorch_checkpoint_to_pytorch.py,sha256=sUPWNSvy46IYr8eFEyxqLraW90abuzy4Snrt3uKFW34,3138 +transformers/models/deprecated/trajectory_transformer/modeling_trajectory_transformer.py,sha256=ts1LBqIjnC9gB53or9STcRJTAE7UaSdZnYHoh4jdtq4,25593 +transformers/models/deprecated/transfo_xl/__init__.py,sha256=5IURzrZTTTFlLaPUn2R1ErF8SvQ9nF9QcNM8ENdntyg,2879 +transformers/models/deprecated/transfo_xl/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/transfo_xl/__pycache__/configuration_transfo_xl.cpython-310.pyc,, +transformers/models/deprecated/transfo_xl/__pycache__/convert_transfo_xl_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/deprecated/transfo_xl/__pycache__/modeling_tf_transfo_xl.cpython-310.pyc,, +transformers/models/deprecated/transfo_xl/__pycache__/modeling_tf_transfo_xl_utilities.cpython-310.pyc,, +transformers/models/deprecated/transfo_xl/__pycache__/modeling_transfo_xl.cpython-310.pyc,, +transformers/models/deprecated/transfo_xl/__pycache__/modeling_transfo_xl_utilities.cpython-310.pyc,, +transformers/models/deprecated/transfo_xl/__pycache__/tokenization_transfo_xl.cpython-310.pyc,, +transformers/models/deprecated/transfo_xl/configuration_transfo_xl.py,sha256=U3zrDVAkNbmSdcyLRnGVvHEo6BCZMONZKYHhrKIMGi0,7874 +transformers/models/deprecated/transfo_xl/convert_transfo_xl_original_tf_checkpoint_to_pytorch.py,sha256=mCK_3e0Q-vVBlo624MnZQoDZpq0AAZStLrWwmrPQv_U,5037 +transformers/models/deprecated/transfo_xl/modeling_tf_transfo_xl.py,sha256=ZTxGgzubBKUieotTK-Z71Tvt7KKO9S-CF5pmOtAWl_U,45905 
+transformers/models/deprecated/transfo_xl/modeling_tf_transfo_xl_utilities.py,sha256=Dlv3ZzRduWFBnZZHn8RegbW45XeCecuYCzzzZC3bDXs,7633 +transformers/models/deprecated/transfo_xl/modeling_transfo_xl.py,sha256=tqUEFzTSvwGe55FfIeQ3X5dJovQUyG03XhuA5PWArlw,55892 +transformers/models/deprecated/transfo_xl/modeling_transfo_xl_utilities.py,sha256=L1l4K7sj8rwXzvhn7_-RK2UbOnYtfDUF0VdFr4L8nxA,10859 +transformers/models/deprecated/transfo_xl/tokenization_transfo_xl.py,sha256=d0gQf4hb0F_so24jKSJ3rLip1tVdwEYDXta-icyKuVs,32004 +transformers/models/deprecated/tvlt/__init__.py,sha256=Ryp_kcJdg3sqjFKgyRVZjXAMUp_Epg9CL2GLQAHD0k0,2520 +transformers/models/deprecated/tvlt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/tvlt/__pycache__/configuration_tvlt.cpython-310.pyc,, +transformers/models/deprecated/tvlt/__pycache__/feature_extraction_tvlt.cpython-310.pyc,, +transformers/models/deprecated/tvlt/__pycache__/image_processing_tvlt.cpython-310.pyc,, +transformers/models/deprecated/tvlt/__pycache__/modeling_tvlt.cpython-310.pyc,, +transformers/models/deprecated/tvlt/__pycache__/processing_tvlt.cpython-310.pyc,, +transformers/models/deprecated/tvlt/configuration_tvlt.py,sha256=uGh6Ie-Nu-uf5987LLtFBvpEqd8rLEjFzzkMKIol6b4,8623 +transformers/models/deprecated/tvlt/feature_extraction_tvlt.py,sha256=Mx7tuGJvK-1YnS7ggYL6j_emzolu8L8Hrce5ATPtPR0,10558 +transformers/models/deprecated/tvlt/image_processing_tvlt.py,sha256=--v_ekqBZ3NK9LrWxzAHYi8dvL8tHK_rKX_oenp3seU,20090 +transformers/models/deprecated/tvlt/modeling_tvlt.py,sha256=hQ0PyxS8oOjiokuLtMAxY1jNSL9WoKmH7k6RBrm6scs,56698 +transformers/models/deprecated/tvlt/processing_tvlt.py,sha256=pC3zQjapxdhkqrl1QdJ7mXkEOSGNooP7kTUEWKUr_nE,3507 +transformers/models/deprecated/van/__init__.py,sha256=UGKlepGOpOuVmsb6Mmess6kPa2qP4rLzfzJ0dsdQDno,1564 +transformers/models/deprecated/van/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/van/__pycache__/configuration_van.cpython-310.pyc,, 
+transformers/models/deprecated/van/__pycache__/convert_van_to_pytorch.cpython-310.pyc,, +transformers/models/deprecated/van/__pycache__/modeling_van.cpython-310.pyc,, +transformers/models/deprecated/van/configuration_van.py,sha256=QpN-p2Hg0C59if2JSEG47j_zepx1f4KpCgIBYgLhCOY,4657 +transformers/models/deprecated/van/convert_van_to_pytorch.py,sha256=UMaipjtB68OPj6OQzoSyquPlBkG-IbN4q1FZvs51Lxg,10373 +transformers/models/deprecated/van/modeling_van.py,sha256=YwNP7YVzKpyURSQ3taijiOBDTlF086hEcT2fEGqQaTE,21130 +transformers/models/deprecated/vit_hybrid/__init__.py,sha256=Ld0UOl3F4y-YaI42jk7ym_wvTFswFGJj_M6jpSeXU_E,2125 +transformers/models/deprecated/vit_hybrid/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/vit_hybrid/__pycache__/configuration_vit_hybrid.cpython-310.pyc,, +transformers/models/deprecated/vit_hybrid/__pycache__/convert_vit_hybrid_timm_to_pytorch.cpython-310.pyc,, +transformers/models/deprecated/vit_hybrid/__pycache__/image_processing_vit_hybrid.cpython-310.pyc,, +transformers/models/deprecated/vit_hybrid/__pycache__/modeling_vit_hybrid.cpython-310.pyc,, +transformers/models/deprecated/vit_hybrid/configuration_vit_hybrid.py,sha256=VwJpgMa1l9rL-Rx9jF-POC_mz69y88tnHqgS431Qa10,8230 +transformers/models/deprecated/vit_hybrid/convert_vit_hybrid_timm_to_pytorch.py,sha256=NT-72ZHHEaQbmmsHgh9_UI7gf_40Aykff8cW8XXspQQ,13412 +transformers/models/deprecated/vit_hybrid/image_processing_vit_hybrid.py,sha256=4Sw6X-fDYE1ySXQXk-AecKf5yHYrzmNihgUOgrCaegc,16219 +transformers/models/deprecated/vit_hybrid/modeling_vit_hybrid.py,sha256=22N1IODYpHHj1cNaqZ-aCukh9bViJHt3bMNo2qJQ9VQ,32563 +transformers/models/deprecated/xlm_prophetnet/__init__.py,sha256=OYkcL5jhbHEaLLvOAd0v-CppyQxbuRzNEzaSDBhXxKA,2408 +transformers/models/deprecated/xlm_prophetnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/xlm_prophetnet/__pycache__/configuration_xlm_prophetnet.cpython-310.pyc,, 
+transformers/models/deprecated/xlm_prophetnet/__pycache__/modeling_xlm_prophetnet.cpython-310.pyc,, +transformers/models/deprecated/xlm_prophetnet/__pycache__/tokenization_xlm_prophetnet.cpython-310.pyc,, +transformers/models/deprecated/xlm_prophetnet/configuration_xlm_prophetnet.py,sha256=dfa6w3RvOEZLxS9GAdQOKaJF1vT6Gc-w06a8Eujg5Sk,8916 +transformers/models/deprecated/xlm_prophetnet/modeling_xlm_prophetnet.py,sha256=EX5mzNPUYfWqcN5B-YquCXvM_SE7TfZcbtzslv1WPBI,115593 +transformers/models/deprecated/xlm_prophetnet/tokenization_xlm_prophetnet.py,sha256=HpD7sKmAaelMUUDwNrAyHgBLhiWI2rnDaxn-yFFjdEY,13272 +transformers/models/depth_anything/__init__.py,sha256=Jbd8LXt-fU3_cTF7jBrkBBw-Kzscv6o7O0YiZy0R8-A,1009 +transformers/models/depth_anything/__pycache__/__init__.cpython-310.pyc,, +transformers/models/depth_anything/__pycache__/configuration_depth_anything.cpython-310.pyc,, +transformers/models/depth_anything/__pycache__/convert_depth_anything_to_hf.cpython-310.pyc,, +transformers/models/depth_anything/__pycache__/modeling_depth_anything.cpython-310.pyc,, +transformers/models/depth_anything/configuration_depth_anything.py,sha256=hROtoRA46y2aGhvVebFlgFP926bDuwR6Bi7sIQBgsmE,7974 +transformers/models/depth_anything/convert_depth_anything_to_hf.py,sha256=re3j1J_GBywWeFv0eMRe9FvNSWnNSb6-pJf852fZZYg,17818 +transformers/models/depth_anything/modeling_depth_anything.py,sha256=CD1cRmNVA4eNIL38TRAxZb4JzCuVJ2pDHmzL20715B8,18578 +transformers/models/detr/__init__.py,sha256=YEWZnoCCgWt4KZNfbSi-v4KNDOJT2-ii2sxanyVDkvY,1120 +transformers/models/detr/__pycache__/__init__.cpython-310.pyc,, +transformers/models/detr/__pycache__/configuration_detr.cpython-310.pyc,, +transformers/models/detr/__pycache__/convert_detr_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/detr/__pycache__/convert_detr_to_pytorch.cpython-310.pyc,, +transformers/models/detr/__pycache__/feature_extraction_detr.cpython-310.pyc,, 
+transformers/models/detr/__pycache__/image_processing_detr.cpython-310.pyc,, +transformers/models/detr/__pycache__/image_processing_detr_fast.cpython-310.pyc,, +transformers/models/detr/__pycache__/modeling_detr.cpython-310.pyc,, +transformers/models/detr/configuration_detr.py,sha256=osct05nXbvBIX3wrWvlJATnzSu0Tr1tRR1C3enR82vc,13531 +transformers/models/detr/convert_detr_original_pytorch_checkpoint_to_pytorch.py,sha256=k1F8poVxIjsqQUUJu7CJl2Bj3x1IuivlgTi54Y_miKk,13560 +transformers/models/detr/convert_detr_to_pytorch.py,sha256=1Y_Tz2WSdybHscDL-iuF5sJG2Ni2o2-85unaJLtaykw,18992 +transformers/models/detr/feature_extraction_detr.py,sha256=HTGOQ_7RlxFfmu6PfXwDB18yMnUblwi9lDWXql0So0M,1511 +transformers/models/detr/image_processing_detr.py,sha256=E3w8Z77pS0i9jB9vLps2pTkF0nyGChNnGioZQX8vFVs,94034 +transformers/models/detr/image_processing_detr_fast.py,sha256=BYGe2X6zHfRuHuW0lDxkRdPKQ6e-KJN8cJDYUMqCaV4,72661 +transformers/models/detr/modeling_detr.py,sha256=R6sFMhqaHRmkXkz1NcKn6FYlq2r-1cmxA8Yi8vRnqZQ,88290 +transformers/models/dialogpt/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +transformers/models/dialogpt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/dialogpt/__pycache__/convert_dialogpt_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/dialogpt/convert_dialogpt_original_pytorch_checkpoint_to_pytorch.py,sha256=Zp59TmLBKEs-x1-quZZeqARhpS3cTnnmgT4nCI0zsHY,1537 +transformers/models/diffllama/__init__.py,sha256=Yosk5eQ82PblntLff-bL3pfJZ-AVKp5jbQK5R2SLVc8,1004 +transformers/models/diffllama/__pycache__/__init__.cpython-310.pyc,, +transformers/models/diffllama/__pycache__/configuration_diffllama.cpython-310.pyc,, +transformers/models/diffllama/__pycache__/modeling_diffllama.cpython-310.pyc,, +transformers/models/diffllama/__pycache__/modular_diffllama.cpython-310.pyc,, +transformers/models/diffllama/configuration_diffllama.py,sha256=Z53H3hXOScYSjNPWslS6899Xi_WdC6gxKdbuZ0LBbV8,10682 
+transformers/models/diffllama/modeling_diffllama.py,sha256=HSVefe2BbjKppBxKr6TgGVR7ciOFHC0PfkdpNHP9H4o,65545 +transformers/models/diffllama/modular_diffllama.py,sha256=S4x7TRinLt2sjaW2A-5OY0c6ODegMBwdc37x46TedXI,21066 +transformers/models/dinat/__init__.py,sha256=N0HykajUSY5KsvPQNUxc8jAuuJntmDJ-Dz8Qa8_sJ9E,991 +transformers/models/dinat/__pycache__/__init__.cpython-310.pyc,, +transformers/models/dinat/__pycache__/configuration_dinat.cpython-310.pyc,, +transformers/models/dinat/__pycache__/modeling_dinat.cpython-310.pyc,, +transformers/models/dinat/configuration_dinat.py,sha256=-RhmPxqGfoTz4-snZUO7PGhetovRg6CrwZSlVtKn2mE,7356 +transformers/models/dinat/modeling_dinat.py,sha256=ioU0HtLrbY8BYJGnNnvgoSP4FtQEuLT9Z6-OteSzBZk,40423 +transformers/models/dinov2/__init__.py,sha256=fDyp5N-KcJzO-vUeT3fZA8UbC21FfGEhDOlYNvXHHDc,1033 +transformers/models/dinov2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/dinov2/__pycache__/configuration_dinov2.cpython-310.pyc,, +transformers/models/dinov2/__pycache__/convert_dinov2_to_hf.cpython-310.pyc,, +transformers/models/dinov2/__pycache__/modeling_dinov2.cpython-310.pyc,, +transformers/models/dinov2/__pycache__/modeling_flax_dinov2.cpython-310.pyc,, +transformers/models/dinov2/configuration_dinov2.py,sha256=cR-qc6YhXEWBwL8D9m1BAcJIpdMBCQFYhue0G3TiiJY,8090 +transformers/models/dinov2/convert_dinov2_to_hf.py,sha256=UpXqLbLJ8sF9LUFJ2TgU_5DIZe6ViZmIXdWEmDhVUL0,11863 +transformers/models/dinov2/modeling_dinov2.py,sha256=hjdiKm7KVieyyWstyYLqexXlwhZgP7lzcMDvqvBmOj0,38787 +transformers/models/dinov2/modeling_flax_dinov2.py,sha256=bNtNAn1XdtjQzTLhHwyj3gcGa1LQFUUUHajaKy_GyLc,30785 +transformers/models/dinov2_with_registers/__init__.py,sha256=s0cefgSRnlIVcdZYV0qz3Q9X3IEChU7mkGbbnr2IH6E,1023 +transformers/models/dinov2_with_registers/__pycache__/__init__.cpython-310.pyc,, +transformers/models/dinov2_with_registers/__pycache__/configuration_dinov2_with_registers.cpython-310.pyc,, 
+transformers/models/dinov2_with_registers/__pycache__/convert_dinov2_with_registers_to_hf.cpython-310.pyc,, +transformers/models/dinov2_with_registers/__pycache__/modeling_dinov2_with_registers.cpython-310.pyc,, +transformers/models/dinov2_with_registers/__pycache__/modular_dinov2_with_registers.cpython-310.pyc,, +transformers/models/dinov2_with_registers/configuration_dinov2_with_registers.py,sha256=snup7E3s7-WBPv47EhCQhJfdfjkXZSi4yOQf1R9ot10,8633 +transformers/models/dinov2_with_registers/convert_dinov2_with_registers_to_hf.py,sha256=uA_cXmvLmeITOdOfPNsQXMsfT74UdYg5zeAPLzvyvNY,12372 +transformers/models/dinov2_with_registers/modeling_dinov2_with_registers.py,sha256=OyiPXXBVkoDaqeo_re9jrruYTrLrQot801xUg3zq_IY,41338 +transformers/models/dinov2_with_registers/modular_dinov2_with_registers.py,sha256=_RtypPdWkgnhvadM_7p8Q5endhHJTpfa_VwqpEGGdj0,17147 +transformers/models/distilbert/__init__.py,sha256=dKwCe9QsyAaNsdUJFMUa-vcuHPSQSuLKFoFBvK3cLEY,1178 +transformers/models/distilbert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/distilbert/__pycache__/configuration_distilbert.cpython-310.pyc,, +transformers/models/distilbert/__pycache__/modeling_distilbert.cpython-310.pyc,, +transformers/models/distilbert/__pycache__/modeling_flax_distilbert.cpython-310.pyc,, +transformers/models/distilbert/__pycache__/modeling_tf_distilbert.cpython-310.pyc,, +transformers/models/distilbert/__pycache__/tokenization_distilbert.cpython-310.pyc,, +transformers/models/distilbert/__pycache__/tokenization_distilbert_fast.cpython-310.pyc,, +transformers/models/distilbert/configuration_distilbert.py,sha256=PTN973hHHX1YBCy3gT1goLLnzzDOSKnd99zg_ENFRKs,6046 +transformers/models/distilbert/modeling_distilbert.py,sha256=eqJGvwpLRfkBuDqauZPn3SwPuR9Pu4dARbrsiudCmm4,60370 +transformers/models/distilbert/modeling_flax_distilbert.py,sha256=NNdjM2UOvN_PC4O_OUVugBByL6RYxfCUyjv7gwU0o-k,32914 
+transformers/models/distilbert/modeling_tf_distilbert.py,sha256=smsNlkXC7ujXHDrKcOcsk2tQt6rLAzS703kseCtM6vw,49145 +transformers/models/distilbert/tokenization_distilbert.py,sha256=tRbn7lfNADq37XnMa5ST06c9ObNb9uC6fxveb2qTMbk,22260 +transformers/models/distilbert/tokenization_distilbert_fast.py,sha256=Mof7pf-IhB9iRwct1fK9fIUWfT5cGF2-ds5aFOOlMRY,8077 +transformers/models/dit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +transformers/models/dit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/dit/__pycache__/convert_dit_unilm_to_pytorch.cpython-310.pyc,, +transformers/models/dit/convert_dit_unilm_to_pytorch.py,sha256=GEMnwc1iMEobjwXLOxLJZarjSqvABOIkUAfgSp9Gnyw,9419 +transformers/models/donut/__init__.py,sha256=ZlVffHEdFxl7gEozYS2EhRLmsSlqgnx8lXbhz60l28E,1123 +transformers/models/donut/__pycache__/__init__.cpython-310.pyc,, +transformers/models/donut/__pycache__/configuration_donut_swin.cpython-310.pyc,, +transformers/models/donut/__pycache__/convert_donut_to_pytorch.cpython-310.pyc,, +transformers/models/donut/__pycache__/feature_extraction_donut.cpython-310.pyc,, +transformers/models/donut/__pycache__/image_processing_donut.cpython-310.pyc,, +transformers/models/donut/__pycache__/modeling_donut_swin.cpython-310.pyc,, +transformers/models/donut/__pycache__/processing_donut.cpython-310.pyc,, +transformers/models/donut/configuration_donut_swin.py,sha256=mHg0P4MRxMOw_IsHKFtBIuSuuY0tINGN3FmUImMqST8,5785 +transformers/models/donut/convert_donut_to_pytorch.py,sha256=lIIm3j7oyJYa2xy-piZ3bvUEHOSP9mQ0x_MZhqPiBOY,9337 +transformers/models/donut/feature_extraction_donut.py,sha256=KvL5oyQoe6T_2zwoqvFTMTzKBHk1heN3yFeIjTEePmU,1217 +transformers/models/donut/image_processing_donut.py,sha256=3EMXoGSMlIYSURml9R58yhDGj0YdSwVfbL1hZAP6yTg,21795 +transformers/models/donut/modeling_donut_swin.py,sha256=eqbV6pVHAufb-lK70x54AHUfARue9hRQQluNWhlTlvw,45951 +transformers/models/donut/processing_donut.py,sha256=0GtsiOV33rwotqpzoTe3CNSgqz4kdlJb-3t3qd_19Bk,9695 
+transformers/models/dpr/__init__.py,sha256=z4FocLkQ_ckWtBZctTh-aeV1haJJY-lXF0ZRKuVbVkc,1099 +transformers/models/dpr/__pycache__/__init__.cpython-310.pyc,, +transformers/models/dpr/__pycache__/configuration_dpr.cpython-310.pyc,, +transformers/models/dpr/__pycache__/convert_dpr_original_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/dpr/__pycache__/modeling_dpr.cpython-310.pyc,, +transformers/models/dpr/__pycache__/modeling_tf_dpr.cpython-310.pyc,, +transformers/models/dpr/__pycache__/tokenization_dpr.cpython-310.pyc,, +transformers/models/dpr/__pycache__/tokenization_dpr_fast.cpython-310.pyc,, +transformers/models/dpr/configuration_dpr.py,sha256=7lGBYx2IC_PGrt-6FIKW_pxJCMNTv3rWLqmNKCXMw8M,6416 +transformers/models/dpr/convert_dpr_original_checkpoint_to_pytorch.py,sha256=XsxG5FBg46-EHlDsMq4w21C9W4wl8RZ6GZvx5coBmfk,6132 +transformers/models/dpr/modeling_dpr.py,sha256=cu-E9YFD__7WfuWuEZ34T90m-q8Y_yE7opLV5nf5mdo,28542 +transformers/models/dpr/modeling_tf_dpr.py,sha256=g_fxgbstNg0kNKfDjBDVvXdgRchwMBfROcmBI3cr5Jw,33829 +transformers/models/dpr/tokenization_dpr.py,sha256=X5F-34vnP8Y14hh4uWUCB8DtMI01oNP3KGrYkhjlyPg,15840 +transformers/models/dpr/tokenization_dpr_fast.py,sha256=X4fogc0iNFnvhVY6BML3-Lr0bLDSIO2f8nEHFllngks,16219 +transformers/models/dpt/__init__.py,sha256=zucFovHWc15Nyx-7PHrOvECmBOCXf5uYoFCWpmM9Nd4,1069 +transformers/models/dpt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/dpt/__pycache__/configuration_dpt.cpython-310.pyc,, +transformers/models/dpt/__pycache__/convert_dinov2_depth_to_hf.cpython-310.pyc,, +transformers/models/dpt/__pycache__/convert_dpt_beit_to_hf.cpython-310.pyc,, +transformers/models/dpt/__pycache__/convert_dpt_hybrid_to_pytorch.cpython-310.pyc,, +transformers/models/dpt/__pycache__/convert_dpt_swinv2_to_hf.cpython-310.pyc,, +transformers/models/dpt/__pycache__/convert_dpt_to_pytorch.cpython-310.pyc,, +transformers/models/dpt/__pycache__/feature_extraction_dpt.cpython-310.pyc,, 
+transformers/models/dpt/__pycache__/image_processing_dpt.cpython-310.pyc,, +transformers/models/dpt/__pycache__/modeling_dpt.cpython-310.pyc,, +transformers/models/dpt/configuration_dpt.py,sha256=I6cn7DErZHQ-p1Ad6k3C25wPcpnN8qGVqQekkfgz9T4,14068 +transformers/models/dpt/convert_dinov2_depth_to_hf.py,sha256=RSRtp1_C6NG_jX4V-9esAMhKXXKwy5eIhMTS7YehCaY,16926 +transformers/models/dpt/convert_dpt_beit_to_hf.py,sha256=Xs1RAcAQitVKUyjyEYTxYgbY2udQyyFbnOtnDhEN64k,14346 +transformers/models/dpt/convert_dpt_hybrid_to_pytorch.py,sha256=SLxsxRpr5dZXe-Qzv6xsmXBZIg8IyhToooqjS5ptjBY,12995 +transformers/models/dpt/convert_dpt_swinv2_to_hf.py,sha256=LH0aOWueCHqVl8Huz_MLdoQqso5C8Tgh7_ArUxxvGYg,15175 +transformers/models/dpt/convert_dpt_to_pytorch.py,sha256=ezSKskhntUX7V-Y396PA7a5XhmdYfJDQPechPiy_Lr4,11877 +transformers/models/dpt/feature_extraction_dpt.py,sha256=oMO44qP7soEwEUlpmu2NsXG_g7ip9pnh9j9MiAjpqpo,1201 +transformers/models/dpt/image_processing_dpt.py,sha256=ebIqXdzlii69XA8sakXIt1NQcjkD57saZrDqeMwTDh0,24417 +transformers/models/dpt/modeling_dpt.py,sha256=2l87Q2cv9kyFiylXkVxxvyOlNqpahH9OcffXH8jAkMA,57296 +transformers/models/efficientnet/__init__.py,sha256=HJYCtGAWyIgt7AXI4xdDvRBY3Ya7_yZVB9XEcqTHk8A,1054 +transformers/models/efficientnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/efficientnet/__pycache__/configuration_efficientnet.cpython-310.pyc,, +transformers/models/efficientnet/__pycache__/convert_efficientnet_to_pytorch.cpython-310.pyc,, +transformers/models/efficientnet/__pycache__/image_processing_efficientnet.cpython-310.pyc,, +transformers/models/efficientnet/__pycache__/modeling_efficientnet.cpython-310.pyc,, +transformers/models/efficientnet/configuration_efficientnet.py,sha256=VeFBfwCoxtbd5M2c-KVrPs0y83XAaX1SGvfaFro65LE,7657 +transformers/models/efficientnet/convert_efficientnet_to_pytorch.py,sha256=e2Na1xvNc7z9XvvI7v6v1V2uFWr88MSTN3JPKR5GstM,12756 
+transformers/models/efficientnet/image_processing_efficientnet.py,sha256=5Gvo0abaPeZIzjS45zPgs_A7b8jYaa4NIcULgGxzvx8,18342 +transformers/models/efficientnet/modeling_efficientnet.py,sha256=HXw35WXlyL8K658mmsxKf1Iw5A6JSdDd-wvFOkPRjtw,24049 +transformers/models/electra/__init__.py,sha256=e6DkZL6cjtWVsTx7tamR-zsyv0tuRYLbuYn-r-04P84,1160 +transformers/models/electra/__pycache__/__init__.cpython-310.pyc,, +transformers/models/electra/__pycache__/configuration_electra.cpython-310.pyc,, +transformers/models/electra/__pycache__/convert_electra_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/electra/__pycache__/modeling_electra.cpython-310.pyc,, +transformers/models/electra/__pycache__/modeling_flax_electra.cpython-310.pyc,, +transformers/models/electra/__pycache__/modeling_tf_electra.cpython-310.pyc,, +transformers/models/electra/__pycache__/tokenization_electra.cpython-310.pyc,, +transformers/models/electra/__pycache__/tokenization_electra_fast.cpython-310.pyc,, +transformers/models/electra/configuration_electra.py,sha256=xJWhtwgiJeGHQ1b3iXGaduC_m_h8yG5fFgQx2MQCR-o,9145 +transformers/models/electra/convert_electra_original_tf_checkpoint_to_pytorch.py,sha256=8G7yTj4TTecFVf1OPFeqFuPSuyzronf3gSeiQQeEMG4,2861 +transformers/models/electra/modeling_electra.py,sha256=9ZENM7LFeLDRweGt6qfvP_fBxSqtj8YJ44ol6qKZB8M,75146 +transformers/models/electra/modeling_flax_electra.py,sha256=mOacTelJvmP-61PF4ZJl1X3CFYqJwaqbPbNAJSft9wI,62595 +transformers/models/electra/modeling_tf_electra.py,sha256=JGK-UXBGqS8U-BGQhFWZI6BoqRUxUkZuLicSHZQzrXo,78614 +transformers/models/electra/tokenization_electra.py,sha256=u_tk2_VroyR1msCEEX_yX45hQp4p7hG_q_WrqD18m58,21260 +transformers/models/electra/tokenization_electra_fast.py,sha256=yZ4f6x_EP4MqTZ0DJ1Y_X3OjsvN5Tof_pT6RG3-smV8,7722 +transformers/models/emu3/__init__.py,sha256=VEBLADqeToacty2xd3Zu0F_fLQRxvhfiKPkuB9jwcFM,1070 +transformers/models/emu3/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/emu3/__pycache__/configuration_emu3.cpython-310.pyc,, +transformers/models/emu3/__pycache__/convert_emu3_weights_to_hf.cpython-310.pyc,, +transformers/models/emu3/__pycache__/image_processing_emu3.cpython-310.pyc,, +transformers/models/emu3/__pycache__/modeling_emu3.cpython-310.pyc,, +transformers/models/emu3/__pycache__/modular_emu3.cpython-310.pyc,, +transformers/models/emu3/__pycache__/processing_emu3.cpython-310.pyc,, +transformers/models/emu3/configuration_emu3.py,sha256=hgj0gFq8h6e_FOVD2bF5TkMeN_KLtLfoBFKx4aVwja8,16071 +transformers/models/emu3/convert_emu3_weights_to_hf.py,sha256=laVT-1mLt1euB5WoaN3Rwbwzi4NSgMrxpba94gs7pXI,16849 +transformers/models/emu3/image_processing_emu3.py,sha256=TSDLgPK6turxsWlz0SpxE5zuQ8r3L9W4G_oP_Z0RmPQ,27855 +transformers/models/emu3/modeling_emu3.py,sha256=JD_NpJ8q8xpOC0GysRTKrrKkBSf-QUZj-F9WUDoLIH4,84831 +transformers/models/emu3/modular_emu3.py,sha256=hEiuDDcjFLiUEtrYlchS4faAuz0gG4G3ovMasO0Nnfw,53620 +transformers/models/emu3/processing_emu3.py,sha256=OPL2csbNMBqIHszvyopq16vhE7QvmzCS2sGdbNkMtxE,10415 +transformers/models/encodec/__init__.py,sha256=QbO9yEfCaRwYKbK0vvmwKMbqRAToyos-HTHhRmf7n5s,1041 +transformers/models/encodec/__pycache__/__init__.cpython-310.pyc,, +transformers/models/encodec/__pycache__/configuration_encodec.cpython-310.pyc,, +transformers/models/encodec/__pycache__/convert_encodec_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/encodec/__pycache__/feature_extraction_encodec.cpython-310.pyc,, +transformers/models/encodec/__pycache__/modeling_encodec.cpython-310.pyc,, +transformers/models/encodec/configuration_encodec.py,sha256=Gc1MglGDHO8Yhqo2zy73X_lLyFI7BVBFZuk701jDwI8,8525 +transformers/models/encodec/convert_encodec_checkpoint_to_pytorch.py,sha256=7IthG56LeBwH_WFzT5Cvwcfsgy1vIheurQHWpnMS318,15253 +transformers/models/encodec/feature_extraction_encodec.py,sha256=OwPRjcZanBWGoX-HmQo-NLEv-uRPK-tTWqfEWsjERIQ,9913 
+transformers/models/encodec/modeling_encodec.py,sha256=BPiOslfKi7vMBkyhU15JPfsa8ZfsTtPQXvSd4F8ntCw,33842 +transformers/models/encoder_decoder/__init__.py,sha256=wxXN9-4nCvYICfq8pE592rdRiQXK7S69V2cWGVQyIkw,1107 +transformers/models/encoder_decoder/__pycache__/__init__.cpython-310.pyc,, +transformers/models/encoder_decoder/__pycache__/configuration_encoder_decoder.cpython-310.pyc,, +transformers/models/encoder_decoder/__pycache__/modeling_encoder_decoder.cpython-310.pyc,, +transformers/models/encoder_decoder/__pycache__/modeling_flax_encoder_decoder.cpython-310.pyc,, +transformers/models/encoder_decoder/__pycache__/modeling_tf_encoder_decoder.cpython-310.pyc,, +transformers/models/encoder_decoder/configuration_encoder_decoder.py,sha256=YpO0_JNh13uQI6WCv5mMf81bw-r2j0S_zZLMwuEfeSM,4586 +transformers/models/encoder_decoder/modeling_encoder_decoder.py,sha256=45y9NGKEnrCBGlYRLghxYya_Z03qUgONvZDDQd3Ujqw,35509 +transformers/models/encoder_decoder/modeling_flax_encoder_decoder.py,sha256=hMDet9z998RL2MRZG35qSXJ0PM0I21EqT73Zx_Hjs9U,43567 +transformers/models/encoder_decoder/modeling_tf_encoder_decoder.py,sha256=1lRgrNX6Y6yCIBkX2AtS-eNaE3hmCgWAmf2eQ0OsqAg,34344 +transformers/models/ernie/__init__.py,sha256=TyzaXpzGwu-WqsIn1tavDqa7BCV9X-mPho4JDa9gk0I,991 +transformers/models/ernie/__pycache__/__init__.cpython-310.pyc,, +transformers/models/ernie/__pycache__/configuration_ernie.cpython-310.pyc,, +transformers/models/ernie/__pycache__/modeling_ernie.cpython-310.pyc,, +transformers/models/ernie/configuration_ernie.py,sha256=7DStIkig-TYcDsneQwKwz2DZ2XGK9EjHYeJeZ7TbgaU,7694 +transformers/models/ernie/modeling_ernie.py,sha256=arfeMxtasIdnEeCWo5HSi4ZQiyY6Syuo8hBmmIRoL6c,83359 +transformers/models/esm/__init__.py,sha256=muSqvVMt6mySkoAm7MjweiFHJVBSj70LlakjHmZ6PEE,1094 +transformers/models/esm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/esm/__pycache__/configuration_esm.cpython-310.pyc,, +transformers/models/esm/__pycache__/convert_esm.cpython-310.pyc,, 
+transformers/models/esm/__pycache__/modeling_esm.cpython-310.pyc,, +transformers/models/esm/__pycache__/modeling_esmfold.cpython-310.pyc,, +transformers/models/esm/__pycache__/modeling_tf_esm.cpython-310.pyc,, +transformers/models/esm/__pycache__/tokenization_esm.cpython-310.pyc,, +transformers/models/esm/configuration_esm.py,sha256=3CtwlRuK-R8d6rBHlmTOui7_cNx1ZvDeKnTQZ2wr0xY,14410 +transformers/models/esm/convert_esm.py,sha256=x7Bz5lkBXMQJ_vYZ0-L3_z61lcTRQ9clIyC5WEkX2Kg,18469 +transformers/models/esm/modeling_esm.py,sha256=b4Z1ca7uD7n0RMsjrF6MoYmqSwVCSq2YbJx0ts5BLLg,55718 +transformers/models/esm/modeling_esmfold.py,sha256=NRq9A_z2tkEOzz3kwRMHbOupuMT057VFiB9q7SY5skk,86971 +transformers/models/esm/modeling_tf_esm.py,sha256=RbaZNctSONOdCx0i4_HE7F2psPvKWf3MOsOE2OiPS6A,69124 +transformers/models/esm/openfold_utils/__init__.py,sha256=Xy2uqvFsLC8Ax-OOce5PgoBDiZgEJgJPqs__p5SBWUY,446 +transformers/models/esm/openfold_utils/__pycache__/__init__.cpython-310.pyc,, +transformers/models/esm/openfold_utils/__pycache__/chunk_utils.cpython-310.pyc,, +transformers/models/esm/openfold_utils/__pycache__/data_transforms.cpython-310.pyc,, +transformers/models/esm/openfold_utils/__pycache__/feats.cpython-310.pyc,, +transformers/models/esm/openfold_utils/__pycache__/loss.cpython-310.pyc,, +transformers/models/esm/openfold_utils/__pycache__/protein.cpython-310.pyc,, +transformers/models/esm/openfold_utils/__pycache__/residue_constants.cpython-310.pyc,, +transformers/models/esm/openfold_utils/__pycache__/rigid_utils.cpython-310.pyc,, +transformers/models/esm/openfold_utils/__pycache__/tensor_utils.cpython-310.pyc,, +transformers/models/esm/openfold_utils/chunk_utils.py,sha256=co29vXYCaTh3g6PPSsvb_5GyePXVudMkISVHkARDT38,14390 +transformers/models/esm/openfold_utils/data_transforms.py,sha256=F4wGANRhKLd6MLHrwg2IxpqCxCJEx8aFSxqAdsXsBMo,3764 +transformers/models/esm/openfold_utils/feats.py,sha256=RHH65TclSlcI-fuGP16f6xr_QolV0aGRXEWUq-0boIU,8368 
+transformers/models/esm/openfold_utils/loss.py,sha256=wY2ONqbuRvWMomjkpfPwfoa7dqCO2vFkM-kmNfhjivo,3705 +transformers/models/esm/openfold_utils/protein.py,sha256=R7diEvvIOtJY28B-_6TSMZdWmLFY4NOwaMzQmAg0x_w,11491 +transformers/models/esm/openfold_utils/residue_constants.py,sha256=FtPlWVweacknPfmi4XCrR66kFr4EuYXywvx0IEY8KAs,37992 +transformers/models/esm/openfold_utils/rigid_utils.py,sha256=J-xQV4KrkBNwHR4TSHBwT85pOYKf-nJ78Os4JtiJbxE,41130 +transformers/models/esm/openfold_utils/tensor_utils.py,sha256=cySnhhaYbdq4SqyWyAF3qGeUWPfWKsuTYWRnX-h21sE,4781 +transformers/models/esm/tokenization_esm.py,sha256=8A5P1nkmAFSiW2LTtBHwikMrA767SYUSLW9fNTE2knI,5385 +transformers/models/falcon/__init__.py,sha256=qmBlF_xusyrueKMfriC2ldVrHzeLIT7ruSdduMODuE4,993 +transformers/models/falcon/__pycache__/__init__.cpython-310.pyc,, +transformers/models/falcon/__pycache__/configuration_falcon.cpython-310.pyc,, +transformers/models/falcon/__pycache__/convert_custom_code_checkpoint.cpython-310.pyc,, +transformers/models/falcon/__pycache__/modeling_falcon.cpython-310.pyc,, +transformers/models/falcon/configuration_falcon.py,sha256=aPqHUHDkM-SeeoHw-qgtBa9y-ec4oQhaP3LYMQU3-sc,10917 +transformers/models/falcon/convert_custom_code_checkpoint.py,sha256=XPJ1owRjRno_Y1AD5UeoPE4oo6a-SeQR9w9u-EIUktE,3061 +transformers/models/falcon/modeling_falcon.py,sha256=74h1FIqCkaNCformd2nQJTzcLvb1hXyM4FpqXs2jXXg,74111 +transformers/models/falcon_mamba/__init__.py,sha256=Czo-T_Nt73nvRbK-yJEZAYsU3Bxu4i1fOxFuPosiFPw,1005 +transformers/models/falcon_mamba/__pycache__/__init__.cpython-310.pyc,, +transformers/models/falcon_mamba/__pycache__/configuration_falcon_mamba.cpython-310.pyc,, +transformers/models/falcon_mamba/__pycache__/modeling_falcon_mamba.cpython-310.pyc,, +transformers/models/falcon_mamba/configuration_falcon_mamba.py,sha256=S6EfKK6HVnT9bNZ_tqSuOoaeBtvYpsOAX6Zb0yVAGX4,7762 +transformers/models/falcon_mamba/modeling_falcon_mamba.py,sha256=bFVpKBBSQ37KekGG1S1vTFhqYiwwgbfh7wLscZ14e3k,40646 
+transformers/models/fastspeech2_conformer/__init__.py,sha256=pILmX51CcqSiFGtl_dsX1yW2S_QugA3UHAT8f4psOtA,1077 +transformers/models/fastspeech2_conformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/fastspeech2_conformer/__pycache__/configuration_fastspeech2_conformer.cpython-310.pyc,, +transformers/models/fastspeech2_conformer/__pycache__/convert_fastspeech2_conformer_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/fastspeech2_conformer/__pycache__/convert_hifigan.cpython-310.pyc,, +transformers/models/fastspeech2_conformer/__pycache__/convert_model_with_hifigan.cpython-310.pyc,, +transformers/models/fastspeech2_conformer/__pycache__/modeling_fastspeech2_conformer.cpython-310.pyc,, +transformers/models/fastspeech2_conformer/__pycache__/tokenization_fastspeech2_conformer.cpython-310.pyc,, +transformers/models/fastspeech2_conformer/configuration_fastspeech2_conformer.py,sha256=Sff6-HrwSc_WQzF3YwWjTJJ_Xfe5m3hN_WL7OKXbAXQ,24631 +transformers/models/fastspeech2_conformer/convert_fastspeech2_conformer_original_pytorch_checkpoint_to_pytorch.py,sha256=-ToJHpwI-xoLLMzLYdqFrBL6j6nsSPlNbkQ3pfTgJ6Y,8939 +transformers/models/fastspeech2_conformer/convert_hifigan.py,sha256=RC1PaVnl1cLx8c2LdYycNti7iYRhUM7_KrX2mF5WyCM,5431 +transformers/models/fastspeech2_conformer/convert_model_with_hifigan.py,sha256=wT4pQGgEHVFoWI1Lb71L7_i6ujfNrSMDGYuDGb4oeh8,3471 +transformers/models/fastspeech2_conformer/modeling_fastspeech2_conformer.py,sha256=WV5sEofZpghpK9e1DytzlnmMiDprSp-TC4WPCFuubao,78023 +transformers/models/fastspeech2_conformer/tokenization_fastspeech2_conformer.py,sha256=XZumdFJwNj9wfU_ijmZSUWKY2fn8RTuB8zSzyOza328,6265 +transformers/models/flaubert/__init__.py,sha256=LdGmxq7pcDPVcvqO1ol7VYtpjKKCAQuiJ1ISrNT9nEs,1078 +transformers/models/flaubert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/flaubert/__pycache__/configuration_flaubert.cpython-310.pyc,, 
+transformers/models/flaubert/__pycache__/modeling_flaubert.cpython-310.pyc,, +transformers/models/flaubert/__pycache__/modeling_tf_flaubert.cpython-310.pyc,, +transformers/models/flaubert/__pycache__/tokenization_flaubert.cpython-310.pyc,, +transformers/models/flaubert/configuration_flaubert.py,sha256=bd7BCSDeNq6Q2hu79BXPjOkYqQL9RjkeWPtzAaSp_SM,11241 +transformers/models/flaubert/modeling_flaubert.py,sha256=UXQ9oVdgyu1zLzsG1wMKaC3j2qLr518QNsYL50wMDTw,57891 +transformers/models/flaubert/modeling_tf_flaubert.py,sha256=4hGU3PUC_IswNagCpGqfrzLu27F8OYSGmFOm4_XSh9A,57346 +transformers/models/flaubert/tokenization_flaubert.py,sha256=SNK5TKNn2tctqVQUWSSwDuLLe_v9d6Ozw68OXk6dv40,22208 +transformers/models/flava/__init__.py,sha256=u1s081HA6nko-0YymDOaaHoK5uRqO_bLdJCTgdX0vcA,1113 +transformers/models/flava/__pycache__/__init__.cpython-310.pyc,, +transformers/models/flava/__pycache__/configuration_flava.cpython-310.pyc,, +transformers/models/flava/__pycache__/convert_dalle_to_flava_codebook.cpython-310.pyc,, +transformers/models/flava/__pycache__/convert_flava_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/flava/__pycache__/feature_extraction_flava.cpython-310.pyc,, +transformers/models/flava/__pycache__/image_processing_flava.cpython-310.pyc,, +transformers/models/flava/__pycache__/modeling_flava.cpython-310.pyc,, +transformers/models/flava/__pycache__/processing_flava.cpython-310.pyc,, +transformers/models/flava/configuration_flava.py,sha256=sjJJklCE6Lr_Gxlx71jOyd8CuVh_4EZkAbeyaGhsEag,34065 +transformers/models/flava/convert_dalle_to_flava_codebook.py,sha256=iEJM9W_cKk3HK0gKS6i2ygEMeyymWCMl18LDaQXRAhY,3428 +transformers/models/flava/convert_flava_original_pytorch_to_hf.py,sha256=LilQpbe6qeN2P_uXljae6zEPx_KoepoRv4uvCEAo0QA,4372 +transformers/models/flava/feature_extraction_flava.py,sha256=2KMUGwnka1QJbCmDYmiqETaSNDCWQheR_zWWWvcIk9w,1239 +transformers/models/flava/image_processing_flava.py,sha256=cci87P-2ZE_CyNKQTDA4BLEBCOn2KrJQD2IxeTFzWyk,37437 
+transformers/models/flava/modeling_flava.py,sha256=PE0T1Y9Zw9GFnRGM5Ayo6FdW9spXEa5sihXMDIYTZ6Y,96700 +transformers/models/flava/processing_flava.py,sha256=jruLqgFkfdRXWRyP-QI4FlbqRz8xS-cVBRvSAwD4Okw,6863 +transformers/models/fnet/__init__.py,sha256=V3nuz_DsD_K5-RuL-Gt4hr5FVtNz12s46O_Vtx_xvCY,1068 +transformers/models/fnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/fnet/__pycache__/configuration_fnet.cpython-310.pyc,, +transformers/models/fnet/__pycache__/convert_fnet_original_flax_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/fnet/__pycache__/modeling_fnet.cpython-310.pyc,, +transformers/models/fnet/__pycache__/tokenization_fnet.cpython-310.pyc,, +transformers/models/fnet/__pycache__/tokenization_fnet_fast.cpython-310.pyc,, +transformers/models/fnet/configuration_fnet.py,sha256=oZVGszdEYsE-nJnpSlmU3r4tENCfwHnNKaL4NmrD7N4,5567 +transformers/models/fnet/convert_fnet_original_flax_checkpoint_to_pytorch.py,sha256=s2hJZxxZnljY-aQE5S5CT6tL2yYbwH28KxCMKrzzHMY,6911 +transformers/models/fnet/modeling_fnet.py,sha256=TwvNEgouigYEV2tDOUD2yNZgfnZlrkLfdk3HYNT7LuI,49558 +transformers/models/fnet/tokenization_fnet.py,sha256=jUkuFNfelVVWVhPHNVBl2kbx5k9IfVE26xnqHnyF7B4,14578 +transformers/models/fnet/tokenization_fnet_fast.py,sha256=6t6PuMKkgdlQ2BUpA-67KSfu7fEB0Ubk-Voxqq9SGJo,8096 +transformers/models/focalnet/__init__.py,sha256=kFk7pYv4troBIWdCYosHMKh8PAnpXqjlxaRRQ5adkG0,997 +transformers/models/focalnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/focalnet/__pycache__/configuration_focalnet.cpython-310.pyc,, +transformers/models/focalnet/__pycache__/convert_focalnet_to_hf_format.cpython-310.pyc,, +transformers/models/focalnet/__pycache__/modeling_focalnet.cpython-310.pyc,, +transformers/models/focalnet/configuration_focalnet.py,sha256=y2d2fA5dtonpX0OtCNY9gVKmz0xITrylfbQYWlwuyM4,8057 +transformers/models/focalnet/convert_focalnet_to_hf_format.py,sha256=xBoop7K4unfPawCbmlv7BTQHpbJkaUWasrwsw8dW_KI,9450 
+transformers/models/focalnet/modeling_focalnet.py,sha256=mQ9ooxYgR-I03yG3cjUGZ_f9cYU3ObjjxCCyUar87sI,43294 +transformers/models/fsmt/__init__.py,sha256=u_Xx7d3qDicqwR_W0js1h2wPiLKWM1RlMu7fsBdIHy4,1026 +transformers/models/fsmt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/fsmt/__pycache__/configuration_fsmt.cpython-310.pyc,, +transformers/models/fsmt/__pycache__/convert_fsmt_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/fsmt/__pycache__/modeling_fsmt.cpython-310.pyc,, +transformers/models/fsmt/__pycache__/tokenization_fsmt.cpython-310.pyc,, +transformers/models/fsmt/configuration_fsmt.py,sha256=IOCuyx1-F-_Nrp1bnHCUxmf75OtArzQf-w0HfcWDJHo,10090 +transformers/models/fsmt/convert_fsmt_original_pytorch_checkpoint_to_pytorch.py,sha256=BWtn90XQAuWGp8k9zns5St9On_os395ESNgkaXy6y2g,11264 +transformers/models/fsmt/modeling_fsmt.py,sha256=gmWfAOfZtcd48SjWR8tB1j_JYysEx_22_QUtlTRu56w,57798 +transformers/models/fsmt/tokenization_fsmt.py,sha256=vSDmTbQxJRS1-nfPCW8JglxwhOW8ius4TCekcbtVTrc,19263 +transformers/models/funnel/__init__.py,sha256=087Y3Xz6y0HA5SgKe-s2z-ZzUIq1u_axxCRh2__gVro,1182 +transformers/models/funnel/__pycache__/__init__.cpython-310.pyc,, +transformers/models/funnel/__pycache__/configuration_funnel.cpython-310.pyc,, +transformers/models/funnel/__pycache__/convert_funnel_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/funnel/__pycache__/modeling_funnel.cpython-310.pyc,, +transformers/models/funnel/__pycache__/modeling_tf_funnel.cpython-310.pyc,, +transformers/models/funnel/__pycache__/tokenization_funnel.cpython-310.pyc,, +transformers/models/funnel/__pycache__/tokenization_funnel_fast.cpython-310.pyc,, +transformers/models/funnel/configuration_funnel.py,sha256=cyXfyi1BlvtD683UockXRWIgmV1clSEuwj1euzsw36Q,7680 +transformers/models/funnel/convert_funnel_original_tf_checkpoint_to_pytorch.py,sha256=zxR3e3QerGgEpcSc1Dg1dTupzRDZdZ-WIqdm53wqehc,2349 
+transformers/models/funnel/modeling_funnel.py,sha256=ra6KVVAtBparReiOl1sb2D2wn_8B-OcmBYhJhdum-aA,69763 +transformers/models/funnel/modeling_tf_funnel.py,sha256=sQ8kdJvvWpjY2Ghodmzff5xuGtgdV-NW3kXDBddlRdw,80462 +transformers/models/funnel/tokenization_funnel.py,sha256=FJ93eN1Pn2MlGC43HgyqKb2YjDGPWQr73Ekqnk-AaL4,22718 +transformers/models/funnel/tokenization_funnel_fast.py,sha256=UeGB97RlM-XJ_e22EZLFjFyL5bf6uZ3ejAbucN3cdzw,8679 +transformers/models/fuyu/__init__.py,sha256=NcygIhTFvIZzXPZUReC1WYReGAVINSpG0xW7KqEmd8c,1065 +transformers/models/fuyu/__pycache__/__init__.cpython-310.pyc,, +transformers/models/fuyu/__pycache__/configuration_fuyu.cpython-310.pyc,, +transformers/models/fuyu/__pycache__/convert_fuyu_model_weights_to_hf.cpython-310.pyc,, +transformers/models/fuyu/__pycache__/image_processing_fuyu.cpython-310.pyc,, +transformers/models/fuyu/__pycache__/modeling_fuyu.cpython-310.pyc,, +transformers/models/fuyu/__pycache__/processing_fuyu.cpython-310.pyc,, +transformers/models/fuyu/configuration_fuyu.py,sha256=9YDe0lk2Ipss7hxKLK2TnvadXiA7G5Pphky2FiCUvQo,9985 +transformers/models/fuyu/convert_fuyu_model_weights_to_hf.py,sha256=c8A4qiUY47MfPeEG518qofxFdzut0me3EtFNizEHv6Q,4847 +transformers/models/fuyu/image_processing_fuyu.py,sha256=iWiavvkAdnFT7pd2xJVYu9QDs9NNGgGyOb5vG18FjoQ,33511 +transformers/models/fuyu/modeling_fuyu.py,sha256=MppcXZ6d6yMtsehfMI-044k6HddOk7c-FDSDP2B8pls,18892 +transformers/models/fuyu/processing_fuyu.py,sha256=rLG3RdORZcRvoBz3pfBc_Dp6GmGoqNNSLxqe7NQ5UwQ,33176 +transformers/models/gemma/__init__.py,sha256=xXoIfeCXNQOEnARxU3QucfH5mn-a_AE4wp69YkykT50,1111 +transformers/models/gemma/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gemma/__pycache__/configuration_gemma.cpython-310.pyc,, +transformers/models/gemma/__pycache__/convert_gemma_weights_to_hf.cpython-310.pyc,, +transformers/models/gemma/__pycache__/modeling_flax_gemma.cpython-310.pyc,, +transformers/models/gemma/__pycache__/modeling_gemma.cpython-310.pyc,, 
+transformers/models/gemma/__pycache__/modular_gemma.cpython-310.pyc,, +transformers/models/gemma/__pycache__/tokenization_gemma.cpython-310.pyc,, +transformers/models/gemma/__pycache__/tokenization_gemma_fast.cpython-310.pyc,, +transformers/models/gemma/configuration_gemma.py,sha256=FVTza9-vcexOewOy43MRiXzniMfmdZibiXaSJgu006A,7787 +transformers/models/gemma/convert_gemma_weights_to_hf.py,sha256=UCoyJd4wVYKlikKMK0-9GRFAa-Cm3OtLt7oSJjXOuPA,7366 +transformers/models/gemma/modeling_flax_gemma.py,sha256=9hIEDz0Y8Ld1nk6s5spdv1tZmwsq5Dv12AwkKOYP0LY,32416 +transformers/models/gemma/modeling_gemma.py,sha256=_bTR_L7SEL327L4-cuSvA9vtTAo-ja1ujjghnSbnCiI,47529 +transformers/models/gemma/modular_gemma.py,sha256=BocqSmMRnjG6ZGn_y_4_oDExb7t4z9xeUKWor1rdHEg,22516 +transformers/models/gemma/tokenization_gemma.py,sha256=uNYpw60r1EYnNaHZF-mm19inSbaxlHw_VStjGyU3jl4,14166 +transformers/models/gemma/tokenization_gemma_fast.py,sha256=7JHnl4sHyWUdaFv_j7__HxauwKNnkyqXI37dmFUyhUs,8314 +transformers/models/gemma2/__init__.py,sha256=H0jWJX-AcGRTjdzkGJagKnjB6GnpqVUG4ODFhMF9OWM,993 +transformers/models/gemma2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gemma2/__pycache__/configuration_gemma2.cpython-310.pyc,, +transformers/models/gemma2/__pycache__/convert_gemma2_weights_to_hf.cpython-310.pyc,, +transformers/models/gemma2/__pycache__/modeling_gemma2.cpython-310.pyc,, +transformers/models/gemma2/__pycache__/modular_gemma2.cpython-310.pyc,, +transformers/models/gemma2/configuration_gemma2.py,sha256=PdUUVP6fPjR1MHalnIRwFkTYHGbol9il5hqGa9WHYTA,8739 +transformers/models/gemma2/convert_gemma2_weights_to_hf.py,sha256=Dp4GAX_zZ5GCs8bLWmUuc7Pbw4pNOUIfFKOZ3KMYi8A,8543 +transformers/models/gemma2/modeling_gemma2.py,sha256=b-uHvDu9C-rHgxVTul1nyJg19kr6KpiC-C_5gnamzYk,52519 +transformers/models/gemma2/modular_gemma2.py,sha256=aqV6UkCp12xbwGwzI2Gwgy8N8-Bt94np2GMzTqimCUY,31376 +transformers/models/git/__init__.py,sha256=jY1iLd7UMOmcCfrKgzoUJawLa0DQ55wHN26L09YSwhc,1021 
+transformers/models/git/__pycache__/__init__.cpython-310.pyc,, +transformers/models/git/__pycache__/configuration_git.cpython-310.pyc,, +transformers/models/git/__pycache__/convert_git_to_pytorch.cpython-310.pyc,, +transformers/models/git/__pycache__/modeling_git.cpython-310.pyc,, +transformers/models/git/__pycache__/processing_git.cpython-310.pyc,, +transformers/models/git/configuration_git.py,sha256=avdu5gPUzKcJfphFgf5d-QIu4RFjUtQZSKYIFDzADEQ,10431 +transformers/models/git/convert_git_to_pytorch.py,sha256=V5Mp-Bzy49DDkULMWEFBFUYv9XBK4p7DSEDrsnqDd9M,23105 +transformers/models/git/modeling_git.py,sha256=7IBoq9ljyu1QGTUOirSLKT6ymONQUzbxMBHa0ukrwBw,73510 +transformers/models/git/processing_git.py,sha256=1uGrwlR0D2-Z-8TslVcrSyCuwVrb2uk-chdG0yJDW48,6876 +transformers/models/glm/__init__.py,sha256=fIafw6FAflbbeG_nEM_VPJyMJHnu_NbWHTHjECIAvIs,987 +transformers/models/glm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/glm/__pycache__/configuration_glm.cpython-310.pyc,, +transformers/models/glm/__pycache__/convert_glm_weights_to_hf.cpython-310.pyc,, +transformers/models/glm/__pycache__/modeling_glm.cpython-310.pyc,, +transformers/models/glm/__pycache__/modular_glm.cpython-310.pyc,, +transformers/models/glm/configuration_glm.py,sha256=foQ_naBoGdrXYpdbJk5IwFH5LUxY0tpPJxnA1e-bUIA,6862 +transformers/models/glm/convert_glm_weights_to_hf.py,sha256=lNUaFD2IWXTSpMGW9B8zj7E9ygz26fh3S0ueYAlqius,7229 +transformers/models/glm/modeling_glm.py,sha256=5rG4OwmRu0yFL6awkfyXgp_saoM8G6_lhrAHXPMJtpw,47823 +transformers/models/glm/modular_glm.py,sha256=rOSXBsyECZhENwaJ8M9bBxJ1NK2Lwv7POZu67Uu3xgQ,4093 +transformers/models/glpn/__init__.py,sha256=YYoaugUj0un_FnfusrkzFfT_UtvUJEjMDaRDS8IcYAE,1073 +transformers/models/glpn/__pycache__/__init__.cpython-310.pyc,, +transformers/models/glpn/__pycache__/configuration_glpn.cpython-310.pyc,, +transformers/models/glpn/__pycache__/convert_glpn_to_pytorch.cpython-310.pyc,, 
+transformers/models/glpn/__pycache__/feature_extraction_glpn.cpython-310.pyc,, +transformers/models/glpn/__pycache__/image_processing_glpn.cpython-310.pyc,, +transformers/models/glpn/__pycache__/modeling_glpn.cpython-310.pyc,, +transformers/models/glpn/configuration_glpn.py,sha256=FmxBJ1zoC4IDy68CV6eiHrhbzzI9bt0e0lscsZNShFM,5998 +transformers/models/glpn/convert_glpn_to_pytorch.py,sha256=i-_gJfqz7ethjzLpEz82rajkhSvpXULlI443jFdczsM,8557 +transformers/models/glpn/feature_extraction_glpn.py,sha256=-LIWcn0jmu6Wgk05foZGFGZLrS8dnCknF8uENaHFzfE,1209 +transformers/models/glpn/image_processing_glpn.py,sha256=h_0tMBDMLGmM8QLKkYC6G3sSTiCjIDhGG25tZFUuiZo,12691 +transformers/models/glpn/modeling_glpn.py,sha256=ifTKy0R9ztGbcm0PKyxsene9t4mesTM4oJ_ULBsdHf8,31490 +transformers/models/gpt2/__init__.py,sha256=NRi7aYu3gezDPsiXiiG6dgSpCMHSIvFpC3iI0w-JMA0,1182 +transformers/models/gpt2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/configuration_gpt2.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/convert_gpt2_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/modeling_flax_gpt2.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/modeling_gpt2.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/modeling_tf_gpt2.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/tokenization_gpt2.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/tokenization_gpt2_fast.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/tokenization_gpt2_tf.cpython-310.pyc,, +transformers/models/gpt2/configuration_gpt2.py,sha256=kfBMy7KVmNF7RPyLnUnVyBSsN1pAP5pZdLO3UqF01zw,12028 +transformers/models/gpt2/convert_gpt2_original_tf_checkpoint_to_pytorch.py,sha256=3qlC-RgCP9x3U-mXQlUD7OEF3q4-b-EF3njeqp7X-wM,2531 +transformers/models/gpt2/modeling_flax_gpt2.py,sha256=vKZO4-Suzi-gQqK3QK_LQhCaXAwsTl13C7Hm_83ODPo,32094 +transformers/models/gpt2/modeling_gpt2.py,sha256=UH5PbsDYhw9qnvOYYGdjdeR9aBm9-9MjAdI9tuM2hb8,76082 
+transformers/models/gpt2/modeling_tf_gpt2.py,sha256=zOaUkEuIc0QPcp9qV5dwZ3lc6ongy8sN4XDgTrMzAEM,56777 +transformers/models/gpt2/tokenization_gpt2.py,sha256=ktid1ESF1ddwlv6JznDnocuESJMl-knLrqlaIa_NHW8,13169 +transformers/models/gpt2/tokenization_gpt2_fast.py,sha256=_9MdILprCNEYRzHFM9wWTt_3frnvxOYkZ45xxLVslmw,5281 +transformers/models/gpt2/tokenization_gpt2_tf.py,sha256=u7aLICmUtkhZhe9THkS0qq8hBVbHmyuktWmUEUXF5W0,3865 +transformers/models/gpt_bigcode/__init__.py,sha256=KQNb7PO57eZpP345wSbe_C3iL-N4VPscw1GY2mv81uE,1003 +transformers/models/gpt_bigcode/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gpt_bigcode/__pycache__/configuration_gpt_bigcode.cpython-310.pyc,, +transformers/models/gpt_bigcode/__pycache__/modeling_gpt_bigcode.cpython-310.pyc,, +transformers/models/gpt_bigcode/configuration_gpt_bigcode.py,sha256=HRmAGutvqlrQWtmsfGCsixHxhla7465UrgcFBCDt9hU,6311 +transformers/models/gpt_bigcode/modeling_gpt_bigcode.py,sha256=Vq2zC64xi3PjbKFZBouG9PymFtsY4l_IHfNBvU28Rus,65952 +transformers/models/gpt_neo/__init__.py,sha256=b25qxianvucgAd3OxuI00Rr5324o-CRes0zrcEIOCZI,1036 +transformers/models/gpt_neo/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gpt_neo/__pycache__/configuration_gpt_neo.cpython-310.pyc,, +transformers/models/gpt_neo/__pycache__/convert_gpt_neo_mesh_tf_to_pytorch.cpython-310.pyc,, +transformers/models/gpt_neo/__pycache__/modeling_flax_gpt_neo.cpython-310.pyc,, +transformers/models/gpt_neo/__pycache__/modeling_gpt_neo.cpython-310.pyc,, +transformers/models/gpt_neo/configuration_gpt_neo.py,sha256=5te0JrZ9pGYDXQzEgOc6nrgMUYXZaI-Q3lVCDIdN1aQ,11880 +transformers/models/gpt_neo/convert_gpt_neo_mesh_tf_to_pytorch.py,sha256=qerp3UqKhts8pfHtNNBJ30YpyX_Qsjfc33ubRabrRkU,2588 +transformers/models/gpt_neo/modeling_flax_gpt_neo.py,sha256=MPxNyFQD0T7jQfOZgzVo9l-ipRVILoL0E48B4Bc0kdg,28160 +transformers/models/gpt_neo/modeling_gpt_neo.py,sha256=HGDUIz1tJXXoD_AgaZvYelzSfFB-CwZ3G7JujMhYWxc,59198 
+transformers/models/gpt_neox/__init__.py,sha256=6CL92CuqBTIDJ-YH_doFwb-oRylAffw7pwxedv3a-40,1043 +transformers/models/gpt_neox/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gpt_neox/__pycache__/configuration_gpt_neox.cpython-310.pyc,, +transformers/models/gpt_neox/__pycache__/modeling_gpt_neox.cpython-310.pyc,, +transformers/models/gpt_neox/__pycache__/tokenization_gpt_neox_fast.cpython-310.pyc,, +transformers/models/gpt_neox/configuration_gpt_neox.py,sha256=KUkKPUbHfsNPCLBx0PbxDxbqPdAPyuFvg6U80cTEL-Y,10450 +transformers/models/gpt_neox/modeling_gpt_neox.py,sha256=hJOATvAouuvy1TtRWVjIFL6KDhQOsqfbdyveEaEgAE0,63027 +transformers/models/gpt_neox/tokenization_gpt_neox_fast.py,sha256=d3-37M7gx1M-gRMtRC7FRUReD4pF8AAuioITt8ZEy98,8985 +transformers/models/gpt_neox_japanese/__init__.py,sha256=z4kbUmZSjE-Hs9ba8ul3Yncc9ZJy7ePufbwwRlfqWqw,1065 +transformers/models/gpt_neox_japanese/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gpt_neox_japanese/__pycache__/configuration_gpt_neox_japanese.cpython-310.pyc,, +transformers/models/gpt_neox_japanese/__pycache__/modeling_gpt_neox_japanese.cpython-310.pyc,, +transformers/models/gpt_neox_japanese/__pycache__/tokenization_gpt_neox_japanese.cpython-310.pyc,, +transformers/models/gpt_neox_japanese/configuration_gpt_neox_japanese.py,sha256=rugQmK2-6yU1q97fGoDiADoqvgQthnWWVvIEPDba-nI,9122 +transformers/models/gpt_neox_japanese/modeling_gpt_neox_japanese.py,sha256=6SL6FYNuT2tJJ4nR25Hk7h3Z80C5gfO1bB_b-O3O_3k,41062 +transformers/models/gpt_neox_japanese/tokenization_gpt_neox_japanese.py,sha256=hD_GTFt0ONJAmi_hez8VeHhlALPO5tgVUgHASFADrgs,16250 +transformers/models/gpt_sw3/__init__.py,sha256=-g6WlJ6EhhrJKCCsPf78cgvGD7oWvfeW9GBGBpW6wcM,958 +transformers/models/gpt_sw3/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gpt_sw3/__pycache__/convert_megatron_to_pytorch.cpython-310.pyc,, +transformers/models/gpt_sw3/__pycache__/tokenization_gpt_sw3.cpython-310.pyc,, 
+transformers/models/gpt_sw3/convert_megatron_to_pytorch.py,sha256=CBBDQ0Kb94o1VGiJ2XbdZ4vrPiFkwnaASQXxlGVWwRc,8155 +transformers/models/gpt_sw3/tokenization_gpt_sw3.py,sha256=un1AuiD5lYaO9alUHo2O3OuIA5l6BF2AzKrIsRbajvc,12502 +transformers/models/gptj/__init__.py,sha256=rgFDJcsxcq1ytl7BTZthr7sSmaxqggSbvrIseycmE-s,1063 +transformers/models/gptj/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gptj/__pycache__/configuration_gptj.cpython-310.pyc,, +transformers/models/gptj/__pycache__/modeling_flax_gptj.cpython-310.pyc,, +transformers/models/gptj/__pycache__/modeling_gptj.cpython-310.pyc,, +transformers/models/gptj/__pycache__/modeling_tf_gptj.cpython-310.pyc,, +transformers/models/gptj/configuration_gptj.py,sha256=eqtAfhaoNMwytXXkhVWmO9KxhFZEo-jOAVBIvM7hs9s,8829 +transformers/models/gptj/modeling_flax_gptj.py,sha256=llNypW7hkw-afhxaQeT5UZbxE6LZOeppoEofLT2yHQ8,28605 +transformers/models/gptj/modeling_gptj.py,sha256=5mdFr2P_cZZRD-ojxKkUDmsz4qWVxNklUwtAmbJEjso,62483 +transformers/models/gptj/modeling_tf_gptj.py,sha256=nejSjWc60rjQD1Ga8yeNZWSl7XxXNYzfwTRh11EuW-I,48249 +transformers/models/granite/__init__.py,sha256=GJDr9klPSZcxXbXjAdwKMM8nB96VAvxVGIP2N_FeTJQ,1633 +transformers/models/granite/__pycache__/__init__.cpython-310.pyc,, +transformers/models/granite/__pycache__/configuration_granite.cpython-310.pyc,, +transformers/models/granite/__pycache__/modeling_granite.cpython-310.pyc,, +transformers/models/granite/__pycache__/modular_granite.cpython-310.pyc,, +transformers/models/granite/configuration_granite.py,sha256=FPCmiIOBRGwyeIIkfOO7PPy7Tf6jWZw6N2oYDiYyPsE,9087 +transformers/models/granite/modeling_granite.py,sha256=XArp7cUiwFv2V8L0IiaTITTwyjiE9Sn7YNk3tvKycOc,41096 +transformers/models/granite/modular_granite.py,sha256=L7_C1BCabNKwfQurB7X7DciPjkzknX23r0boSm7WAVU,12467 +transformers/models/granitemoe/__init__.py,sha256=e4KKtNT7YFkYkPBfcS0VyhpT_1vF0JkR2qdYKPqRUcE,1001 +transformers/models/granitemoe/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/granitemoe/__pycache__/configuration_granitemoe.cpython-310.pyc,, +transformers/models/granitemoe/__pycache__/modeling_granitemoe.cpython-310.pyc,, +transformers/models/granitemoe/configuration_granitemoe.py,sha256=YBrDyqDjMYoHfK33-MXFR9LsqTl-UhkPfwuTuYimp6A,9400 +transformers/models/granitemoe/modeling_granitemoe.py,sha256=x7gHh-keXdT8ET5GgSfNLkDomH89cILZ8IrhB4RLvWg,65646 +transformers/models/grounding_dino/__init__.py,sha256=hrTbNDHz5rgQfwPqtLzd8BGLCOgG1azEiWXR1ottlGk,1105 +transformers/models/grounding_dino/__pycache__/__init__.cpython-310.pyc,, +transformers/models/grounding_dino/__pycache__/configuration_grounding_dino.cpython-310.pyc,, +transformers/models/grounding_dino/__pycache__/convert_grounding_dino_to_hf.cpython-310.pyc,, +transformers/models/grounding_dino/__pycache__/image_processing_grounding_dino.cpython-310.pyc,, +transformers/models/grounding_dino/__pycache__/modeling_grounding_dino.cpython-310.pyc,, +transformers/models/grounding_dino/__pycache__/processing_grounding_dino.cpython-310.pyc,, +transformers/models/grounding_dino/configuration_grounding_dino.py,sha256=PZ-enTohyIN1dsu3nr5CKPmkrpNEq4fdgJJdnoycRnA,14818 +transformers/models/grounding_dino/convert_grounding_dino_to_hf.py,sha256=U3T2-FjtYv9unK36_iA17Ifww7kWWYWpPbVC6u6-unQ,25445 +transformers/models/grounding_dino/image_processing_grounding_dino.py,sha256=1mwzxVQu9yVlhK2OhCpLqCqekC-nB96oaRCViypQWp8,70837 +transformers/models/grounding_dino/modeling_grounding_dino.py,sha256=6NbpFwjR8QM4VDmkdHB2RWn7NjtQGUh3ZfH9IdY9klw,133604 +transformers/models/grounding_dino/processing_grounding_dino.py,sha256=nRy13os2LotSvrk0TyoYr9ZRceRI2t41TGqitGg1DKI,9531 +transformers/models/groupvit/__init__.py,sha256=vrJ-tBa1XOd1CloHhXKMCIlggMxOS4M7jCcqlLQxMo4,1037 +transformers/models/groupvit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/groupvit/__pycache__/configuration_groupvit.cpython-310.pyc,, 
+transformers/models/groupvit/__pycache__/convert_groupvit_nvlab_to_hf.cpython-310.pyc,, +transformers/models/groupvit/__pycache__/modeling_groupvit.cpython-310.pyc,, +transformers/models/groupvit/__pycache__/modeling_tf_groupvit.cpython-310.pyc,, +transformers/models/groupvit/configuration_groupvit.py,sha256=78MBQzraYHdisuhkdgBndBOxcHiDd_Vs_2hu6ahPl2c,19161 +transformers/models/groupvit/convert_groupvit_nvlab_to_hf.py,sha256=9gQxkcjVNCP5lvV54SbbSsOjkKCHORcoiwq2gcczYCM,9775 +transformers/models/groupvit/modeling_groupvit.py,sha256=vMyIH0B67NqEw7PMnTgBKJ3XdIav8F59jwHSsV04Ens,68290 +transformers/models/groupvit/modeling_tf_groupvit.py,sha256=ykaNj-Xmqc7OH95-SSlexolku7ni6cA3xOhkpsMTCAE,90178 +transformers/models/helium/__init__.py,sha256=b1Senw5Mr129rzZSd1sW6-Ies2kIAUHfplpzgGeuTFE,993 +transformers/models/helium/__pycache__/__init__.cpython-310.pyc,, +transformers/models/helium/__pycache__/configuration_helium.cpython-310.pyc,, +transformers/models/helium/__pycache__/modeling_helium.cpython-310.pyc,, +transformers/models/helium/__pycache__/modular_helium.cpython-310.pyc,, +transformers/models/helium/configuration_helium.py,sha256=FWV3qbgLty0GDzXx3T8XoHpTeAz2RINK--AyH0mWhNk,6791 +transformers/models/helium/modeling_helium.py,sha256=jPGZcfVsMAHB5c1lo8eQVaaO6WhhYY-UewviB78xfVI,47544 +transformers/models/helium/modular_helium.py,sha256=wYe0RXeNnhhKaLoqntZbWT5mOIwPD6B3-JwCalAWSFM,5943 +transformers/models/herbert/__init__.py,sha256=3i5hlRANc-OFP86y2qzb_OCWVjJQ9XQswiglh5KbU7Y,1003 +transformers/models/herbert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/herbert/__pycache__/tokenization_herbert.cpython-310.pyc,, +transformers/models/herbert/__pycache__/tokenization_herbert_fast.cpython-310.pyc,, +transformers/models/herbert/tokenization_herbert.py,sha256=rBagn0nnG4HO81Met6tYGlKQbdebCCNq6jrtEiWnD54,25067 +transformers/models/herbert/tokenization_herbert_fast.py,sha256=oG2IA_5OGqVFgpJBWI1banP3v2R4db1YMSzDVrW0j5A,5963 
+transformers/models/hiera/__init__.py,sha256=b1kwKtpZVISJZ5Pri421uvH2v3IoRQ6XXHzxFOPHN-g,991 +transformers/models/hiera/__pycache__/__init__.cpython-310.pyc,, +transformers/models/hiera/__pycache__/configuration_hiera.cpython-310.pyc,, +transformers/models/hiera/__pycache__/convert_hiera_to_hf.cpython-310.pyc,, +transformers/models/hiera/__pycache__/modeling_hiera.cpython-310.pyc,, +transformers/models/hiera/configuration_hiera.py,sha256=N2aU73lEM1cILh0EMY-NKdkkbRWMC-evWuE1x8dBOaU,9319 +transformers/models/hiera/convert_hiera_to_hf.py,sha256=mFswCFkaqq_0sJJFOGZ2eQEAnF4YijmFJexrY6W9G3c,16620 +transformers/models/hiera/modeling_hiera.py,sha256=Xu3f6bNxcNZMgvKCU7hdFJFLfvOlqYBBmWwUTeWtucY,69784 +transformers/models/hubert/__init__.py,sha256=ai560JtgkksShocy0zcDejelkRZnK4IZPVKaTHCOxPQ,1031 +transformers/models/hubert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/hubert/__pycache__/configuration_hubert.cpython-310.pyc,, +transformers/models/hubert/__pycache__/convert_distilhubert_original_s3prl_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/hubert/__pycache__/convert_hubert_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/hubert/__pycache__/convert_hubert_original_s3prl_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/hubert/__pycache__/modeling_hubert.cpython-310.pyc,, +transformers/models/hubert/__pycache__/modeling_tf_hubert.cpython-310.pyc,, +transformers/models/hubert/configuration_hubert.py,sha256=QWOZSdSe0z3S0bbuY4VTAJli1_1JTjy30rZ5fAmG2YA,14938 +transformers/models/hubert/convert_distilhubert_original_s3prl_checkpoint_to_pytorch.py,sha256=4EOE_E4BIIbVesPmNCL4bVO5A91wxMpAhfAdOMpTDzg,8941 +transformers/models/hubert/convert_hubert_original_pytorch_checkpoint_to_pytorch.py,sha256=B--7c9u_xCwo2-2EgTlPjSNwdPtVzdY6YnpvniWm5UI,11027 +transformers/models/hubert/convert_hubert_original_s3prl_checkpoint_to_pytorch.py,sha256=QuwvhsVyqcAXCPQN-peyGc-1C6Gjk6d-ha4Kt5TYF3w,2894 
+transformers/models/hubert/modeling_hubert.py,sha256=M6oAW0fCeK1fWU9zspHjaZdgDQZdHBCcEJWIIw13ZS0,74469 +transformers/models/hubert/modeling_tf_hubert.py,sha256=NvS59tLaZiFItSWBofz7oPqY_jhGHWYPeTQ5s2Z9MDw,70775 +transformers/models/ibert/__init__.py,sha256=UMTcE54y6O9UNF8l9VV2rrTlJSAHooxeNeHNzPSgr_E,991 +transformers/models/ibert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/ibert/__pycache__/configuration_ibert.cpython-310.pyc,, +transformers/models/ibert/__pycache__/modeling_ibert.cpython-310.pyc,, +transformers/models/ibert/__pycache__/quant_modules.cpython-310.pyc,, +transformers/models/ibert/configuration_ibert.py,sha256=w0Do79Q-0Cpr-pqu4YjO2MYQgaqlCkBbBu1z11m7kBQ,7094 +transformers/models/ibert/modeling_ibert.py,sha256=26mBZllMhlnKwa3OpcCwJykvocahMFr9LN8x7iwleoo,57105 +transformers/models/ibert/quant_modules.py,sha256=ItU76CIx0XcZCPOR21dz99J9k5rK2fzffQz0jJCuNmM,30072 +transformers/models/idefics/__init__.py,sha256=zc4m1Vd6-Szs7Urt0Ry6eUScpza8iD-QPG4cq4xX34g,1116 +transformers/models/idefics/__pycache__/__init__.cpython-310.pyc,, +transformers/models/idefics/__pycache__/configuration_idefics.cpython-310.pyc,, +transformers/models/idefics/__pycache__/image_processing_idefics.cpython-310.pyc,, +transformers/models/idefics/__pycache__/modeling_idefics.cpython-310.pyc,, +transformers/models/idefics/__pycache__/modeling_tf_idefics.cpython-310.pyc,, +transformers/models/idefics/__pycache__/perceiver.cpython-310.pyc,, +transformers/models/idefics/__pycache__/perceiver_tf.cpython-310.pyc,, +transformers/models/idefics/__pycache__/processing_idefics.cpython-310.pyc,, +transformers/models/idefics/__pycache__/vision.cpython-310.pyc,, +transformers/models/idefics/__pycache__/vision_tf.cpython-310.pyc,, +transformers/models/idefics/configuration_idefics.py,sha256=4j7sAul74adsu3fXPiq34FePCqJJaafCg2dmHU9h_GU,15304 +transformers/models/idefics/image_processing_idefics.py,sha256=z0TVQSuCYmxZwR5uC06vrrabhxdPaOzrpHfM7lgsZuM,7802 
+transformers/models/idefics/modeling_idefics.py,sha256=KsOCt7kb50nKpnXkF3d3c20siLdRKxQ9Vj6WQf17_hU,81409 +transformers/models/idefics/modeling_tf_idefics.py,sha256=r-J4WxqoV5dBwLIiH48jJDHewRAUwxsc8TZmjKqbPuA,80311 +transformers/models/idefics/perceiver.py,sha256=uGv8FH2wZ-NO1EIaFclI1nkwUqaTA7i0PS9XxY7ivn0,9433 +transformers/models/idefics/perceiver_tf.py,sha256=rYqXv9j6bmr4NyZLAV1MhVMiiIMV7RZ9CafybPtYc9I,10006 +transformers/models/idefics/processing_idefics.py,sha256=7mpmmEN6XDJC1aanPLCb2V8wtupKdVQfJy1PZYq0xl4,23867 +transformers/models/idefics/vision.py,sha256=EVQ5lOtdV00gK_3TAuLI4zUeHbw4zV1RdZNXZqUXXiQ,22493 +transformers/models/idefics/vision_tf.py,sha256=Kf_PenRY1vhlBA62PvjdvUDyQTKIi30XqB_bMBN1Mrw,26010 +transformers/models/idefics2/__init__.py,sha256=_nHEhSqYWOAwg_SKrfxmkyYAVQK-29zRoohqGI-rfbk,1081 +transformers/models/idefics2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/idefics2/__pycache__/configuration_idefics2.cpython-310.pyc,, +transformers/models/idefics2/__pycache__/convert_idefics2_weights_to_hf.cpython-310.pyc,, +transformers/models/idefics2/__pycache__/image_processing_idefics2.cpython-310.pyc,, +transformers/models/idefics2/__pycache__/modeling_idefics2.cpython-310.pyc,, +transformers/models/idefics2/__pycache__/processing_idefics2.cpython-310.pyc,, +transformers/models/idefics2/configuration_idefics2.py,sha256=M4Oe8-smrRQQI4cI4_hIysHyR0tlHWZC-PPc8nwAgLQ,12018 +transformers/models/idefics2/convert_idefics2_weights_to_hf.py,sha256=3nd_V1qNTv7DehZZQLLAeKh0086xvjECNuWBvJmFbNM,6669 +transformers/models/idefics2/image_processing_idefics2.py,sha256=97VqFl-F8EFalLUs4dSNBnHCRZZ7slh9QGIDgKnwzb8,27492 +transformers/models/idefics2/modeling_idefics2.py,sha256=Hrlh9W8hgduoPuZgkTVddRGcal2otwsK1l6UTZ_frC0,82820 +transformers/models/idefics2/processing_idefics2.py,sha256=ojQ8WISuDar8OspuEp64dUElC-diRnYaR7reVYI-_LI,12701 +transformers/models/idefics3/__init__.py,sha256=PT5AUxZiOzqyl7GIlwykcoRLTT0EvqX2UFACoyGUres,1081 
+transformers/models/idefics3/__pycache__/__init__.cpython-310.pyc,, +transformers/models/idefics3/__pycache__/configuration_idefics3.cpython-310.pyc,, +transformers/models/idefics3/__pycache__/convert_idefics3_weights_to_hf.cpython-310.pyc,, +transformers/models/idefics3/__pycache__/image_processing_idefics3.cpython-310.pyc,, +transformers/models/idefics3/__pycache__/modeling_idefics3.cpython-310.pyc,, +transformers/models/idefics3/__pycache__/processing_idefics3.cpython-310.pyc,, +transformers/models/idefics3/configuration_idefics3.py,sha256=DVwzNwatzIql6aI6qw2eaeJlyzVYD08hbHDXfOu05Ag,8597 +transformers/models/idefics3/convert_idefics3_weights_to_hf.py,sha256=x-Ve2iDf4TL9jXcpmBKJBMPDVgqrfMEcMrcIFp_RWDA,7373 +transformers/models/idefics3/image_processing_idefics3.py,sha256=Ki1FTLZF1GmnuxFyc0SSA1sZf3QT3FrtaegvV2rtmeI,42447 +transformers/models/idefics3/modeling_idefics3.py,sha256=r61sxaU1saddu6v7oX7iLbNt3VpSeIcJSGo4XSrOl6s,64271 +transformers/models/idefics3/processing_idefics3.py,sha256=VjoLPimACQBxje-nMUh0W5mwbJxVMDf4jsAtAMUOmt8,16627 +transformers/models/ijepa/__init__.py,sha256=O0_Jqpy8kmorYC-x0QsoMYSHdqQt3E1j-UZGLQ9aCv0,991 +transformers/models/ijepa/__pycache__/__init__.cpython-310.pyc,, +transformers/models/ijepa/__pycache__/configuration_ijepa.cpython-310.pyc,, +transformers/models/ijepa/__pycache__/convert_ijepa_to_hf.cpython-310.pyc,, +transformers/models/ijepa/__pycache__/modeling_ijepa.cpython-310.pyc,, +transformers/models/ijepa/__pycache__/modular_ijepa.cpython-310.pyc,, +transformers/models/ijepa/configuration_ijepa.py,sha256=x1QvgcpUgPhW-J1oG1dbhQTTp6FvBExw_HboDar_6g4,4829 +transformers/models/ijepa/convert_ijepa_to_hf.py,sha256=AA82mqoS6QyfV5R44kCgkXZ2XsDW9lfrIVjRtSQMHIM,10387 +transformers/models/ijepa/modeling_ijepa.py,sha256=Edu6jEs2lUYHJy2gPwycFrcfd20b0hJqXrDkxzBXxiY,32116 +transformers/models/ijepa/modular_ijepa.py,sha256=tFZY-SyewSjc0jGxvBk48oR1zngq_p8lfUNGig-4s2Y,10201 
+transformers/models/imagegpt/__init__.py,sha256=XxwI4UaVyyvTcGuJQGruvLi-dHHl8MdOvhAum3FXaGo,1089 +transformers/models/imagegpt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/imagegpt/__pycache__/configuration_imagegpt.cpython-310.pyc,, +transformers/models/imagegpt/__pycache__/convert_imagegpt_original_tf2_to_pytorch.cpython-310.pyc,, +transformers/models/imagegpt/__pycache__/feature_extraction_imagegpt.cpython-310.pyc,, +transformers/models/imagegpt/__pycache__/image_processing_imagegpt.cpython-310.pyc,, +transformers/models/imagegpt/__pycache__/modeling_imagegpt.cpython-310.pyc,, +transformers/models/imagegpt/configuration_imagegpt.py,sha256=e6ks4KifsBoGYsE-0mo5RF3mMcbWkoRH4EImnHrKPYo,8772 +transformers/models/imagegpt/convert_imagegpt_original_tf2_to_pytorch.py,sha256=BMqKNGn1Jv4rntP9fG5C0yG_lF1MY_0h9yv0Qt4rjpM,2690 +transformers/models/imagegpt/feature_extraction_imagegpt.py,sha256=w66K-BFee-magI7KhFo60l6DUNOD10Wa1bQlnjKka3w,1241 +transformers/models/imagegpt/image_processing_imagegpt.py,sha256=ijrartcKJxQmKtGpTb-mxUdKvhky8ukbpDk84qAe1EM,14344 +transformers/models/imagegpt/modeling_imagegpt.py,sha256=xDrADURL2XOlcP6ev0YjEiol1BHaex_OcTjOj3qV6W8,52185 +transformers/models/informer/__init__.py,sha256=L-BwVQfdq5ve06VJJ-OnTh-m_YqSMNcpDQ1z6sbDtNI,997 +transformers/models/informer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/informer/__pycache__/configuration_informer.cpython-310.pyc,, +transformers/models/informer/__pycache__/modeling_informer.cpython-310.pyc,, +transformers/models/informer/configuration_informer.py,sha256=k-Zyf1sZFC_YmUWABPkEV5fv6qtNjKfECm5AWJqz9Q4,12443 +transformers/models/informer/modeling_informer.py,sha256=T9V7oLXeq0U4B0_OU7plncjeCaWS86JY-bvu_1m9W84,101578 +transformers/models/instructblip/__init__.py,sha256=gI7F0N1dRSYdZtTumtuoPcIJcuBI8PO4DEOQS4_nWuc,1048 +transformers/models/instructblip/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/instructblip/__pycache__/configuration_instructblip.cpython-310.pyc,, +transformers/models/instructblip/__pycache__/convert_instructblip_original_to_pytorch.cpython-310.pyc,, +transformers/models/instructblip/__pycache__/modeling_instructblip.cpython-310.pyc,, +transformers/models/instructblip/__pycache__/processing_instructblip.cpython-310.pyc,, +transformers/models/instructblip/configuration_instructblip.py,sha256=f60ByFWdStqZJqxoJU6hO_mTK63Pdi-82grQWKwGDgo,15763 +transformers/models/instructblip/convert_instructblip_original_to_pytorch.py,sha256=iustpBsjHHzjQzbAhPJvhI7ZBSXCDoa9njtK9m_gm_I,13399 +transformers/models/instructblip/modeling_instructblip.py,sha256=xpv9KJqHr6qRi8-pYIwonAHJZnfnxjPNlaVfqiOD50g,75405 +transformers/models/instructblip/processing_instructblip.py,sha256=54A9fcN9s8B9zobUyUtetnZiUMLCcEemi0aTLvqeHkA,10441 +transformers/models/instructblipvideo/__init__.py,sha256=sTOPrPaq8f-igvxw5Bd9Tu9_bPMDDg2fmHk9sjjLpw0,2688 +transformers/models/instructblipvideo/__pycache__/__init__.cpython-310.pyc,, +transformers/models/instructblipvideo/__pycache__/configuration_instructblipvideo.cpython-310.pyc,, +transformers/models/instructblipvideo/__pycache__/convert_instructblipvideo_original_to_pytorch.cpython-310.pyc,, +transformers/models/instructblipvideo/__pycache__/image_processing_instructblipvideo.cpython-310.pyc,, +transformers/models/instructblipvideo/__pycache__/modeling_instructblipvideo.cpython-310.pyc,, +transformers/models/instructblipvideo/__pycache__/modular_instructblipvideo.cpython-310.pyc,, +transformers/models/instructblipvideo/__pycache__/processing_instructblipvideo.cpython-310.pyc,, +transformers/models/instructblipvideo/configuration_instructblipvideo.py,sha256=GAufg0z0_cfHtdwaUpigSfH6Cn2ZKi8mpg3Y1u-tP68,16764 +transformers/models/instructblipvideo/convert_instructblipvideo_original_to_pytorch.py,sha256=F69HaLiFYx7EyneqAjLG7q5jiPEy0bMTF8Q1qF2h7d4,13508 
+transformers/models/instructblipvideo/image_processing_instructblipvideo.py,sha256=rSbXpJMGJILC9qReM3FgLjT-t86OwVd77ox6Mntuqso,17364 +transformers/models/instructblipvideo/modeling_instructblipvideo.py,sha256=1P5_VSS5vZ_6aDK22Nu6bZd1qn88GP8SVP4ngZluaVc,77070 +transformers/models/instructblipvideo/modular_instructblipvideo.py,sha256=__reTsgsOntKyJl7yDN8D1OHogtvLm5eyYRt_edP4aQ,22839 +transformers/models/instructblipvideo/processing_instructblipvideo.py,sha256=DPpL4g4Mw1TGB_DoUPiqGSaN3nNo8ri0qWgRvSq27bY,11108 +transformers/models/jamba/__init__.py,sha256=zN7Rmr--d5GCEJzMA7gxIz-BYFydPN3cyuif85YU0Fk,991 +transformers/models/jamba/__pycache__/__init__.cpython-310.pyc,, +transformers/models/jamba/__pycache__/configuration_jamba.cpython-310.pyc,, +transformers/models/jamba/__pycache__/modeling_jamba.cpython-310.pyc,, +transformers/models/jamba/configuration_jamba.py,sha256=ZRM9HpnSRP8lupWix3yq_VtMVl-u8C20EPAFNXgIWwc,11739 +transformers/models/jamba/modeling_jamba.py,sha256=ZCeuFqNh-VDWMpX1-abLWoNDr9x4yCPKBGVwZzLyLcM,80484 +transformers/models/jetmoe/__init__.py,sha256=zhqtP2ZDCCl3Fp3VBnnuaA044Ztbh7fsUKogAKABOt0,993 +transformers/models/jetmoe/__pycache__/__init__.cpython-310.pyc,, +transformers/models/jetmoe/__pycache__/configuration_jetmoe.cpython-310.pyc,, +transformers/models/jetmoe/__pycache__/modeling_jetmoe.cpython-310.pyc,, +transformers/models/jetmoe/configuration_jetmoe.py,sha256=xp-1vQy18spKEumF7RJgpDrv1dkbWwTiKDlWDIBf2KM,6804 +transformers/models/jetmoe/modeling_jetmoe.py,sha256=u4jp9cC3aMt-GKC4QLag2gTUk7y2rfzyitc26La5R_0,67285 +transformers/models/kosmos2/__init__.py,sha256=Ow8cLelhxl6fm5XvXzNQtPLt1xjIdVmGUwz5NoVVVto,1033 +transformers/models/kosmos2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/kosmos2/__pycache__/configuration_kosmos2.cpython-310.pyc,, +transformers/models/kosmos2/__pycache__/convert_kosmos2_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/kosmos2/__pycache__/modeling_kosmos2.cpython-310.pyc,, 
+transformers/models/kosmos2/__pycache__/processing_kosmos2.cpython-310.pyc,, +transformers/models/kosmos2/configuration_kosmos2.py,sha256=E4I_iIyhD0dgA0vsI4uqEGifNGFd1YbfJu_GO2pmnEE,11880 +transformers/models/kosmos2/convert_kosmos2_original_pytorch_checkpoint_to_pytorch.py,sha256=3ejv6hUd6irzFnmSuFVI6Eu1NVWmtJf3_ql2h9P4AHk,2724 +transformers/models/kosmos2/modeling_kosmos2.py,sha256=uvOgyW8VRb5rdmj7kd9PnRwDa5j-dGnO7PvLo-Vsl2U,98127 +transformers/models/kosmos2/processing_kosmos2.py,sha256=jIYEs55absoFBqedLfjd6Tp7t8g4lX78v-r5c7GOPFI,31494 +transformers/models/layoutlm/__init__.py,sha256=Mv-k01_9_SxbADuSx2pWoNGBxgUe4IH15Kcg-vc_0OI,1124 +transformers/models/layoutlm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/layoutlm/__pycache__/configuration_layoutlm.cpython-310.pyc,, +transformers/models/layoutlm/__pycache__/modeling_layoutlm.cpython-310.pyc,, +transformers/models/layoutlm/__pycache__/modeling_tf_layoutlm.cpython-310.pyc,, +transformers/models/layoutlm/__pycache__/tokenization_layoutlm.cpython-310.pyc,, +transformers/models/layoutlm/__pycache__/tokenization_layoutlm_fast.cpython-310.pyc,, +transformers/models/layoutlm/configuration_layoutlm.py,sha256=xxDGkuaOKRhkkqlmME687WUHTTMlkpzh-8hVqIxrN04,9134 +transformers/models/layoutlm/modeling_layoutlm.py,sha256=HQYodTM7NF6KmqbWPb1LmopwAGFTnAp23WFjMwyOGgU,61223 +transformers/models/layoutlm/modeling_tf_layoutlm.py,sha256=P5lttgAuGDEVmDchmoYfFSG55FcFzlISQBpXnSb1Lq0,73365 +transformers/models/layoutlm/tokenization_layoutlm.py,sha256=VmytJNpv_8tgHL6eoSnlAA_CuZKNVCP0YVeiH7XhFRE,21294 +transformers/models/layoutlm/tokenization_layoutlm_fast.py,sha256=w98iEAqvfjQKbs_mke052pYTn2yIBe8MGe5SYJ0M0Ks,7824 +transformers/models/layoutlmv2/__init__.py,sha256=1w_91TbhMawClC5t7Bzc43MTFfHykZktH6WI3xPAOSQ,1229 +transformers/models/layoutlmv2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/layoutlmv2/__pycache__/configuration_layoutlmv2.cpython-310.pyc,, 
+transformers/models/layoutlmv2/__pycache__/feature_extraction_layoutlmv2.cpython-310.pyc,, +transformers/models/layoutlmv2/__pycache__/image_processing_layoutlmv2.cpython-310.pyc,, +transformers/models/layoutlmv2/__pycache__/modeling_layoutlmv2.cpython-310.pyc,, +transformers/models/layoutlmv2/__pycache__/processing_layoutlmv2.cpython-310.pyc,, +transformers/models/layoutlmv2/__pycache__/tokenization_layoutlmv2.cpython-310.pyc,, +transformers/models/layoutlmv2/__pycache__/tokenization_layoutlmv2_fast.cpython-310.pyc,, +transformers/models/layoutlmv2/configuration_layoutlmv2.py,sha256=E9lC45QK-thRX8thYV3oqncBAqlE62QIG4boZ5XPKKw,10914 +transformers/models/layoutlmv2/feature_extraction_layoutlmv2.py,sha256=F0wTQ0y6NVCgr3539d9BvmxY_cAaCTGgzmlJpRyw54o,1238 +transformers/models/layoutlmv2/image_processing_layoutlmv2.py,sha256=ore7EoSB64mFlJ5uy9zA6FtSb28QbX9_ymz8yLUnjp8,13496 +transformers/models/layoutlmv2/modeling_layoutlmv2.py,sha256=706aQE-mPMxGsHKbMHiJ_VGPrJWXPt0T0CIj6qkaRTw,62127 +transformers/models/layoutlmv2/processing_layoutlmv2.py,sha256=r0cXp_U_SgfqFGvVklwV_65pYJdbP1oZgD9qfho_vU4,9328 +transformers/models/layoutlmv2/tokenization_layoutlmv2.py,sha256=3zu1gnR70B_LGh44eHmS5giPSsEzFb6An8dtmsrb59Q,73219 +transformers/models/layoutlmv2/tokenization_layoutlmv2_fast.py,sha256=SMekhJaPdJogdBEoP_64FLaiG9PtwZQ0rYA0TSHxvtA,38140 +transformers/models/layoutlmv3/__init__.py,sha256=2iZqJqrr9RPk4_l3L-UMihFqDD0sdY-YYFIEUAHud3w,1271 +transformers/models/layoutlmv3/__pycache__/__init__.cpython-310.pyc,, +transformers/models/layoutlmv3/__pycache__/configuration_layoutlmv3.cpython-310.pyc,, +transformers/models/layoutlmv3/__pycache__/feature_extraction_layoutlmv3.cpython-310.pyc,, +transformers/models/layoutlmv3/__pycache__/image_processing_layoutlmv3.cpython-310.pyc,, +transformers/models/layoutlmv3/__pycache__/modeling_layoutlmv3.cpython-310.pyc,, +transformers/models/layoutlmv3/__pycache__/modeling_tf_layoutlmv3.cpython-310.pyc,, 
+transformers/models/layoutlmv3/__pycache__/processing_layoutlmv3.cpython-310.pyc,, +transformers/models/layoutlmv3/__pycache__/tokenization_layoutlmv3.cpython-310.pyc,, +transformers/models/layoutlmv3/__pycache__/tokenization_layoutlmv3_fast.cpython-310.pyc,, +transformers/models/layoutlmv3/configuration_layoutlmv3.py,sha256=7GaoEBAjg3vJaj-j6OxDCaKOpLpcgJVK5nXIPoZXzcE,13261 +transformers/models/layoutlmv3/feature_extraction_layoutlmv3.py,sha256=pETM5HZm6mKClv1_S5I032mMmyQ3c55p2izYTgx7glY,1238 +transformers/models/layoutlmv3/image_processing_layoutlmv3.py,sha256=Tanssi6TwE-1aChEnL7JdaNaDqU7496csm0_icY89Z0,18365 +transformers/models/layoutlmv3/modeling_layoutlmv3.py,sha256=Ucn5-mzOaTZu9DSsbKcfHUt1T-GNiBxytpLYiXrJXVA,60584 +transformers/models/layoutlmv3/modeling_tf_layoutlmv3.py,sha256=LN8SP7lOW-NPLyf2WydOha4ghqNqIyrBSUI9ezvqW1g,76978 +transformers/models/layoutlmv3/processing_layoutlmv3.py,sha256=-9MWqD-yhk6MI6CCQVJ0pyVa-cNgI_Ut7lLy8reyND0,9179 +transformers/models/layoutlmv3/tokenization_layoutlmv3.py,sha256=7kqYcH_Qwt53fsd1exkTCsPdwbdHuy4aZbyOOvccyo0,73227 +transformers/models/layoutlmv3/tokenization_layoutlmv3_fast.py,sha256=TpfwrMfvTMZmN3Zsws_3lgr3aOXehMfFek_tXnBLaCs,39919 +transformers/models/layoutxlm/__init__.py,sha256=djfI2YGJISwww_XDfyf4kCj3a_HiC6Hld1rlaHRtHPg,1047 +transformers/models/layoutxlm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/layoutxlm/__pycache__/processing_layoutxlm.cpython-310.pyc,, +transformers/models/layoutxlm/__pycache__/tokenization_layoutxlm.cpython-310.pyc,, +transformers/models/layoutxlm/__pycache__/tokenization_layoutxlm_fast.cpython-310.pyc,, +transformers/models/layoutxlm/processing_layoutxlm.py,sha256=4rk4UxnW0UaWAi931JoVCA6xTRQRWB2InoSswv7AepA,9278 +transformers/models/layoutxlm/tokenization_layoutxlm.py,sha256=jm7sRc5e-578_yV3ufZby-Z_kyoe9yINBv6vwVdNcHE,58242 +transformers/models/layoutxlm/tokenization_layoutxlm_fast.py,sha256=AyHviK71YozaXOwhp6jkq_Mjp4HA_woqRk4dVytFbtE,40620 
+transformers/models/led/__init__.py,sha256=KaOht9jIet9WQrPRli8DwD7q5fzTWsffxf7LK-sQuw4,1099 +transformers/models/led/__pycache__/__init__.cpython-310.pyc,, +transformers/models/led/__pycache__/configuration_led.cpython-310.pyc,, +transformers/models/led/__pycache__/modeling_led.cpython-310.pyc,, +transformers/models/led/__pycache__/modeling_tf_led.cpython-310.pyc,, +transformers/models/led/__pycache__/tokenization_led.cpython-310.pyc,, +transformers/models/led/__pycache__/tokenization_led_fast.cpython-310.pyc,, +transformers/models/led/configuration_led.py,sha256=0L6BqCbtr1jaUDQCY5NW3AArfHO6Ccw5d1IoPN8dKdM,7445 +transformers/models/led/modeling_led.py,sha256=qWNMDJh5YeVVshZ774KRh1vEPWeX1r4N3C7hUrfCRYw,138177 +transformers/models/led/modeling_tf_led.py,sha256=YG_yIkbczt_94ST4ZthmW02wrx4eg-pb2jphCHA-kps,123154 +transformers/models/led/tokenization_led.py,sha256=ggi2e6cabWsfvQdGcbnkBxV35-4PT3OMIw6D90d6Gvg,19865 +transformers/models/led/tokenization_led_fast.py,sha256=0dmDbm6pwKbJBkl8YFU4ZtqQObmRN6s5cG9NG9WThgs,14190 +transformers/models/levit/__init__.py,sha256=ZY6VTbq_W51UT3b2ZHY8Odf4YyUQzt_QB0eHsHtPWAw,1077 +transformers/models/levit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/levit/__pycache__/configuration_levit.cpython-310.pyc,, +transformers/models/levit/__pycache__/convert_levit_timm_to_pytorch.cpython-310.pyc,, +transformers/models/levit/__pycache__/feature_extraction_levit.cpython-310.pyc,, +transformers/models/levit/__pycache__/image_processing_levit.cpython-310.pyc,, +transformers/models/levit/__pycache__/modeling_levit.cpython-310.pyc,, +transformers/models/levit/configuration_levit.py,sha256=asZmZ6gnETFkfVHpHaQjI1y0n3iqNIqNjfsWwObyGQQ,5763 +transformers/models/levit/convert_levit_timm_to_pytorch.py,sha256=TN87M03CQV4LRb0jH0SCNTtfoZ8rGPvbLgykEC14kLE,6257 +transformers/models/levit/feature_extraction_levit.py,sha256=SQOki6yb9tPoHOCi--xlFaUPdCA2GH3lAQ7HPklGzPI,1242 
+transformers/models/levit/image_processing_levit.py,sha256=CTM6y0V6jdirUeEybMesPXE6sal94EGSxcmSVoHFHeU,16602 +transformers/models/levit/modeling_levit.py,sha256=d7hIISKEkpFbjhb14WbD6AV538_x5LYqXdUIK6Wn8Zk,29508 +transformers/models/lilt/__init__.py,sha256=9XEq7kJwN0mKO469mR0mtlRUdljjq7V80gejpqb59K0,989 +transformers/models/lilt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/lilt/__pycache__/configuration_lilt.cpython-310.pyc,, +transformers/models/lilt/__pycache__/modeling_lilt.cpython-310.pyc,, +transformers/models/lilt/configuration_lilt.py,sha256=vbN5535Vj5EprsxbZONYIZiK7LaxRdQlO-roFLvv0pM,6721 +transformers/models/lilt/modeling_lilt.py,sha256=w0KuPhiH9MHZaItjq7zlmmjrlqAPcTXHm0VVF1iD58I,52867 +transformers/models/llama/__init__.py,sha256=k1HnOc4-BwvgSizE8E0IlrkCh_TVgv1XX8G-xozfgLo,1111 +transformers/models/llama/__pycache__/__init__.cpython-310.pyc,, +transformers/models/llama/__pycache__/configuration_llama.cpython-310.pyc,, +transformers/models/llama/__pycache__/convert_llama_weights_to_hf.cpython-310.pyc,, +transformers/models/llama/__pycache__/modeling_flax_llama.cpython-310.pyc,, +transformers/models/llama/__pycache__/modeling_llama.cpython-310.pyc,, +transformers/models/llama/__pycache__/tokenization_llama.cpython-310.pyc,, +transformers/models/llama/__pycache__/tokenization_llama_fast.cpython-310.pyc,, +transformers/models/llama/configuration_llama.py,sha256=3djJ6d4pllpClKZN_23BfwlQyI1o6v9PrrnzsJM_N4Q,11761 +transformers/models/llama/convert_llama_weights_to_hf.py,sha256=0RoiDNp8Z1xH9E9eQD8yYADSbPxShddTEgZ8hNqGtf0,24137 +transformers/models/llama/modeling_flax_llama.py,sha256=iGRuks9vOKsPC633ersl5iOR_fbrwE18sg6qZH-a0z8,30652 +transformers/models/llama/modeling_llama.py,sha256=iXtm_1z6jLVSPrGRxTafHBNDY-svF5MWwqjbCjwXoNg,50891 +transformers/models/llama/tokenization_llama.py,sha256=Tu78jFgHHansqXOf6Fvu6AkDD2RYnvUu4VnysR0cQq4,18666 +transformers/models/llama/tokenization_llama_fast.py,sha256=zEuLIx-1wQTReiKzPclwSOC4dKwDEBwJCEDBrGbXaTs,11182 
+transformers/models/llava/__init__.py,sha256=FoNes_YNsdGJ2NjSSUD1ilY4ofieJ_oE54b6fCf36jc,1027 +transformers/models/llava/__pycache__/__init__.cpython-310.pyc,, +transformers/models/llava/__pycache__/configuration_llava.cpython-310.pyc,, +transformers/models/llava/__pycache__/convert_llava_weights_to_hf.cpython-310.pyc,, +transformers/models/llava/__pycache__/modeling_llava.cpython-310.pyc,, +transformers/models/llava/__pycache__/processing_llava.cpython-310.pyc,, +transformers/models/llava/configuration_llava.py,sha256=GomAlEmCqvDFKMsJX2nrTdYlt5uxReyh0iuGkVVqIVI,5787 +transformers/models/llava/convert_llava_weights_to_hf.py,sha256=lcWUyM08AWMkqzEaWGuWgaqltJkm_nmM4uTcGg2fDfY,7838 +transformers/models/llava/modeling_llava.py,sha256=aMj0Fc2OLzuevyYzydNgBcAmNxiZlQXoRFCAFV5VgMA,28764 +transformers/models/llava/processing_llava.py,sha256=Jbs2pnPZWvSBye3dH87Ha-diXtCtNn4XSt0wOx5BS-E,9303 +transformers/models/llava_next/__init__.py,sha256=Fvw6PybqBNbOQstZatv5dCA2hdQPff0OjUYvjCSAcdg,1089 +transformers/models/llava_next/__pycache__/__init__.cpython-310.pyc,, +transformers/models/llava_next/__pycache__/configuration_llava_next.cpython-310.pyc,, +transformers/models/llava_next/__pycache__/convert_llava_next_weights_to_hf.cpython-310.pyc,, +transformers/models/llava_next/__pycache__/image_processing_llava_next.cpython-310.pyc,, +transformers/models/llava_next/__pycache__/modeling_llava_next.cpython-310.pyc,, +transformers/models/llava_next/__pycache__/processing_llava_next.cpython-310.pyc,, +transformers/models/llava_next/configuration_llava_next.py,sha256=pahGETPZgvdk3v6VlAaMHU68gMm8ABdwAw01hr12A7A,6803 +transformers/models/llava_next/convert_llava_next_weights_to_hf.py,sha256=WpC-jvY4TtlZGpW9MCI82HUIcQl9lpsUZT_cmzfJqfU,20785 +transformers/models/llava_next/image_processing_llava_next.py,sha256=atY1NTfQqjzfOX9-78r85K_Lb2qgW3BDmjr6offpgo4,36523 +transformers/models/llava_next/modeling_llava_next.py,sha256=_3i-tIz68vLfzZqWrPhzbZ57mwIfY-dfpAadXNfrrto,48676 
+transformers/models/llava_next/processing_llava_next.py,sha256=uKRaPGZcEYaUogSRrxtqebo-i5HpYxubUBiBMQWqjQM,11521 +transformers/models/llava_next_video/__init__.py,sha256=OGiUL7X9x0bzmsnZi0KA6Sl2ycalLQHkTgOpISYu3q8,1113 +transformers/models/llava_next_video/__pycache__/__init__.cpython-310.pyc,, +transformers/models/llava_next_video/__pycache__/configuration_llava_next_video.cpython-310.pyc,, +transformers/models/llava_next_video/__pycache__/convert_llava_next_video_weights_to_hf.cpython-310.pyc,, +transformers/models/llava_next_video/__pycache__/image_processing_llava_next_video.cpython-310.pyc,, +transformers/models/llava_next_video/__pycache__/modeling_llava_next_video.cpython-310.pyc,, +transformers/models/llava_next_video/__pycache__/modular_llava_next_video.cpython-310.pyc,, +transformers/models/llava_next_video/__pycache__/processing_llava_next_video.cpython-310.pyc,, +transformers/models/llava_next_video/configuration_llava_next_video.py,sha256=j3KrlthwZHOmHGX3MqpXmHLWDTty53MOM61ZTTjVeIw,8247 +transformers/models/llava_next_video/convert_llava_next_video_weights_to_hf.py,sha256=-2knTp91EFTMB3vNXPGoaIW9u1s7yAXv1XySmGVTpJo,10511 +transformers/models/llava_next_video/image_processing_llava_next_video.py,sha256=ITMaeftLR0CWMT5OB-Nu9Hc65UVvEV3FutV_Lnevo_w,21470 +transformers/models/llava_next_video/modeling_llava_next_video.py,sha256=cRknEQTpjDLmRl5nLuGf7MclA-1Chq2e65h-LIZv3BQ,56702 +transformers/models/llava_next_video/modular_llava_next_video.py,sha256=JtE3O-nmMyUnR59SjfdOk5aUVJAePUwrDzV-_sxllDc,27604 +transformers/models/llava_next_video/processing_llava_next_video.py,sha256=A69avBCjChMd42Atp4wvEZ-u_deNNtKXSTPMfVaTviw,15121 +transformers/models/llava_onevision/__init__.py,sha256=mGRUmBSkY3zYIa1boJ1ZwYnqorXKOFaUqmsuROIxCIc,1161 +transformers/models/llava_onevision/__pycache__/__init__.cpython-310.pyc,, +transformers/models/llava_onevision/__pycache__/configuration_llava_onevision.cpython-310.pyc,, 
+transformers/models/llava_onevision/__pycache__/convert_llava_onevision_weights_to_hf.cpython-310.pyc,, +transformers/models/llava_onevision/__pycache__/image_processing_llava_onevision.cpython-310.pyc,, +transformers/models/llava_onevision/__pycache__/modeling_llava_onevision.cpython-310.pyc,, +transformers/models/llava_onevision/__pycache__/processing_llava_onevision.cpython-310.pyc,, +transformers/models/llava_onevision/__pycache__/video_processing_llava_onevision.cpython-310.pyc,, +transformers/models/llava_onevision/configuration_llava_onevision.py,sha256=L_DsGN4GSfAag-b3dtXX9NDcE-umVxMMMDU8YH_Qmx4,7861 +transformers/models/llava_onevision/convert_llava_onevision_weights_to_hf.py,sha256=Z7tFP1ARgWDWWUvdznw2o3r0LtwMJoM9ydmwiXBVgwQ,19878 +transformers/models/llava_onevision/image_processing_llava_onevision.py,sha256=LKnzfTEYxA_kBZAWg1RQUj5GS9-6w3ByLn4AJLMPH6g,34240 +transformers/models/llava_onevision/modeling_llava_onevision.py,sha256=p-P9ArB_ime3DLRVAvDJ0tE4OXv9GuPB0vRuL0r_xOs,41695 +transformers/models/llava_onevision/processing_llava_onevision.py,sha256=mKqZr4U87pwSygSfWhXXQBRQleTyZX6lani9t9TPZcg,15594 +transformers/models/llava_onevision/video_processing_llava_onevision.py,sha256=_6amoZxrxCWUx_EeopPf5WQ5QhwXCRUFJRihrgJoTpg,16946 +transformers/models/longformer/__init__.py,sha256=vg5ScmyEX2D-xPfnxNNBhdj6-Xj0t3HoPmt709PQjTE,1134 +transformers/models/longformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/longformer/__pycache__/configuration_longformer.cpython-310.pyc,, +transformers/models/longformer/__pycache__/convert_longformer_original_pytorch_lightning_to_pytorch.cpython-310.pyc,, +transformers/models/longformer/__pycache__/modeling_longformer.cpython-310.pyc,, +transformers/models/longformer/__pycache__/modeling_tf_longformer.cpython-310.pyc,, +transformers/models/longformer/__pycache__/tokenization_longformer.cpython-310.pyc,, +transformers/models/longformer/__pycache__/tokenization_longformer_fast.cpython-310.pyc,, 
+transformers/models/longformer/configuration_longformer.py,sha256=PugPJuv12EETU77Unxdp3tmQ6vGVa9kpjgRPhChZiWo,8822 +transformers/models/longformer/convert_longformer_original_pytorch_lightning_to_pytorch.py,sha256=bt-0zsqIGsD8WONirTLHw44ZjIJf-NvMXlLwIg5rqJk,3025 +transformers/models/longformer/modeling_longformer.py,sha256=4B78g4uNVGU6PKfWJ1G1UB_9ZBM6LCcfz43BfSN4KG4,114116 +transformers/models/longformer/modeling_tf_longformer.py,sha256=5YDMosU1eAxh613cCXPzlUVD4-kg-Rb0c60MMVjN9Ck,129648 +transformers/models/longformer/tokenization_longformer.py,sha256=4oTvnk6y6Kgpw40mKMWuoXvX19wdetSA86M0cPw3TJ4,16833 +transformers/models/longformer/tokenization_longformer_fast.py,sha256=ysIwuVAdWqTlirIcbCa7_SJTD8-tTZhr5Z9kVNDLg20,11243 +transformers/models/longt5/__init__.py,sha256=TzoI1JGkvJIf9NlHDQY8_EUuW-upkQZ23wh_8Urtet0,1033 +transformers/models/longt5/__pycache__/__init__.cpython-310.pyc,, +transformers/models/longt5/__pycache__/configuration_longt5.cpython-310.pyc,, +transformers/models/longt5/__pycache__/convert_longt5x_checkpoint_to_flax.cpython-310.pyc,, +transformers/models/longt5/__pycache__/modeling_flax_longt5.cpython-310.pyc,, +transformers/models/longt5/__pycache__/modeling_longt5.cpython-310.pyc,, +transformers/models/longt5/configuration_longt5.py,sha256=s6wNUR2DwLe7sVM-3stgsGwlwZ2wLqapd2nhYORCSlw,8107 +transformers/models/longt5/convert_longt5x_checkpoint_to_flax.py,sha256=att5rZjP3_mlIRv1icJO3wxQxuwFYEvCz4bFr2seW4k,11091 +transformers/models/longt5/modeling_flax_longt5.py,sha256=A8cA-Tk2m6QxXvcE2F0rw0AMtKL3t7i7r0rgclhfqvk,105769 +transformers/models/longt5/modeling_longt5.py,sha256=O3I23R9-5d8EgErYtLI2ZfXg-SxjKXBEoQGwjb5uun8,112300 +transformers/models/luke/__init__.py,sha256=YQL403sV6tk5t8sjvi-4hgvx1rvyThx45l7S4T4xpEE,1026 +transformers/models/luke/__pycache__/__init__.cpython-310.pyc,, +transformers/models/luke/__pycache__/configuration_luke.cpython-310.pyc,, 
+transformers/models/luke/__pycache__/convert_luke_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/luke/__pycache__/modeling_luke.cpython-310.pyc,, +transformers/models/luke/__pycache__/tokenization_luke.cpython-310.pyc,, +transformers/models/luke/configuration_luke.py,sha256=Th-ke2nWmSnDRZNu_0_DnYFbAzzzEj6Gct6YCR-nlb0,6620 +transformers/models/luke/convert_luke_original_pytorch_checkpoint_to_pytorch.py,sha256=pfnDfBvJDRyCLBLdcsalZaKV01aEz0W1og2Z364hTDs,7467 +transformers/models/luke/modeling_luke.py,sha256=NppKcmXpqWoxMQFzceWfs1jtPY_32NlDO9qRqvopxXo,104090 +transformers/models/luke/tokenization_luke.py,sha256=cS-ZLe07IvCpA2tMtMJhrI-G6mTEYC9wpyhxPQ0v67E,85678 +transformers/models/lxmert/__init__.py,sha256=iUyLmlBuiz_av7H5ghaQB4RNbpw275N7wwdmiiV0PAc,1114 +transformers/models/lxmert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/lxmert/__pycache__/configuration_lxmert.cpython-310.pyc,, +transformers/models/lxmert/__pycache__/convert_lxmert_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/lxmert/__pycache__/modeling_lxmert.cpython-310.pyc,, +transformers/models/lxmert/__pycache__/modeling_tf_lxmert.cpython-310.pyc,, +transformers/models/lxmert/__pycache__/tokenization_lxmert.cpython-310.pyc,, +transformers/models/lxmert/__pycache__/tokenization_lxmert_fast.cpython-310.pyc,, +transformers/models/lxmert/configuration_lxmert.py,sha256=ScIeG49SSLVuwrj25wKY85troVtNniZdjnuZcrewx1g,8934 +transformers/models/lxmert/convert_lxmert_original_tf_checkpoint_to_pytorch.py,sha256=Ps5iNo91Yj3XLkEuXrP2KFLSjWhYJyP-1vtqMyt1Lqk,2108 +transformers/models/lxmert/modeling_lxmert.py,sha256=A7hb7XFbzjQAmAnlteFp_y1VjWZOt8vJnD-dJz9M1Is,66043 +transformers/models/lxmert/modeling_tf_lxmert.py,sha256=TrJItgAEXtQx_TCHmOTUHUyMvmxp3DyASG6bwYglj50,72777 +transformers/models/lxmert/tokenization_lxmert.py,sha256=BbNrfRfJZr1Y7uGS6XOh9CJkpP0AJ3e_tq-636nXaBg,21316 
+transformers/models/lxmert/tokenization_lxmert_fast.py,sha256=eujDqy2iYa1Tz7a5WNBYgaRa9f6yh2B00jSH6h4Ez6o,7756 +transformers/models/m2m_100/__init__.py,sha256=0uPov299rgQmMwwSyM_m0yGFejP5djgaUY37GkNGnC8,1035 +transformers/models/m2m_100/__pycache__/__init__.cpython-310.pyc,, +transformers/models/m2m_100/__pycache__/configuration_m2m_100.cpython-310.pyc,, +transformers/models/m2m_100/__pycache__/convert_m2m100_original_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/m2m_100/__pycache__/modeling_m2m_100.cpython-310.pyc,, +transformers/models/m2m_100/__pycache__/tokenization_m2m_100.cpython-310.pyc,, +transformers/models/m2m_100/configuration_m2m_100.py,sha256=CsVEF-ussIdQvMYTwbtLzEdDuEko_EdPN5SL83JH3Ss,13411 +transformers/models/m2m_100/convert_m2m100_original_checkpoint_to_pytorch.py,sha256=xNG8NE20odOve8Z1zKPDHJr5Ev8jM30N-mJsJqfsXtM,3159 +transformers/models/m2m_100/modeling_m2m_100.py,sha256=eCvND0xFN3pUBtq3V6o6tdfOF0zCYXHNVb9uaozjUAE,79232 +transformers/models/m2m_100/tokenization_m2m_100.py,sha256=7sTz3PU6MkKKy6RuAQg5rhHKKfPyB_aSj-rqx_L4vYU,16353 +transformers/models/mamba/__init__.py,sha256=4oGJySQbwoALRGVWMEwXBm0A6fhKsr4Raly46a5g1G0,991 +transformers/models/mamba/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mamba/__pycache__/configuration_mamba.cpython-310.pyc,, +transformers/models/mamba/__pycache__/convert_mamba_ssm_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mamba/__pycache__/modeling_mamba.cpython-310.pyc,, +transformers/models/mamba/configuration_mamba.py,sha256=jLOixlXsTzDBM_q3rV0cVNg8CU8RSqPf2Um7vlm2tho,7432 +transformers/models/mamba/convert_mamba_ssm_checkpoint_to_pytorch.py,sha256=BK6M1tYEwLwoM7NE3fdm0BRpZH8LvCTGvdvsee9taZA,6454 +transformers/models/mamba/modeling_mamba.py,sha256=cgpY8QXvC2KUlQfxsU3z5kjyCPtKzI_T3WfpG6UC4f8,38061 +transformers/models/mamba2/__init__.py,sha256=Ui4j-I2cnPEEszkzRTLSUW42SE4Qg1YTuW6hGeaOFZg,993 +transformers/models/mamba2/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/mamba2/__pycache__/configuration_mamba2.cpython-310.pyc,, +transformers/models/mamba2/__pycache__/convert_mamba2_ssm_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mamba2/__pycache__/modeling_mamba2.cpython-310.pyc,, +transformers/models/mamba2/configuration_mamba2.py,sha256=-aPC9WlLdLSiPtfiOmsuILLMiiBTL2WrBQkYKPycShw,7918 +transformers/models/mamba2/convert_mamba2_ssm_checkpoint_to_pytorch.py,sha256=Pa2GsK7tP3N1uPYzDvq9A39BqFIWNgIAcFkQ_9EiBUs,7515 +transformers/models/mamba2/modeling_mamba2.py,sha256=g2hdeFwN9leP78qNWi7TgtcOZR_8g8ALj0C2SAegT9s,51930 +transformers/models/marian/__init__.py,sha256=Yg8jbvM0Hf6WXua0__v_G-34dvG6zFib5R5e_qHtmYM,1110 +transformers/models/marian/__pycache__/__init__.cpython-310.pyc,, +transformers/models/marian/__pycache__/configuration_marian.cpython-310.pyc,, +transformers/models/marian/__pycache__/convert_marian_tatoeba_to_pytorch.cpython-310.pyc,, +transformers/models/marian/__pycache__/convert_marian_to_pytorch.cpython-310.pyc,, +transformers/models/marian/__pycache__/modeling_flax_marian.cpython-310.pyc,, +transformers/models/marian/__pycache__/modeling_marian.cpython-310.pyc,, +transformers/models/marian/__pycache__/modeling_tf_marian.cpython-310.pyc,, +transformers/models/marian/__pycache__/tokenization_marian.cpython-310.pyc,, +transformers/models/marian/configuration_marian.py,sha256=Dv_60lB0pJolwdN-1onmOp_dLaTEIHUE2FvujXcjShs,18377 +transformers/models/marian/convert_marian_tatoeba_to_pytorch.py,sha256=KXL31oNi5WuLN25fwurL1ujDwItoQYn-FmTOF2PXTUM,36375 +transformers/models/marian/convert_marian_to_pytorch.py,sha256=7xWRLSnFcqxnErnWHoOkwhjEvgrer1JgNU6bhDwYXtY,27124 +transformers/models/marian/modeling_flax_marian.py,sha256=mdLvaEt8x5AX_EHipw6CVpuIxKAAg-Efn0GuSkaTw1c,64343 +transformers/models/marian/modeling_marian.py,sha256=8952BlJiRbgUrAx5_4897LyH93T53V-H3-3P7B3HSMc,79576 +transformers/models/marian/modeling_tf_marian.py,sha256=pGmWoXmGt1bj-hWyy1utMZC1BX90_wrkg9xuJFd3lS0,72756 
+transformers/models/marian/tokenization_marian.py,sha256=uM-1KmZ_WXyeMEGXuz3zobsOthMPMB32DUhcsc6h-So,16844 +transformers/models/markuplm/__init__.py,sha256=PyhrxFsms-oD4SOBO5j3t2mIPLN3PHjKBjTGaUTITMY,1170 +transformers/models/markuplm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/markuplm/__pycache__/configuration_markuplm.cpython-310.pyc,, +transformers/models/markuplm/__pycache__/feature_extraction_markuplm.cpython-310.pyc,, +transformers/models/markuplm/__pycache__/modeling_markuplm.cpython-310.pyc,, +transformers/models/markuplm/__pycache__/processing_markuplm.cpython-310.pyc,, +transformers/models/markuplm/__pycache__/tokenization_markuplm.cpython-310.pyc,, +transformers/models/markuplm/__pycache__/tokenization_markuplm_fast.cpython-310.pyc,, +transformers/models/markuplm/configuration_markuplm.py,sha256=70RVe4KsIBXJBvY3uEOBk83YoDaxC0NSh0GGYjZcfv8,7342 +transformers/models/markuplm/feature_extraction_markuplm.py,sha256=5TaHlA8AsJQXC7tq0c2I5XDJalRdrfVMduE50H6ne8o,6449 +transformers/models/markuplm/modeling_markuplm.py,sha256=34_MWgbEpSE4_zRXBoXORWYCQwSDcBlaHqNrRvWX6AI,57310 +transformers/models/markuplm/processing_markuplm.py,sha256=WuabRmuYMRBgWn3y4aLlwx4Dff8NEnXmu7GNU41DGko,6383 +transformers/models/markuplm/tokenization_markuplm.py,sha256=pTjbIQ4OqbRT-6kh6i_n8nKplClX7GYnLOWkdtnzJzg,70142 +transformers/models/markuplm/tokenization_markuplm_fast.py,sha256=H9s2Wj1BTJtdV_GeilV1csAyIWgMxo0NoVPRoa1ke6U,43324 +transformers/models/mask2former/__init__.py,sha256=6gmVc8RS8CDX2nkBzyySXTjdw61BJgjiIukresOTuFg,1051 +transformers/models/mask2former/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mask2former/__pycache__/configuration_mask2former.cpython-310.pyc,, +transformers/models/mask2former/__pycache__/convert_mask2former_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mask2former/__pycache__/image_processing_mask2former.cpython-310.pyc,, 
+transformers/models/mask2former/__pycache__/modeling_mask2former.cpython-310.pyc,, +transformers/models/mask2former/configuration_mask2former.py,sha256=S80wQDek68I0jIWZ2Y8DXjZyh-l3SVAwdhnAyYCHMiY,12375 +transformers/models/mask2former/convert_mask2former_original_pytorch_checkpoint_to_pytorch.py,sha256=v4a-VTdnEHxZLAykOn5AgqLXZ9yFZzhY4CUu4c3XHUE,45688 +transformers/models/mask2former/image_processing_mask2former.py,sha256=MMKOAXSXL_S3PozL5RssgkHtDWhrNB2iP2NRN1OOrKw,57296 +transformers/models/mask2former/modeling_mask2former.py,sha256=QpISz6oA-bpNoayW5pAd57m0ewIS7s_bH2u_iji5hn4,122040 +transformers/models/maskformer/__init__.py,sha256=gNY7kNWBY38tpjXbqjijMoGOOQBzju9Woxs7svG09es,1190 +transformers/models/maskformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/configuration_maskformer.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/configuration_maskformer_swin.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/convert_maskformer_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/convert_maskformer_resnet_to_pytorch.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/convert_maskformer_swin_to_pytorch.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/feature_extraction_maskformer.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/image_processing_maskformer.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/modeling_maskformer.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/modeling_maskformer_swin.cpython-310.pyc,, +transformers/models/maskformer/configuration_maskformer.py,sha256=GRAhXC4xsMk2-SGRksQVBEeVz11ZhKLZLetwfxprTl0,10293 +transformers/models/maskformer/configuration_maskformer_swin.py,sha256=HbFVMCwZaLJjr8HgN1tDd4z7NBhH3vQBkdPpKQSOF9I,7253 +transformers/models/maskformer/convert_maskformer_original_pytorch_checkpoint_to_pytorch.py,sha256=-qpfICbzwlGqhMNaS36phSUp-aM_qkWO3uw6Md6ntk0,32341 
+transformers/models/maskformer/convert_maskformer_resnet_to_pytorch.py,sha256=HaN2doNGYc8j4quw3ECspQCY2lzJ1NVaaMKSZM9wN0Y,20837 +transformers/models/maskformer/convert_maskformer_swin_to_pytorch.py,sha256=ilBQYg7BYwVmcKBYO1LiD6m9-BMoq00xXiyKEKOqy3Q,20429 +transformers/models/maskformer/feature_extraction_maskformer.py,sha256=8JchapZ-d7HrbJxFPnd1v9eQLxUWGcJMY1IsS7twwHA,1257 +transformers/models/maskformer/image_processing_maskformer.py,sha256=RluOw5hWmpuNB509A30snwPerMM8Cx-c9uWEdLm1rZ0,58190 +transformers/models/maskformer/modeling_maskformer.py,sha256=4u_Y_JM2SB9uvj4Ba5l2xbMBpySpGvqs0TvthB19sjc,90872 +transformers/models/maskformer/modeling_maskformer_swin.py,sha256=mBuo1xCCfIlYpiOjRwnfpPPtthztzT3W5XN4gFAKp-8,43072 +transformers/models/mbart/__init__.py,sha256=VefKwprf7OVOTgkXowKV2hT8X3mM369sRJXDY5a49ig,1148 +transformers/models/mbart/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mbart/__pycache__/configuration_mbart.cpython-310.pyc,, +transformers/models/mbart/__pycache__/convert_mbart_original_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mbart/__pycache__/modeling_flax_mbart.cpython-310.pyc,, +transformers/models/mbart/__pycache__/modeling_mbart.cpython-310.pyc,, +transformers/models/mbart/__pycache__/modeling_tf_mbart.cpython-310.pyc,, +transformers/models/mbart/__pycache__/tokenization_mbart.cpython-310.pyc,, +transformers/models/mbart/__pycache__/tokenization_mbart_fast.cpython-310.pyc,, +transformers/models/mbart/configuration_mbart.py,sha256=e4Bz1c8ZW2BKGPQPsJErgIT-ZJTJoAmZTQl8x-Np2vo,18209 +transformers/models/mbart/convert_mbart_original_checkpoint_to_pytorch.py,sha256=xVW9Mj-jd7X_MImJCgS52Aok1CGPf-E6u8ptvG1hK8o,3035 +transformers/models/mbart/modeling_flax_mbart.py,sha256=lXjbKiHARX8jIqqD05Jx8B9nuqpaRecdwKRQbUQ-9kk,75287 +transformers/models/mbart/modeling_mbart.py,sha256=RCgS7qEo3HT51dwL0xUCuRVc53RNpcq7g1SruN3kp2g,101839 +transformers/models/mbart/modeling_tf_mbart.py,sha256=huZgEbvXI_D_0bORe1uAM71BWbgSqxPmBjexQOMQFYM,74283 
+transformers/models/mbart/tokenization_mbart.py,sha256=nFSUWWKpbYunRMV69t-w6ib__vqJTQskBQMl6KiuhkM,14137 +transformers/models/mbart/tokenization_mbart_fast.py,sha256=q2Ol2COBvP-Cmi7Uzdet1ofbYC3l8FWVfxcir5MwBmA,11032 +transformers/models/mbart50/__init__.py,sha256=9ukVFi1NqU3OoJcCJ-iKpJUZiu-K0t8yINuJHGltup0,1003 +transformers/models/mbart50/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mbart50/__pycache__/tokenization_mbart50.cpython-310.pyc,, +transformers/models/mbart50/__pycache__/tokenization_mbart50_fast.cpython-310.pyc,, +transformers/models/mbart50/tokenization_mbart50.py,sha256=nOOpupssZR8xpbBOGae57GMDfiTOc16SDIV_u9WeQnU,16340 +transformers/models/mbart50/tokenization_mbart50_fast.py,sha256=oQj0N-DTI33kyykNqyJ9VJLw0SGxdgrdRXa-QarlfaM,11631 +transformers/models/megatron_bert/__init__.py,sha256=u1UIYjQlrfHcy81i2FzehRDJpt6KNfNJ4AePQYKgwOU,1007 +transformers/models/megatron_bert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/megatron_bert/__pycache__/configuration_megatron_bert.cpython-310.pyc,, +transformers/models/megatron_bert/__pycache__/convert_megatron_bert_checkpoint.cpython-310.pyc,, +transformers/models/megatron_bert/__pycache__/modeling_megatron_bert.cpython-310.pyc,, +transformers/models/megatron_bert/configuration_megatron_bert.py,sha256=amM48KV4ndrBgCKY4opvk10mGucRngBb_mMQDO_RPiI,6501 +transformers/models/megatron_bert/convert_megatron_bert_checkpoint.py,sha256=VAMD1MFdVG8w9cQkRfmlZCEvaMgoo-lyFI9deunD5OA,13686 +transformers/models/megatron_bert/modeling_megatron_bert.py,sha256=JBmO0MiExIJ6fubqqYJTamVLR-rYFm4EUhstlCMogI0,82885 +transformers/models/megatron_gpt2/__init__.py,sha256=WycFl9cUevoXIBhB76qKtnNRIPMk2LoTDkmkfAfOy9M,630 +transformers/models/megatron_gpt2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/megatron_gpt2/__pycache__/checkpoint_reshaping_and_interoperability.cpython-310.pyc,, +transformers/models/megatron_gpt2/__pycache__/convert_megatron_gpt2_checkpoint.cpython-310.pyc,, 
+transformers/models/megatron_gpt2/checkpoint_reshaping_and_interoperability.py,sha256=2yI0NmcgRXJ54yjDcug5NdtHNeyDb7UM66EFdhQoLaU,37444 +transformers/models/megatron_gpt2/convert_megatron_gpt2_checkpoint.py,sha256=UPLXCjF4Fixnw_gy6kzxTK64ioxo_EIxwSVO6oKCqqQ,13661 +transformers/models/mgp_str/__init__.py,sha256=Qb3mXPCrWbQ1ksMRYMeXorrva97OOFNr1zoy4YQg-9k,1073 +transformers/models/mgp_str/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mgp_str/__pycache__/configuration_mgp_str.cpython-310.pyc,, +transformers/models/mgp_str/__pycache__/modeling_mgp_str.cpython-310.pyc,, +transformers/models/mgp_str/__pycache__/processing_mgp_str.cpython-310.pyc,, +transformers/models/mgp_str/__pycache__/tokenization_mgp_str.cpython-310.pyc,, +transformers/models/mgp_str/configuration_mgp_str.py,sha256=HYlPZgcF71-Qr_TTba6rkf3P5qFm0dNJrfywOEE5DTU,5810 +transformers/models/mgp_str/modeling_mgp_str.py,sha256=BdxKDPksoHrSC8PGtImWMEjGtyY0SNZjuSNkxPFj13k,22010 +transformers/models/mgp_str/processing_mgp_str.py,sha256=Qm2OduvUf4Yq2nUYxdIEx9eMGSfujsEzxuVOL6A_CLI,9330 +transformers/models/mgp_str/tokenization_mgp_str.py,sha256=8U0UW-dlrKNzwBqiPOdfR0ydmlULY1bcaZvhkwDmNuA,3808 +transformers/models/mimi/__init__.py,sha256=VXRZ-D8-AyOYcmRGvSxhjwTYQcSNXcCXi5ubks6Qxhk,989 +transformers/models/mimi/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mimi/__pycache__/configuration_mimi.cpython-310.pyc,, +transformers/models/mimi/__pycache__/convert_mimi_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mimi/__pycache__/modeling_mimi.cpython-310.pyc,, +transformers/models/mimi/configuration_mimi.py,sha256=G9Il35tvzshXo694vG87mnVvI1NIDwYeDCFt8b1sUgM,11921 +transformers/models/mimi/convert_mimi_checkpoint_to_pytorch.py,sha256=aeCILFdZTaapdCj-vUfwKf__fxCC9quZRj1j1LbaQz8,6797 +transformers/models/mimi/modeling_mimi.py,sha256=Iot7fl8EuXSxbv4Tzx6qQ8sshgI2GnadldC_ZnFIb50,83273 +transformers/models/mistral/__init__.py,sha256=Gd3l8JZ-Oxe8fvqYKH1BW_GI4Pvc4sYy89_5h9hosFI,3248 
+transformers/models/mistral/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mistral/__pycache__/configuration_mistral.cpython-310.pyc,, +transformers/models/mistral/__pycache__/convert_mistral_weights_to_hf.cpython-310.pyc,, +transformers/models/mistral/__pycache__/modeling_flax_mistral.cpython-310.pyc,, +transformers/models/mistral/__pycache__/modeling_mistral.cpython-310.pyc,, +transformers/models/mistral/__pycache__/modeling_tf_mistral.cpython-310.pyc,, +transformers/models/mistral/__pycache__/modular_mistral.cpython-310.pyc,, +transformers/models/mistral/configuration_mistral.py,sha256=xkMi23SM51G8-eGsZhO_0sJtXGTkW9g_ZSXGjdEjWck,7534 +transformers/models/mistral/convert_mistral_weights_to_hf.py,sha256=AzsbN26L3zoRpPTpfdtOafNjptY0wQx26SRH2gNI5Uo,10780 +transformers/models/mistral/modeling_flax_mistral.py,sha256=M2Fio6tl63E0fATg077-CwLoT1iNV7pZWmquxNvOTAI,31682 +transformers/models/mistral/modeling_mistral.py,sha256=1IwuPr7EErj0oGbfx2epszV-Czdcvn3NS13WQeDcUro,52361 +transformers/models/mistral/modeling_tf_mistral.py,sha256=wEjTFfDOFp2z3-5Fb53D8XTKVsLfTaabBvvRXdSpjKo,45264 +transformers/models/mistral/modular_mistral.py,sha256=ms2uDorKpuB9bHSUlyOXvfEUfCPigQ1AGq-i8885HP4,16028 +transformers/models/mixtral/__init__.py,sha256=K-r1Mh5wzRlbIfzTr9PSpvZqZPUwIgdXMCEBK7WOPn4,1890 +transformers/models/mixtral/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mixtral/__pycache__/configuration_mixtral.cpython-310.pyc,, +transformers/models/mixtral/__pycache__/convert_mixtral_weights_to_hf.cpython-310.pyc,, +transformers/models/mixtral/__pycache__/modeling_mixtral.cpython-310.pyc,, +transformers/models/mixtral/__pycache__/modular_mixtral.cpython-310.pyc,, +transformers/models/mixtral/configuration_mixtral.py,sha256=0Ot5feJcxZer1bINS0DNYtH-SuzzDq8g4zsZY-aJGVM,8366 +transformers/models/mixtral/convert_mixtral_weights_to_hf.py,sha256=WExicalIwkZccqWyRjUU2LBvbL6cM6yiOG_Oby6t3Ok,9156 
+transformers/models/mixtral/modeling_mixtral.py,sha256=CAtmJaYJRFFQMbtVm4EOh075zqDDAsgk-u9rteZ9uDk,63459 +transformers/models/mixtral/modular_mixtral.py,sha256=HksJTLMnJoZ7TOoPQVzHt-3_3UYmZK3WxOsrtdcHIM8,23989 +transformers/models/mllama/__init__.py,sha256=2lTGCiL6EZirXNcu4aKV7vSmv50iRsQnCV-c9sahNXg,1073 +transformers/models/mllama/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mllama/__pycache__/configuration_mllama.cpython-310.pyc,, +transformers/models/mllama/__pycache__/convert_mllama_weights_to_hf.cpython-310.pyc,, +transformers/models/mllama/__pycache__/image_processing_mllama.cpython-310.pyc,, +transformers/models/mllama/__pycache__/modeling_mllama.cpython-310.pyc,, +transformers/models/mllama/__pycache__/processing_mllama.cpython-310.pyc,, +transformers/models/mllama/configuration_mllama.py,sha256=b6NUWeOFTa6Oz85ouPqsVRvRPp-E0aqq8fqCefhxPAg,18146 +transformers/models/mllama/convert_mllama_weights_to_hf.py,sha256=rSbdea3kATEFE1-OEzdnjTse1ZSi6FM7zg1fVAxdLas,29851 +transformers/models/mllama/image_processing_mllama.py,sha256=9NUKmWLNPNReizz9SCcclS6x-tveMskTw7vFkc7dFUM,39213 +transformers/models/mllama/modeling_mllama.py,sha256=RaI5aaBsvMk_VrIxqKlUqq5uL2bflG65xu8qjXgTD8Y,105526 +transformers/models/mllama/processing_mllama.py,sha256=-AQ_ghEUtBj3eV6Go1u85EGdGL7cR1pCV5UVglcfxHM,16309 +transformers/models/mluke/__init__.py,sha256=e_3cNftWOmhNXk-zsA1-2DOBT9L56SHr-6qev0xI7Ws,956 +transformers/models/mluke/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mluke/__pycache__/convert_mluke_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mluke/__pycache__/tokenization_mluke.cpython-310.pyc,, +transformers/models/mluke/convert_mluke_original_pytorch_checkpoint_to_pytorch.py,sha256=G6Z94-1_AiilSTU96PSjX_pdgFIx-b_bk8xlMKX5TuE,10185 +transformers/models/mluke/tokenization_mluke.py,sha256=8jrk37E5OS1Vxf1FZXkZoLD1xZWnVWDp8CBQMtpHjUE,82104 
+transformers/models/mobilebert/__init__.py,sha256=Jy7IZ2oQAjyE_KOoT-I7Z9bqPRVLfsOwx8XY3Y43RFc,1134 +transformers/models/mobilebert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mobilebert/__pycache__/configuration_mobilebert.cpython-310.pyc,, +transformers/models/mobilebert/__pycache__/convert_mobilebert_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mobilebert/__pycache__/modeling_mobilebert.cpython-310.pyc,, +transformers/models/mobilebert/__pycache__/modeling_tf_mobilebert.cpython-310.pyc,, +transformers/models/mobilebert/__pycache__/tokenization_mobilebert.cpython-310.pyc,, +transformers/models/mobilebert/__pycache__/tokenization_mobilebert_fast.cpython-310.pyc,, +transformers/models/mobilebert/configuration_mobilebert.py,sha256=xE45pc42VCyNfA3dDQSaa1PBq9sGiHyh6M5TpeLI7Sc,8274 +transformers/models/mobilebert/convert_mobilebert_original_tf_checkpoint_to_pytorch.py,sha256=MRW9sorswIo4RiWq7PVVmaZsYm4wJEc1-DhcLzssDRU,2200 +transformers/models/mobilebert/modeling_mobilebert.py,sha256=p2xG2mYD8wtpkAAYRcUXp_rjG8zIppejoz61CryRL8c,70973 +transformers/models/mobilebert/modeling_tf_mobilebert.py,sha256=yi0BW1vZCHDFvL5BKW56FUFSC42SCLvuPWwAh8SPlOY,84096 +transformers/models/mobilebert/tokenization_mobilebert.py,sha256=wdTDWMIuroy9XIgcc0YAE8jH4dHqv5Lzr_q-WoRa8CQ,21304 +transformers/models/mobilebert/tokenization_mobilebert_fast.py,sha256=SO23cnn17U1YmVkRb27jKg-0OXNzFHKag-etFKHYHlM,7838 +transformers/models/mobilenet_v1/__init__.py,sha256=3U7ptbKYiiXR37wVJRbjEKXSe1YBQr03WxtkvkY7lpE,1105 +transformers/models/mobilenet_v1/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mobilenet_v1/__pycache__/configuration_mobilenet_v1.cpython-310.pyc,, +transformers/models/mobilenet_v1/__pycache__/convert_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mobilenet_v1/__pycache__/feature_extraction_mobilenet_v1.cpython-310.pyc,, 
+transformers/models/mobilenet_v1/__pycache__/image_processing_mobilenet_v1.cpython-310.pyc,, +transformers/models/mobilenet_v1/__pycache__/modeling_mobilenet_v1.cpython-310.pyc,, +transformers/models/mobilenet_v1/configuration_mobilenet_v1.py,sha256=v5hjZ9pV9ZigSYs-3HuJ6t2EPDPKMpvNxGExvvr7egQ,4930 +transformers/models/mobilenet_v1/convert_original_tf_checkpoint_to_pytorch.py,sha256=kT9UyoJ0ZfpG2-oVhG8TNs18R3cTgXUnK1CcC-57IYM,4931 +transformers/models/mobilenet_v1/feature_extraction_mobilenet_v1.py,sha256=anErZjMwKNsiNUdDtE9dIkppxsEUU-IU298ctbE2B94,1266 +transformers/models/mobilenet_v1/image_processing_mobilenet_v1.py,sha256=wViNCNs_3wB0BpNfCqzwNMxzgWJfZhWnLsBOSoNxQI8,15280 +transformers/models/mobilenet_v1/modeling_mobilenet_v1.py,sha256=vEoD4MW4Ytp_NWVtaBcIZZgWuPvVAakyeuwg3O3Mhz8,18748 +transformers/models/mobilenet_v2/__init__.py,sha256=n-a4qG6zDuMSEnOYeIsqErxILr_0z3m1zanJksulIa4,1105 +transformers/models/mobilenet_v2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mobilenet_v2/__pycache__/configuration_mobilenet_v2.cpython-310.pyc,, +transformers/models/mobilenet_v2/__pycache__/convert_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mobilenet_v2/__pycache__/feature_extraction_mobilenet_v2.cpython-310.pyc,, +transformers/models/mobilenet_v2/__pycache__/image_processing_mobilenet_v2.cpython-310.pyc,, +transformers/models/mobilenet_v2/__pycache__/modeling_mobilenet_v2.cpython-310.pyc,, +transformers/models/mobilenet_v2/configuration_mobilenet_v2.py,sha256=ddJ4XJItvwVjK3QY9tjKuKOpUuOLmODC9rVMpv--gPc,6826 +transformers/models/mobilenet_v2/convert_original_tf_checkpoint_to_pytorch.py,sha256=mJVhzYCMvutwkrULewE2GGsVOxOsW4wjjE8XzTWlWIk,6401 +transformers/models/mobilenet_v2/feature_extraction_mobilenet_v2.py,sha256=0tWo6rF6EIg06KW21VYuu_6nOt0FpuGKLT6bmNXHmag,1266 +transformers/models/mobilenet_v2/image_processing_mobilenet_v2.py,sha256=tCiIyGu9K3nFYC6X90pohk5d3KcZOOnZBW_yVkw8KIk,17634 
+transformers/models/mobilenet_v2/modeling_mobilenet_v2.py,sha256=dXVsvtnCL002K6Dl9X6PSVsYlL9uckAEJGDMZqdcwkc,34677 +transformers/models/mobilevit/__init__.py,sha256=v313uWvioi8yQuYM408mf0aEWVNcwFHjBeplAo6GtV0,1134 +transformers/models/mobilevit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mobilevit/__pycache__/configuration_mobilevit.cpython-310.pyc,, +transformers/models/mobilevit/__pycache__/convert_mlcvnets_to_pytorch.cpython-310.pyc,, +transformers/models/mobilevit/__pycache__/feature_extraction_mobilevit.cpython-310.pyc,, +transformers/models/mobilevit/__pycache__/image_processing_mobilevit.cpython-310.pyc,, +transformers/models/mobilevit/__pycache__/modeling_mobilevit.cpython-310.pyc,, +transformers/models/mobilevit/__pycache__/modeling_tf_mobilevit.cpython-310.pyc,, +transformers/models/mobilevit/configuration_mobilevit.py,sha256=rW2nFW5N3MxWzlMI7DOESzPykR-kFbxUlM9PohjET7k,7587 +transformers/models/mobilevit/convert_mlcvnets_to_pytorch.py,sha256=nphj4DM8G43EUUWhQIL6TYHWB855GmZI9KPk8mLXX8E,12401 +transformers/models/mobilevit/feature_extraction_mobilevit.py,sha256=AmtPlzbLUP_7urr2s6U36GfM8QnyNXUJ_MlaI2q-UhI,1249 +transformers/models/mobilevit/image_processing_mobilevit.py,sha256=JW_jtrF81tOknX5N2hdHIwHWxudwmEile9jNeOPeOpw,21512 +transformers/models/mobilevit/modeling_mobilevit.py,sha256=U9dyAT7Rkkt1lvluh4a5U4vLmYuM0lYBuhw8zjc-YKE,40279 +transformers/models/mobilevit/modeling_tf_mobilevit.py,sha256=pUuFUsH-6t6J2vDNneT4Bkci4OGXpuNYwVESZzswQ_Y,54833 +transformers/models/mobilevitv2/__init__.py,sha256=pAGk_9X22yOYvlcwbqTc4nm6fL4rPhAhDpdBguna5Q0,1003 +transformers/models/mobilevitv2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mobilevitv2/__pycache__/configuration_mobilevitv2.cpython-310.pyc,, +transformers/models/mobilevitv2/__pycache__/convert_mlcvnets_to_pytorch.cpython-310.pyc,, +transformers/models/mobilevitv2/__pycache__/modeling_mobilevitv2.cpython-310.pyc,, 
+transformers/models/mobilevitv2/configuration_mobilevitv2.py,sha256=XJcBxOXaWExP78dtuFQ8HoA77B6smeTBB-204JnUoIM,7150 +transformers/models/mobilevitv2/convert_mlcvnets_to_pytorch.py,sha256=zl8FGQnMDhoN5PgtZLtHvLWDFrL2wg6AauROmo7zicc,12681 +transformers/models/mobilevitv2/modeling_mobilevitv2.py,sha256=tnTmaQmeEt41z7aphPg4BYEj-PEBvQM-e1jRErkbaY4,38362 +transformers/models/modernbert/__init__.py,sha256=BEQFRFfcKvUlphA1ibW3s34Vkbm-MUuyqzaLbrIFiAA,1006 +transformers/models/modernbert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/modernbert/__pycache__/configuration_modernbert.cpython-310.pyc,, +transformers/models/modernbert/__pycache__/modeling_modernbert.cpython-310.pyc,, +transformers/models/modernbert/__pycache__/modular_modernbert.cpython-310.pyc,, +transformers/models/modernbert/configuration_modernbert.py,sha256=CwKAU-4iX8-bscAu0HrZpOif7q28guf3F8j5-5qpbfw,11310 +transformers/models/modernbert/modeling_modernbert.py,sha256=4jXoAgxu4IKiGm8mK_Wzyz9HbWMCAc46FRjxcgCujes,58897 +transformers/models/modernbert/modular_modernbert.py,sha256=FukcrPEKIe8I-Ee4ztuoEgaY_SCx_niI3PspKqY2QUs,63215 +transformers/models/moonshine/__init__.py,sha256=eBgvc9LtoDnB6HnNvrObDWL3h_L4Sgn5-D-hepNfAmI,999 +transformers/models/moonshine/__pycache__/__init__.cpython-310.pyc,, +transformers/models/moonshine/__pycache__/configuration_moonshine.cpython-310.pyc,, +transformers/models/moonshine/__pycache__/convert_usefulsensors_to_hf.cpython-310.pyc,, +transformers/models/moonshine/__pycache__/modeling_moonshine.cpython-310.pyc,, +transformers/models/moonshine/__pycache__/modular_moonshine.cpython-310.pyc,, +transformers/models/moonshine/configuration_moonshine.py,sha256=90cr3nFtmb4OZIUY4f77NaaJRUjODAzQjccrY-buBIY,13174 +transformers/models/moonshine/convert_usefulsensors_to_hf.py,sha256=pcTINZKH9DkJtYjgFcVvOh_f8y9I90_vGqKEdLR3td0,7175 +transformers/models/moonshine/modeling_moonshine.py,sha256=QuLoUboHtqwC3e88xvbXAYE9S8p1ytEy0Jh441U8AWw,72460 
+transformers/models/moonshine/modular_moonshine.py,sha256=CvMPT1utzxFV_XpOSkV5wBWcJUiwIid2Yz5vk_28EUQ,53967 +transformers/models/moshi/__init__.py,sha256=uW4oqTKZdbmURZaC_xwwHXnYEMyLJrMEJAlfbUzSWO8,991 +transformers/models/moshi/__pycache__/__init__.cpython-310.pyc,, +transformers/models/moshi/__pycache__/configuration_moshi.cpython-310.pyc,, +transformers/models/moshi/__pycache__/convert_moshi_transformers.cpython-310.pyc,, +transformers/models/moshi/__pycache__/modeling_moshi.cpython-310.pyc,, +transformers/models/moshi/configuration_moshi.py,sha256=ZYWbDase6o3SUCpLQVjcFaReiPXOgWihNDb76fh124Q,16050 +transformers/models/moshi/convert_moshi_transformers.py,sha256=42sbLhLNI2hWRWrBSJbFKH7DGHwjxoRn7DT1woaheio,11554 +transformers/models/moshi/modeling_moshi.py,sha256=sd78KFfNV8Z2zbKU0M-j9T69fWVFIyj5Uy9pD8pSwhk,137523 +transformers/models/mpnet/__init__.py,sha256=agt4uraqHTtlIphsDB17XVAPzCKHaPBKlVaQkKHxRyM,1109 +transformers/models/mpnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mpnet/__pycache__/configuration_mpnet.cpython-310.pyc,, +transformers/models/mpnet/__pycache__/modeling_mpnet.cpython-310.pyc,, +transformers/models/mpnet/__pycache__/modeling_tf_mpnet.cpython-310.pyc,, +transformers/models/mpnet/__pycache__/tokenization_mpnet.cpython-310.pyc,, +transformers/models/mpnet/__pycache__/tokenization_mpnet_fast.cpython-310.pyc,, +transformers/models/mpnet/configuration_mpnet.py,sha256=DsCgTVE6hDGcaVxd2yqEPj7Ph-JLE2nPyt1AJlVZkx4,5327 +transformers/models/mpnet/modeling_mpnet.py,sha256=BqMYodX425EhnXRSqg28t-EOK5fvuETR93fstV3nK4I,42841 +transformers/models/mpnet/modeling_tf_mpnet.py,sha256=Q2KQ__x0k3XDov75cLAhCN-XkM6pmavudQH28osR7Sg,55748 +transformers/models/mpnet/tokenization_mpnet.py,sha256=eYYgPLpYR3ODzls2m-m1sPRA6skT-wQ4wdN3lxP01jE,22475 +transformers/models/mpnet/tokenization_mpnet_fast.py,sha256=WXozzALSVhzKSAm0FrhbQpkJ_KVvkkr7D9_w9fWPvSg,9193 +transformers/models/mpt/__init__.py,sha256=DAIIAY0kPL-bXMkPUvxmP97HCXPi-SoM3NLnlJJYarg,987 
+transformers/models/mpt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mpt/__pycache__/configuration_mpt.cpython-310.pyc,, +transformers/models/mpt/__pycache__/modeling_mpt.cpython-310.pyc,, +transformers/models/mpt/configuration_mpt.py,sha256=fw4DDg2YlaYnIqsdw2S3xNrpjn1HqlRRL8FEEj19eSY,10543 +transformers/models/mpt/modeling_mpt.py,sha256=ebTLGgFvnM2Km0ufIE9_gqNI0RKOs3KEgxLNggVtAyg,39667 +transformers/models/mra/__init__.py,sha256=51mnm4DFq6aWxOsmaaVZDL28QozNauXyTtbEihDxUQU,987 +transformers/models/mra/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mra/__pycache__/configuration_mra.cpython-310.pyc,, +transformers/models/mra/__pycache__/convert_mra_pytorch_to_pytorch.cpython-310.pyc,, +transformers/models/mra/__pycache__/modeling_mra.cpython-310.pyc,, +transformers/models/mra/configuration_mra.py,sha256=oNhRz6PdvUK_ugoiAhHDuNkGgBNyDguATgQdKeTJBnY,6536 +transformers/models/mra/convert_mra_pytorch_to_pytorch.py,sha256=LhaVlQ4q88gtewg-geRYZ748xQ3brLLhyDIo-OGWSdI,4247 +transformers/models/mra/modeling_mra.py,sha256=BLiEYTX9SekcTwtGRL4rSD84AesPTPOD2nnUUQyZGDc,62199 +transformers/models/mt5/__init__.py,sha256=UK8vGX9r6fPdzPaJKCbGJ7RCqKOdIo-7H9V-Qp8rwEg,1095 +transformers/models/mt5/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mt5/__pycache__/configuration_mt5.cpython-310.pyc,, +transformers/models/mt5/__pycache__/modeling_flax_mt5.cpython-310.pyc,, +transformers/models/mt5/__pycache__/modeling_mt5.cpython-310.pyc,, +transformers/models/mt5/__pycache__/modeling_tf_mt5.cpython-310.pyc,, +transformers/models/mt5/__pycache__/tokenization_mt5.cpython-310.pyc,, +transformers/models/mt5/__pycache__/tokenization_mt5_fast.cpython-310.pyc,, +transformers/models/mt5/configuration_mt5.py,sha256=VyZeWfoCTMskhWm1s80fIesKNfyYqvRncASS9XGqyJw,8002 +transformers/models/mt5/modeling_flax_mt5.py,sha256=9WjlLB_EV9WDiy-rBxzVUPocsHrv02cEa4OB8lVR6EA,4329 +transformers/models/mt5/modeling_mt5.py,sha256=LbT4DK6tSZwLVtCdHF6yfL6-bjhPuuovYEAyX2Zl6gs,119494 
+transformers/models/mt5/modeling_tf_mt5.py,sha256=EIUkWvuApAbiaX6qhveT1KC43s_NDmQazLrbYT45aao,3406 +transformers/models/mt5/tokenization_mt5.py,sha256=AckaXSw5OojOGLezMhrsv2a9BMZXwzhy5IsT3hvp_Q8,746 +transformers/models/mt5/tokenization_mt5_fast.py,sha256=1npEFH_c4nDQxOFNoqcGNW30KCWe04BpLrrv7aDcDQ8,762 +transformers/models/musicgen/__init__.py,sha256=iwtW9pg6iDe5D2dWVC4IRU8QbNmRK5kMqPCM8fsUSgo,1036 +transformers/models/musicgen/__pycache__/__init__.cpython-310.pyc,, +transformers/models/musicgen/__pycache__/configuration_musicgen.cpython-310.pyc,, +transformers/models/musicgen/__pycache__/convert_musicgen_transformers.cpython-310.pyc,, +transformers/models/musicgen/__pycache__/modeling_musicgen.cpython-310.pyc,, +transformers/models/musicgen/__pycache__/processing_musicgen.cpython-310.pyc,, +transformers/models/musicgen/configuration_musicgen.py,sha256=3nUL5CBMpG7IDKZ1659J58JsRAckf6rYxyGwflbo1SQ,10876 +transformers/models/musicgen/convert_musicgen_transformers.py,sha256=ky4nceHZ-78GAp0L4vY-cFXsZ6lGvkYzX9k8MGUZ2kQ,9384 +transformers/models/musicgen/modeling_musicgen.py,sha256=ZE6nrvSIBMb3IaSNFeld9lly0KNzTGqgYLwIntazxCw,135963 +transformers/models/musicgen/processing_musicgen.py,sha256=yLMH8wTOkE-Rv3YFeI6fz1NbdRCDbU0AL4dVZZ-Ih5s,5701 +transformers/models/musicgen_melody/__init__.py,sha256=v3FVLsoE2TEh_eAaYKcb8v114HPo9RZN-p5TSS4eD_I,2594 +transformers/models/musicgen_melody/__pycache__/__init__.cpython-310.pyc,, +transformers/models/musicgen_melody/__pycache__/configuration_musicgen_melody.cpython-310.pyc,, +transformers/models/musicgen_melody/__pycache__/convert_musicgen_melody_transformers.cpython-310.pyc,, +transformers/models/musicgen_melody/__pycache__/feature_extraction_musicgen_melody.cpython-310.pyc,, +transformers/models/musicgen_melody/__pycache__/modeling_musicgen_melody.cpython-310.pyc,, +transformers/models/musicgen_melody/__pycache__/processing_musicgen_melody.cpython-310.pyc,, 
+transformers/models/musicgen_melody/configuration_musicgen_melody.py,sha256=OiqJNBa-iWwLlwkrkA9Uodg9tuMPA95Cvy7_4hSPnMo,11931 +transformers/models/musicgen_melody/convert_musicgen_melody_transformers.py,sha256=uaWtwMCLmSCXc7c0PK9TinKJeBsGjDfRwmXVHH_Mxig,11356 +transformers/models/musicgen_melody/feature_extraction_musicgen_melody.py,sha256=0-gMjuGhG4JMeM-44wa3aTo3Nph-_cjZs7k3nhc6cfE,15227 +transformers/models/musicgen_melody/modeling_musicgen_melody.py,sha256=ydkKkDHGyLENM9G26kHd-v6moDGs61nkzfFexOwG91E,129472 +transformers/models/musicgen_melody/processing_musicgen_melody.py,sha256=W2dEpemPPhX10YgTwm0T3zvy71Z8QxJ_LSoz6Z00UAI,8634 +transformers/models/mvp/__init__.py,sha256=0e0-wP4EkfzPiO_BlHlmyVUEq-1kb9RHY2Ikbk66W7s,1064 +transformers/models/mvp/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mvp/__pycache__/configuration_mvp.cpython-310.pyc,, +transformers/models/mvp/__pycache__/modeling_mvp.cpython-310.pyc,, +transformers/models/mvp/__pycache__/tokenization_mvp.cpython-310.pyc,, +transformers/models/mvp/__pycache__/tokenization_mvp_fast.cpython-310.pyc,, +transformers/models/mvp/configuration_mvp.py,sha256=AzvHDoXei3ZGo_lYWLM64GrqafASaAX2_sU0RuBSKqM,8435 +transformers/models/mvp/modeling_mvp.py,sha256=U2dhw2xVI4D-YRWI9zone9Uite_xkaKehy2f1bqY_ZM,90440 +transformers/models/mvp/tokenization_mvp.py,sha256=FNCM8ee93oXqWSJ6KEs7AHBORCbhCvQR1yDDxPKID5I,16221 +transformers/models/mvp/tokenization_mvp_fast.py,sha256=nVUAFwajGI2hZXUE5h9_3mDnCvwAkipVhmsBeEtBEyw,11832 +transformers/models/myt5/__init__.py,sha256=MFQX-RuvZujGb_twBWBQpTt4NZq6FxreEysWmF2fFGI,955 +transformers/models/myt5/__pycache__/__init__.cpython-310.pyc,, +transformers/models/myt5/__pycache__/convert_myt5_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/myt5/__pycache__/tokenization_myt5.cpython-310.pyc,, +transformers/models/myt5/convert_myt5_original_tf_checkpoint_to_pytorch.py,sha256=m_BWHivhSbIAP43xOVX82eNxd1z8uOElA17OSs77u08,2240 
+transformers/models/myt5/tokenization_myt5.py,sha256=xsj12hpUt7sJj3slLU9mRwOeq3L_724yxoPlNrQsDtI,15555 +transformers/models/nemotron/__init__.py,sha256=ZwaMH1AQ0VIuFnouYe0Sx0HcCGA7PaCp3-_yw3xjeQA,997 +transformers/models/nemotron/__pycache__/__init__.cpython-310.pyc,, +transformers/models/nemotron/__pycache__/configuration_nemotron.cpython-310.pyc,, +transformers/models/nemotron/__pycache__/convert_nemotron_nemo_to_hf.cpython-310.pyc,, +transformers/models/nemotron/__pycache__/modeling_nemotron.cpython-310.pyc,, +transformers/models/nemotron/configuration_nemotron.py,sha256=JaqyIqgHyuy7Z2J6PCJd1Ub_m0p5pvQ6ge3ag5hYtgs,7393 +transformers/models/nemotron/convert_nemotron_nemo_to_hf.py,sha256=Bd9yShFuis_LrphDApfrfU_mc6S8OiLwclR083GoXr4,15645 +transformers/models/nemotron/modeling_nemotron.py,sha256=KGeenATPa23wIRS-O6b1jfhQB7X410WbMpeWjENpW5Q,64542 +transformers/models/nllb/__init__.py,sha256=MLFrxhOJ3xvOAcRulvCEMoKsajLuudllZLMrYDYQOas,997 +transformers/models/nllb/__pycache__/__init__.cpython-310.pyc,, +transformers/models/nllb/__pycache__/tokenization_nllb.cpython-310.pyc,, +transformers/models/nllb/__pycache__/tokenization_nllb_fast.cpython-310.pyc,, +transformers/models/nllb/tokenization_nllb.py,sha256=bxNqY1VepeLj6BTqismS9LHpZZTly7P7KeTi09e62q4,19095 +transformers/models/nllb/tokenization_nllb_fast.py,sha256=SjrPK2Z7hnfIuFf-baiftFFhG5JViWA0wqoWcshAf2Q,15974 +transformers/models/nllb_moe/__init__.py,sha256=sAfoAnhHK_reU1a2WUoF1rFtPBckeGGrzJCD8gUv54A,997 +transformers/models/nllb_moe/__pycache__/__init__.cpython-310.pyc,, +transformers/models/nllb_moe/__pycache__/configuration_nllb_moe.cpython-310.pyc,, +transformers/models/nllb_moe/__pycache__/convert_nllb_moe_sharded_original_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/nllb_moe/__pycache__/modeling_nllb_moe.cpython-310.pyc,, +transformers/models/nllb_moe/configuration_nllb_moe.py,sha256=mgDfnEpxjMp0ACizcpHZ70lU-TaE0CLAXzfAPzZJQdw,11198 
+transformers/models/nllb_moe/convert_nllb_moe_sharded_original_checkpoint_to_pytorch.py,sha256=c9Zab9qVzNESk0U2exJNaoDwUQo_Q7ZpcZHViZjqTQQ,6477 +transformers/models/nllb_moe/modeling_nllb_moe.py,sha256=gOpgkAPRRHIuAl9OiMnpuQffE9PQjrX7xtagzGeFCWU,84588 +transformers/models/nougat/__init__.py,sha256=W-_PD9oOisHzq8UvCK10HGSaz8ljuAkcBC5ElCPj6Bs,1042 +transformers/models/nougat/__pycache__/__init__.cpython-310.pyc,, +transformers/models/nougat/__pycache__/convert_nougat_to_hf.cpython-310.pyc,, +transformers/models/nougat/__pycache__/image_processing_nougat.cpython-310.pyc,, +transformers/models/nougat/__pycache__/processing_nougat.cpython-310.pyc,, +transformers/models/nougat/__pycache__/tokenization_nougat_fast.cpython-310.pyc,, +transformers/models/nougat/convert_nougat_to_hf.py,sha256=3KHG9mTikCDX88hKbRB8_aVQzKdlheP1_TiOydwQoIw,10949 +transformers/models/nougat/image_processing_nougat.py,sha256=XC8mwaDeH8mtIwHjQGssTyTNFlMjNIx5U1sp9mNq0-Y,23739 +transformers/models/nougat/processing_nougat.py,sha256=Rdm5WKDzUJmLX2SPYoMRRudyW4tYz8UWZIh97WYpRcY,6763 +transformers/models/nougat/tokenization_nougat_fast.py,sha256=c6y7vWMpIEQWNNal-NPP-Vq6GM7tht55nCClCIo5UjA,24740 +transformers/models/nystromformer/__init__.py,sha256=CwEg6m4nJW_AfNDws_MIv1O1x5IO3xPp-FYqirlFXwk,1007 +transformers/models/nystromformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/nystromformer/__pycache__/configuration_nystromformer.cpython-310.pyc,, +transformers/models/nystromformer/__pycache__/convert_nystromformer_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/nystromformer/__pycache__/modeling_nystromformer.cpython-310.pyc,, +transformers/models/nystromformer/configuration_nystromformer.py,sha256=UyLmPF2li3_ADTz9tS1h5t4CDY5d5GzsfeC9hG42RzI,6402 +transformers/models/nystromformer/convert_nystromformer_original_pytorch_checkpoint_to_pytorch.py,sha256=8K5IGFosME-LAljFLuTc09oce1IwxZDcxw1KPHsamqc,4197 
+transformers/models/nystromformer/modeling_nystromformer.py,sha256=fi3bgpEtCWBJFZHVEgiGrjvELrarXGJxw9kXPMEPXpE,49062 +transformers/models/olmo/__init__.py,sha256=_dNlQLxAlwk4Yt9djxtrLXy90ben8LUx4LtD8wZR5hU,1658 +transformers/models/olmo/__pycache__/__init__.cpython-310.pyc,, +transformers/models/olmo/__pycache__/configuration_olmo.cpython-310.pyc,, +transformers/models/olmo/__pycache__/convert_olmo_weights_to_hf.cpython-310.pyc,, +transformers/models/olmo/__pycache__/modeling_olmo.cpython-310.pyc,, +transformers/models/olmo/__pycache__/modular_olmo.cpython-310.pyc,, +transformers/models/olmo/configuration_olmo.py,sha256=qqY8AF6TOEYKn6q-KKKuFB9WE_I6649dyvWBp-FAPh4,8810 +transformers/models/olmo/convert_olmo_weights_to_hf.py,sha256=SI91Kn_B_m0oel2kuJ2LUMGqfaNZL4Q4sT2ydqNYZlE,9413 +transformers/models/olmo/modeling_olmo.py,sha256=gB5Q3HSufNtD1yU0TF3NQlUc8hiNrNJXyL-QTrVajEY,38414 +transformers/models/olmo/modular_olmo.py,sha256=IiT1bB4e_6LwChEsMYnMIx-eqd6GW6gtVECqnTXnBnA,4899 +transformers/models/olmo2/__init__.py,sha256=Frt9nEMsfPszod1lkFTAJUobU50IjOFlqI6uJkuQVcY,1011 +transformers/models/olmo2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/olmo2/__pycache__/configuration_olmo2.cpython-310.pyc,, +transformers/models/olmo2/__pycache__/convert_olmo2_weights_to_hf.cpython-310.pyc,, +transformers/models/olmo2/__pycache__/modeling_olmo2.cpython-310.pyc,, +transformers/models/olmo2/__pycache__/modular_olmo2.cpython-310.pyc,, +transformers/models/olmo2/configuration_olmo2.py,sha256=1eQ_mMfPB4KaO7FWXO3DrzVdFvsZVh-1NX_j_-6qG9U,8590 +transformers/models/olmo2/convert_olmo2_weights_to_hf.py,sha256=hzCdF-B_wZgcVyaJYNK_-FZR-hTWx4P-dsh5Fb3jE-o,11563 +transformers/models/olmo2/modeling_olmo2.py,sha256=EbG7bPm0bdZRjoHmo_3dBScqg4PgiGUdHytKj-PNWo4,38750 +transformers/models/olmo2/modular_olmo2.py,sha256=ryU8yWI9WkbiHGvxhP1BCZ5xQYmw1KtbQD-ElAszJrc,13029 +transformers/models/olmoe/__init__.py,sha256=eQ6mx9aBIcA4RiK3p7dbqORokkuMfQNRss06E8uWNrk,991 
+transformers/models/olmoe/__pycache__/__init__.cpython-310.pyc,, +transformers/models/olmoe/__pycache__/configuration_olmoe.cpython-310.pyc,, +transformers/models/olmoe/__pycache__/convert_olmoe_weights_to_hf.cpython-310.pyc,, +transformers/models/olmoe/__pycache__/modeling_olmoe.cpython-310.pyc,, +transformers/models/olmoe/configuration_olmoe.py,sha256=OqnPtXjK4Kdjzk-HozshemJ8IemwbIgheYCM0BY7mbE,9064 +transformers/models/olmoe/convert_olmoe_weights_to_hf.py,sha256=vD0pnyDVd7oyF7nIqdWDa5MJZQzgUwtH29NaQSnri_g,13029 +transformers/models/olmoe/modeling_olmoe.py,sha256=kig3Pc3N4nxfD8rTNNNo5vbf1yghoRZQe_DxhFI23fU,61210 +transformers/models/omdet_turbo/__init__.py,sha256=XIckpuo9tkT7NB5uTs9wLdpxr9GDedQPVJL2P8XU-7Q,1045 +transformers/models/omdet_turbo/__pycache__/__init__.cpython-310.pyc,, +transformers/models/omdet_turbo/__pycache__/configuration_omdet_turbo.cpython-310.pyc,, +transformers/models/omdet_turbo/__pycache__/convert_omdet_turbo_to_hf.cpython-310.pyc,, +transformers/models/omdet_turbo/__pycache__/modeling_omdet_turbo.cpython-310.pyc,, +transformers/models/omdet_turbo/__pycache__/processing_omdet_turbo.cpython-310.pyc,, +transformers/models/omdet_turbo/configuration_omdet_turbo.py,sha256=pwzcgMM_6bEv7s0zH3-w1QvoAQQfcck7IevKDhEQiCg,14479 +transformers/models/omdet_turbo/convert_omdet_turbo_to_hf.py,sha256=yedQ9B-dADbQo_ZFXLb9nhAGC4SuW6424wn_rqG2wWk,17553 +transformers/models/omdet_turbo/modeling_omdet_turbo.py,sha256=RNwxIqSHHyD8mXRWwCJu8JV9mS8IBM2lzN3Vz4Q-Wn8,81117 +transformers/models/omdet_turbo/processing_omdet_turbo.py,sha256=4HqmlymcQiZga3WvZrCgQwewMwteVqvKXYzpjBvpDjE,15501 +transformers/models/oneformer/__init__.py,sha256=w9mGWZlVRSSC_IVWwcXxJudlvc_XvCffD1_yupoIDRY,1085 +transformers/models/oneformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/oneformer/__pycache__/configuration_oneformer.cpython-310.pyc,, +transformers/models/oneformer/__pycache__/convert_to_hf_oneformer.cpython-310.pyc,, 
+transformers/models/oneformer/__pycache__/image_processing_oneformer.cpython-310.pyc,, +transformers/models/oneformer/__pycache__/modeling_oneformer.cpython-310.pyc,, +transformers/models/oneformer/__pycache__/processing_oneformer.cpython-310.pyc,, +transformers/models/oneformer/configuration_oneformer.py,sha256=lx36Li_mBnawtbdvKUdHrF5qj0-2nAijgTF-SEadvDs,13468 +transformers/models/oneformer/convert_to_hf_oneformer.py,sha256=yBWS0SE1sGS9UqCzX2EdbhAiIWvBCumSBwutJ8VQFF4,50691 +transformers/models/oneformer/image_processing_oneformer.py,sha256=v_pWc4wZ5_VKTrPgiYxYqbsdKyxo9moI3RUUo3yWZ5I,61266 +transformers/models/oneformer/modeling_oneformer.py,sha256=AWgXS43DqO5Hbem-77J5VRvP_6vdnVhaxr8N5y-wOmQ,143688 +transformers/models/oneformer/processing_oneformer.py,sha256=MmkQH3xINYzVzebgiT0qH5iIq6hHPsMPxYugszkI9aY,9412 +transformers/models/openai/__init__.py,sha256=q0fAl8ajoJyknHe5A3ZHuHH3zww8xdupt_j49lIaObY,1114 +transformers/models/openai/__pycache__/__init__.cpython-310.pyc,, +transformers/models/openai/__pycache__/configuration_openai.cpython-310.pyc,, +transformers/models/openai/__pycache__/convert_openai_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/openai/__pycache__/modeling_openai.cpython-310.pyc,, +transformers/models/openai/__pycache__/modeling_tf_openai.cpython-310.pyc,, +transformers/models/openai/__pycache__/tokenization_openai.cpython-310.pyc,, +transformers/models/openai/__pycache__/tokenization_openai_fast.cpython-310.pyc,, +transformers/models/openai/configuration_openai.py,sha256=ERFfcrsaGEuG-8WnuBDfYyHR7uc5ihEr9JfItBMGZm0,7109 +transformers/models/openai/convert_openai_original_tf_checkpoint_to_pytorch.py,sha256=WVyibwB1gaKbNqJubia_mbh-N8Qy4a77W7XAroTw0yA,2665 +transformers/models/openai/modeling_openai.py,sha256=NnF_8pYO7Ct7t7b8BYU38MV6bL4u3Ou2YH1mmNC0-OU,38574 +transformers/models/openai/modeling_tf_openai.py,sha256=sk4UnlcxinJQKk_LP_bugbUZMs0kBNvwiFnOA22olew,41267 
+transformers/models/openai/tokenization_openai.py,sha256=KQ699NFTu1N7nB5ugH5sKTYSrWuOESVCJMubWRWS6aU,15187 +transformers/models/openai/tokenization_openai_fast.py,sha256=M3hYvAYNCF-qRUg23AhU5AkvKZIxWcNLpF-6dzJJLaw,2560 +transformers/models/opt/__init__.py,sha256=Xk3Z-OdrOC4Y5J0KOEIB74Pp4PsfAllBI503NT7yFk8,1059 +transformers/models/opt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/opt/__pycache__/configuration_opt.cpython-310.pyc,, +transformers/models/opt/__pycache__/convert_opt_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/opt/__pycache__/modeling_flax_opt.cpython-310.pyc,, +transformers/models/opt/__pycache__/modeling_opt.cpython-310.pyc,, +transformers/models/opt/__pycache__/modeling_tf_opt.cpython-310.pyc,, +transformers/models/opt/configuration_opt.py,sha256=bgH5bXI8nuYRBPOP93zqxqaV5T6mmEw_AP7uDq7Bt-k,6686 +transformers/models/opt/convert_opt_original_pytorch_checkpoint_to_pytorch.py,sha256=NIAQgoFpeWZyZCQ6zN4EylZdojsIQm2wtvOIuMZKj64,3857 +transformers/models/opt/modeling_flax_opt.py,sha256=MvU2uukG3UpYAZ_UkXcYAPEFT1f-Gt1Ir9TBzjS2Wuc,31617 +transformers/models/opt/modeling_opt.py,sha256=2yOSNGaZtC3t3j0CCcsnuoKjgKud0GByRFYw84YKcrg,69860 +transformers/models/opt/modeling_tf_opt.py,sha256=4XcpnLOnfNMOvHcviavTPp1B7PSdXYqMbN-IltFFpsU,49623 +transformers/models/owlv2/__init__.py,sha256=vCDn8zY6eLkh1fT2R0YnXKC9C7xe5Q0UHe5cvce3cxs,1069 +transformers/models/owlv2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/owlv2/__pycache__/configuration_owlv2.cpython-310.pyc,, +transformers/models/owlv2/__pycache__/convert_owlv2_to_hf.cpython-310.pyc,, +transformers/models/owlv2/__pycache__/image_processing_owlv2.cpython-310.pyc,, +transformers/models/owlv2/__pycache__/modeling_owlv2.cpython-310.pyc,, +transformers/models/owlv2/__pycache__/processing_owlv2.cpython-310.pyc,, +transformers/models/owlv2/configuration_owlv2.py,sha256=cf5_-eT4QouWjnJ1NsLVGEo5_LmEdM1EKnmEO7JB98A,13203 
+transformers/models/owlv2/convert_owlv2_to_hf.py,sha256=rF02k9XWTswf4P4ZZ76ekB3be6pRsFJLtbuWaJpyx3Y,22018 +transformers/models/owlv2/image_processing_owlv2.py,sha256=dg8LS6AC5KRRjBLF23ODhRzGO0aj-w2sYKrBT4SbNNY,26894 +transformers/models/owlv2/modeling_owlv2.py,sha256=g39mNkK6X2-bKy7-sDMkd8d7e2qEr3Oqi4nmDj1_sKg,86060 +transformers/models/owlv2/processing_owlv2.py,sha256=iOE_uFd5P6DuI5voz8TIOAZC8HMg8FmHkDfm78hWNKQ,10077 +transformers/models/owlvit/__init__.py,sha256=rN_V6yzWDuBHgrDtr_qAOn2X1ek-lCE3QsamyEFmAVg,1118 +transformers/models/owlvit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/owlvit/__pycache__/configuration_owlvit.cpython-310.pyc,, +transformers/models/owlvit/__pycache__/convert_owlvit_original_flax_to_hf.cpython-310.pyc,, +transformers/models/owlvit/__pycache__/feature_extraction_owlvit.cpython-310.pyc,, +transformers/models/owlvit/__pycache__/image_processing_owlvit.cpython-310.pyc,, +transformers/models/owlvit/__pycache__/modeling_owlvit.cpython-310.pyc,, +transformers/models/owlvit/__pycache__/processing_owlvit.cpython-310.pyc,, +transformers/models/owlvit/configuration_owlvit.py,sha256=GOX1i1CekVQYYpqStS3qfQ3uhtWW8mkQmi9E_6KkHwY,14414 +transformers/models/owlvit/convert_owlvit_original_flax_to_hf.py,sha256=tofzNZcVROwfYoV7pV6u50Am3TFm-XmuJEAGwNvRT9o,13988 +transformers/models/owlvit/feature_extraction_owlvit.py,sha256=1e7IvryNbsbYMKeAj257kEmW3xuo755gQb7VaPvtWLc,1225 +transformers/models/owlvit/image_processing_owlvit.py,sha256=5-Z7YYhaePRtnzhrZugQaBkdtfQIF3tfgZwino50yLw,28460 +transformers/models/owlvit/modeling_owlvit.py,sha256=pb_8pIN-VKEM9OHj8rnckywPFyGQxOeAToJx2E6OJPE,81498 +transformers/models/owlvit/processing_owlvit.py,sha256=5zUkV1qiw_lpmHigH7wcycKQFOWY3z6MUC1U7B5N8qg,11074 +transformers/models/paligemma/__init__.py,sha256=nKnTTLC8XYlI7uYfS8h-D4vz3gFhknkNeDlZIwZlZ9w,1039 +transformers/models/paligemma/__pycache__/__init__.cpython-310.pyc,, +transformers/models/paligemma/__pycache__/configuration_paligemma.cpython-310.pyc,, 
+transformers/models/paligemma/__pycache__/convert_paligemma2_weights_to_hf.cpython-310.pyc,, +transformers/models/paligemma/__pycache__/convert_paligemma_weights_to_hf.cpython-310.pyc,, +transformers/models/paligemma/__pycache__/modeling_paligemma.cpython-310.pyc,, +transformers/models/paligemma/__pycache__/processing_paligemma.cpython-310.pyc,, +transformers/models/paligemma/configuration_paligemma.py,sha256=XTn4I14e_LqZg0Nnjq7OmN5SWAKaB4BpU6_pW5hM25g,6009 +transformers/models/paligemma/convert_paligemma2_weights_to_hf.py,sha256=7Rb2lHT28_5bMogGkBp_uhtkgMG7spu9vjv3khnQiZo,20844 +transformers/models/paligemma/convert_paligemma_weights_to_hf.py,sha256=lRp8Fi7CwaeuySEoRWlcdCAt2QgPJ5cIjoBbm8mUbbk,16896 +transformers/models/paligemma/modeling_paligemma.py,sha256=kSVtgDCmmm0vXEAlf284_pfYNU9lgXVw4igykRWk8JU,31304 +transformers/models/paligemma/processing_paligemma.py,sha256=Gz0sPa1Bw4w3P0qgz4WDTd2dl28EKPcn726MztqqQ5Y,16101 +transformers/models/patchtsmixer/__init__.py,sha256=deFjF_Tu67XcAcNHaq1PXO77N4kVW9wG80SnXBaeagE,1005 +transformers/models/patchtsmixer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/patchtsmixer/__pycache__/configuration_patchtsmixer.cpython-310.pyc,, +transformers/models/patchtsmixer/__pycache__/modeling_patchtsmixer.cpython-310.pyc,, +transformers/models/patchtsmixer/configuration_patchtsmixer.py,sha256=wLXHLBxIFjsOIXHOzSmWxhOgRwdxr7gcu-ZKEiTDJgg,12566 +transformers/models/patchtsmixer/modeling_patchtsmixer.py,sha256=tFdqSR1Fw_2zFLAWnqJe3tjPSakOt9gM-hKMqmJ2VkM,87918 +transformers/models/patchtst/__init__.py,sha256=lrpuBvP25Yq6HZOCyS4yWVYZ47qWzK--rqC0AOIGGPE,997 +transformers/models/patchtst/__pycache__/__init__.cpython-310.pyc,, +transformers/models/patchtst/__pycache__/configuration_patchtst.cpython-310.pyc,, +transformers/models/patchtst/__pycache__/modeling_patchtst.cpython-310.pyc,, +transformers/models/patchtst/configuration_patchtst.py,sha256=F7VEYrtDyw-GEOyDcdVyupuUrVO-4p4PBWT_0kK_7VM,12315 
+transformers/models/patchtst/modeling_patchtst.py,sha256=dnM_I2pFuqxU1kx0BnNO374_Z0aQlOWekvKo0v0JbH4,91914 +transformers/models/pegasus/__init__.py,sha256=4b7vCYJfIWUPuKrbcBGTG7LtobUdZ5ZjeQhloScTrXs,1160 +transformers/models/pegasus/__pycache__/__init__.cpython-310.pyc,, +transformers/models/pegasus/__pycache__/configuration_pegasus.cpython-310.pyc,, +transformers/models/pegasus/__pycache__/convert_pegasus_tf_to_pytorch.cpython-310.pyc,, +transformers/models/pegasus/__pycache__/modeling_flax_pegasus.cpython-310.pyc,, +transformers/models/pegasus/__pycache__/modeling_pegasus.cpython-310.pyc,, +transformers/models/pegasus/__pycache__/modeling_tf_pegasus.cpython-310.pyc,, +transformers/models/pegasus/__pycache__/tokenization_pegasus.cpython-310.pyc,, +transformers/models/pegasus/__pycache__/tokenization_pegasus_fast.cpython-310.pyc,, +transformers/models/pegasus/configuration_pegasus.py,sha256=nMfDkbgXkv7lEFWlLrijcYXFgGqKF-wP0_N8UHa_Bt8,7501 +transformers/models/pegasus/convert_pegasus_tf_to_pytorch.py,sha256=9geJowNAukZc9FE2OEq0pXQi6ynw9k-2NFtlmISxpUg,5359 +transformers/models/pegasus/modeling_flax_pegasus.py,sha256=_YU7jwBfcjS9ylcIwLNGHP3FGxJ2LZmh0nMjALuicBc,66074 +transformers/models/pegasus/modeling_pegasus.py,sha256=kIWm71eTzVzzD8PQDh1JZmIIV6-hWvKD2yTaQCqtkD0,78218 +transformers/models/pegasus/modeling_tf_pegasus.py,sha256=4BBU7FpCKFmfqOO9xLWRtc-kaEWilCyMezp9Hjoj-9Y,74296 +transformers/models/pegasus/tokenization_pegasus.py,sha256=cYiIdbzE_ddjIx9xrfOES1f4MZMopx3svdE82qyU3cY,13158 +transformers/models/pegasus/tokenization_pegasus_fast.py,sha256=dU1D5wObpz9wPIALiZouM5OzbISR8jEg8tZEPFmGgZg,9977 +transformers/models/pegasus_x/__init__.py,sha256=qSLaqKRA1upZOobapHW5MjSZvIEzf-ij-ZmY1VGzqaE,999 +transformers/models/pegasus_x/__pycache__/__init__.cpython-310.pyc,, +transformers/models/pegasus_x/__pycache__/configuration_pegasus_x.cpython-310.pyc,, +transformers/models/pegasus_x/__pycache__/modeling_pegasus_x.cpython-310.pyc,, 
+transformers/models/pegasus_x/configuration_pegasus_x.py,sha256=d0by30PpS5eiLt9Pccsy_HIqZRohZQ1MjJCTScHqRk4,8116 +transformers/models/pegasus_x/modeling_pegasus_x.py,sha256=3MteMW85xo2n3n_o38ianiXqqGxN4AxzWGZW2Ly3PKs,75619 +transformers/models/perceiver/__init__.py,sha256=C8S_9aD_JZCcDqv5lZhUw3I45vr09RYiZWlAmo83688,1135 +transformers/models/perceiver/__pycache__/__init__.cpython-310.pyc,, +transformers/models/perceiver/__pycache__/configuration_perceiver.cpython-310.pyc,, +transformers/models/perceiver/__pycache__/convert_perceiver_haiku_to_pytorch.cpython-310.pyc,, +transformers/models/perceiver/__pycache__/feature_extraction_perceiver.cpython-310.pyc,, +transformers/models/perceiver/__pycache__/image_processing_perceiver.cpython-310.pyc,, +transformers/models/perceiver/__pycache__/modeling_perceiver.cpython-310.pyc,, +transformers/models/perceiver/__pycache__/tokenization_perceiver.cpython-310.pyc,, +transformers/models/perceiver/configuration_perceiver.py,sha256=dOaFFVps56ciQfRpHWV8UYTcJfI4Yc39cZF8FyknLkU,12209 +transformers/models/perceiver/convert_perceiver_haiku_to_pytorch.py,sha256=0Hlf6d87i4-1FfEIhYfHb_tOGX0UNp8PnHjrF_8cE4o,21385 +transformers/models/perceiver/feature_extraction_perceiver.py,sha256=tDYvohyPm9KiWTT8TbiKuPqAX4vowyjp3_hX7zWsKRk,1249 +transformers/models/perceiver/image_processing_perceiver.py,sha256=jtV6dnqRj8oXW0XDC6IBe8XOPr3fW5J5OrNfxiJjtdc,17488 +transformers/models/perceiver/modeling_perceiver.py,sha256=XvajaLalUL4NEO0Ulj3AgckbHbx8RxbsYddVt9MeOog,149298 +transformers/models/perceiver/tokenization_perceiver.py,sha256=Go8KZHZ3zl2hHOg8NvuENG_QV402rlbNJv5yeivdvnE,8053 +transformers/models/persimmon/__init__.py,sha256=T1WqyE78N2TO74u9a9QdRIGaMowYqP6vWv8KhPojkLg,999 +transformers/models/persimmon/__pycache__/__init__.cpython-310.pyc,, +transformers/models/persimmon/__pycache__/configuration_persimmon.cpython-310.pyc,, +transformers/models/persimmon/__pycache__/convert_persimmon_weights_to_hf.cpython-310.pyc,, 
+transformers/models/persimmon/__pycache__/modeling_persimmon.cpython-310.pyc,, +transformers/models/persimmon/configuration_persimmon.py,sha256=TPogwaoT3PYEHds8wR_G-GIZWPPKKj84g-ykK3NljBg,9149 +transformers/models/persimmon/convert_persimmon_weights_to_hf.py,sha256=F3NFcbCWD-UxFwgp2h-Nv78_M0p0LELPq4re30ZNIjU,4644 +transformers/models/persimmon/modeling_persimmon.py,sha256=JYMKZxx5g7EB4rhlI2i_xeWCjnHaQXF9HDsTkhfnNqI,51735 +transformers/models/phi/__init__.py,sha256=qMWyJRn1PnnyX647VO4xrJbR7hlTiwvtEkyQVDEKHxw,1807 +transformers/models/phi/__pycache__/__init__.cpython-310.pyc,, +transformers/models/phi/__pycache__/configuration_phi.cpython-310.pyc,, +transformers/models/phi/__pycache__/convert_phi_weights_to_hf.cpython-310.pyc,, +transformers/models/phi/__pycache__/modeling_phi.cpython-310.pyc,, +transformers/models/phi/__pycache__/modular_phi.cpython-310.pyc,, +transformers/models/phi/configuration_phi.py,sha256=HdVVz0WMZD_zYgffGZimHreVvcdSCgmVEvUMf5Nbg_g,10537 +transformers/models/phi/convert_phi_weights_to_hf.py,sha256=XrjgtZm6GZQx01rZ0q52g6e4ajyZhl8n02QNchAD6BQ,7685 +transformers/models/phi/modeling_phi.py,sha256=c3wBOWetN8Vc1xJ6rrNYpiNRqdTzU5QQSXtjTYoeJLo,46713 +transformers/models/phi/modular_phi.py,sha256=Ua5rF9SP1JCFG8qgnR5PEZoq4NGIR1m832uGvUskXgY,12119 +transformers/models/phi3/__init__.py,sha256=dxyO-jIh0yB6t2Dzs173aRrEnTceVMIYIkg6JxIeyWs,989 +transformers/models/phi3/__pycache__/__init__.cpython-310.pyc,, +transformers/models/phi3/__pycache__/configuration_phi3.cpython-310.pyc,, +transformers/models/phi3/__pycache__/modeling_phi3.cpython-310.pyc,, +transformers/models/phi3/__pycache__/modular_phi3.cpython-310.pyc,, +transformers/models/phi3/configuration_phi3.py,sha256=36rax_9CLbw8daxaH6lpwgqhkygnZuSpBGAyuaZ0X_s,10636 +transformers/models/phi3/modeling_phi3.py,sha256=ThiFrOtzRZ5fEv3H3DBtzrUuX-Zf3SeDLyN7FNZkyNw,53948 +transformers/models/phi3/modular_phi3.py,sha256=FrdGWJFO7azSObDQ5ufesk7mQLY2GWWyM-ik3VhtqxU,13983 
+transformers/models/phimoe/__init__.py,sha256=wGasPysu0EH_q0QGaZmXqQL57GxfZn8NTsvB2I6U2ro,1013 +transformers/models/phimoe/__pycache__/__init__.cpython-310.pyc,, +transformers/models/phimoe/__pycache__/configuration_phimoe.cpython-310.pyc,, +transformers/models/phimoe/__pycache__/modeling_phimoe.cpython-310.pyc,, +transformers/models/phimoe/configuration_phimoe.py,sha256=ChXe5y5wBQvk4nbo9m4cqLp_n8xlhzpB6ZG6ru71cN0,10273 +transformers/models/phimoe/modeling_phimoe.py,sha256=7_9UzaEkeZ93K-R3estb8DyYoyfOaZwUcfsmYzoPbMU,73633 +transformers/models/phobert/__init__.py,sha256=mau-2HIOzSk8qGIhxivVBPPYTx3hhdgoKPtnptDF38M,958 +transformers/models/phobert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/phobert/__pycache__/tokenization_phobert.cpython-310.pyc,, +transformers/models/phobert/tokenization_phobert.py,sha256=MvkVnqP_ZVu7qiN88MUxwc948LJJ0gCvDjgyWjIwN80,13124 +transformers/models/pix2struct/__init__.py,sha256=ivncogrVjZZ6ag6FYHJ0XqyCMJYbsCYlh5boqxe09Yo,1089 +transformers/models/pix2struct/__pycache__/__init__.cpython-310.pyc,, +transformers/models/pix2struct/__pycache__/configuration_pix2struct.cpython-310.pyc,, +transformers/models/pix2struct/__pycache__/convert_pix2struct_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/pix2struct/__pycache__/image_processing_pix2struct.cpython-310.pyc,, +transformers/models/pix2struct/__pycache__/modeling_pix2struct.cpython-310.pyc,, +transformers/models/pix2struct/__pycache__/processing_pix2struct.cpython-310.pyc,, +transformers/models/pix2struct/configuration_pix2struct.py,sha256=fqBGk2kwErmFVBkcbM0_5shTg3VUcQynKoAqPw_Kp3U,15803 +transformers/models/pix2struct/convert_pix2struct_original_pytorch_to_hf.py,sha256=m_S-9oxyN4PQafRbWQIP-G0NUDrTqxOmr8IwiHNCOuU,5886 +transformers/models/pix2struct/image_processing_pix2struct.py,sha256=nibo9FTJil2WHIyo9uaAsK5J2k6XcEYPWDPHqHBUX-o,19769 +transformers/models/pix2struct/modeling_pix2struct.py,sha256=Wc0oTTwxx62K78XrbnxeK0ZHPIT3ONzcHPt9GUTcO0M,88644 
+transformers/models/pix2struct/processing_pix2struct.py,sha256=yyNOHBL_8-ljZUcoEoulZls4pYQ_sksThesjTKvizcA,6912 +transformers/models/pixtral/__init__.py,sha256=WKCxuWpCeTYsYSaTH1XnUcGkIHEx5BIIXwwwqG_E83s,1126 +transformers/models/pixtral/__pycache__/__init__.cpython-310.pyc,, +transformers/models/pixtral/__pycache__/configuration_pixtral.cpython-310.pyc,, +transformers/models/pixtral/__pycache__/convert_pixtral_weights_to_hf.cpython-310.pyc,, +transformers/models/pixtral/__pycache__/image_processing_pixtral.cpython-310.pyc,, +transformers/models/pixtral/__pycache__/image_processing_pixtral_fast.cpython-310.pyc,, +transformers/models/pixtral/__pycache__/modeling_pixtral.cpython-310.pyc,, +transformers/models/pixtral/__pycache__/processing_pixtral.cpython-310.pyc,, +transformers/models/pixtral/configuration_pixtral.py,sha256=86cY74VW7J8XqU1JbvpxLqOXnnzoPh7I_9zja8j3Wng,4237 +transformers/models/pixtral/convert_pixtral_weights_to_hf.py,sha256=erIMU9rmoqSQ9nwwI6htr8CYMj3W5T-Iqos3RqVIcTk,13016 +transformers/models/pixtral/image_processing_pixtral.py,sha256=qStqiyE1y6eRZyccLiexx7U3Febp-DevTx_lurQHfVY,23906 +transformers/models/pixtral/image_processing_pixtral_fast.py,sha256=IiELMq0O0riEGl75kOE0NL8rokt8p2cPygW4fkJegEA,17071 +transformers/models/pixtral/modeling_pixtral.py,sha256=1KlYepPE9Wn6MpS5qZRbk4fH9yVWrQy6jkpVjKZPd-0,22096 +transformers/models/pixtral/processing_pixtral.py,sha256=AV2e6JcCLEfXPMB8nDo2b1sAYVBPcglS1rX4AWCsWZA,13982 +transformers/models/plbart/__init__.py,sha256=jmP857QTG7jGfr9n0qK3TB_1-hdVDD1ajtJvP6C7FIw,1032 +transformers/models/plbart/__pycache__/__init__.cpython-310.pyc,, +transformers/models/plbart/__pycache__/configuration_plbart.cpython-310.pyc,, +transformers/models/plbart/__pycache__/convert_plbart_original_checkpoint_to_torch.cpython-310.pyc,, +transformers/models/plbart/__pycache__/modeling_plbart.cpython-310.pyc,, +transformers/models/plbart/__pycache__/tokenization_plbart.cpython-310.pyc,, 
+transformers/models/plbart/configuration_plbart.py,sha256=dw0_2B0Ij7jaYwqa5kOmn8S8D7CAoNgU7NqGw1B_AU4,8532 +transformers/models/plbart/convert_plbart_original_checkpoint_to_torch.py,sha256=BOXNudNSr1xevmHnvNpa_4ya3Q89m6J4lndQhCWSLB8,3553 +transformers/models/plbart/modeling_plbart.py,sha256=3vUCM64r82v7EP8mtpDH9hVXwQGHi24WSZZAVSvAXkg,82488 +transformers/models/plbart/tokenization_plbart.py,sha256=XIGZiAIM7Z8edC7GgAqk3RVjDapVIYxltIA-Y_RmcKM,18892 +transformers/models/poolformer/__init__.py,sha256=pkSn3nUzqUgBbSmXc7vFD6xYpMlPAuPkhCptxKCdB8s,1097 +transformers/models/poolformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/poolformer/__pycache__/configuration_poolformer.cpython-310.pyc,, +transformers/models/poolformer/__pycache__/convert_poolformer_original_to_pytorch.cpython-310.pyc,, +transformers/models/poolformer/__pycache__/feature_extraction_poolformer.cpython-310.pyc,, +transformers/models/poolformer/__pycache__/image_processing_poolformer.cpython-310.pyc,, +transformers/models/poolformer/__pycache__/modeling_poolformer.cpython-310.pyc,, +transformers/models/poolformer/configuration_poolformer.py,sha256=08vAc_wIJXHt-lO09RK6rezPGn_i4i5WcM-cnVm_0mA,5632 +transformers/models/poolformer/convert_poolformer_original_to_pytorch.py,sha256=Vvlp7ju7kr2sg1NdXKma6vYGABjs4sVhPKhgFKPJRpk,7947 +transformers/models/poolformer/feature_extraction_poolformer.py,sha256=wj7ZMJPbN-YgxbMqOaIu61VL66QdjxDPuzdi38SVLnY,1257 +transformers/models/poolformer/image_processing_poolformer.py,sha256=wSvnkMwYTBuBsmbVp96lXfX5Boh1VQuVlEpqpfUfJ90,17850 +transformers/models/poolformer/modeling_poolformer.py,sha256=j-bJCPk7paqTHPp-D2aHMQGhkqvAkRfPlDiUJa-ES18,17877 +transformers/models/pop2piano/__init__.py,sha256=I2PPcFi-p0X5py7dLqobymv3E9g-mUv1QRn0luyPlIk,999 +transformers/models/pop2piano/__pycache__/__init__.cpython-310.pyc,, +transformers/models/pop2piano/__pycache__/configuration_pop2piano.cpython-310.pyc,, 
+transformers/models/pop2piano/__pycache__/convert_pop2piano_weights_to_hf.cpython-310.pyc,, +transformers/models/pop2piano/__pycache__/feature_extraction_pop2piano.cpython-310.pyc,, +transformers/models/pop2piano/__pycache__/modeling_pop2piano.cpython-310.pyc,, +transformers/models/pop2piano/__pycache__/processing_pop2piano.cpython-310.pyc,, +transformers/models/pop2piano/__pycache__/tokenization_pop2piano.cpython-310.pyc,, +transformers/models/pop2piano/configuration_pop2piano.py,sha256=aAnTDZdBrl19Kg6eOuPs13cz1_9ITlN7IgxysOqDGT4,5959 +transformers/models/pop2piano/convert_pop2piano_weights_to_hf.py,sha256=5B4ARCF8lCWeW2fsgNe0lgy5nmYvLmNPQFyg5O3kj-A,8624 +transformers/models/pop2piano/feature_extraction_pop2piano.py,sha256=eHyA7sjP-tkY4QXHTGYl83RCnCEoNgcQ9Tf6rSd9Pb8,19838 +transformers/models/pop2piano/modeling_pop2piano.py,sha256=lrMqxkqgz2HO2Y2mhLtFNCKjJ1smKwvTMoJhYKpYiZo,72087 +transformers/models/pop2piano/processing_pop2piano.py,sha256=QmobKR_Z3Ro_6t12TXMaileqUH1lAjGVY6n2wOevzwY,5524 +transformers/models/pop2piano/tokenization_pop2piano.py,sha256=Y3grUs2_4YvgUDxDAhe4hBBJe0RyAZq_ofx11jw1M5A,32677 +transformers/models/prophetnet/__init__.py,sha256=TYI21JDlj449kTgKAOtUBpuxVv5L_I70CDjofSZ627M,1044 +transformers/models/prophetnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/prophetnet/__pycache__/configuration_prophetnet.cpython-310.pyc,, +transformers/models/prophetnet/__pycache__/convert_prophetnet_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/prophetnet/__pycache__/modeling_prophetnet.cpython-310.pyc,, +transformers/models/prophetnet/__pycache__/tokenization_prophetnet.cpython-310.pyc,, +transformers/models/prophetnet/configuration_prophetnet.py,sha256=amGaPOOT0kJjxVPVJ2oyZN_XdC0LQ1LZVuJMheTCyF4,8903 +transformers/models/prophetnet/convert_prophetnet_original_pytorch_checkpoint_to_pytorch.py,sha256=NsV4OQ2M5Qmg-RzBOhW588PP2zMJADXwcE1vFIA9FPE,7054 
+transformers/models/prophetnet/modeling_prophetnet.py,sha256=_t8iOZeV0EsRd8kgU0pF9RrYUervyf517hQEZ2sP2mM,114690 +transformers/models/prophetnet/tokenization_prophetnet.py,sha256=l3og-JwYmHAKWtv2yWv9X3lz1Y13DsmxTdRo4xCSb_Y,21236 +transformers/models/pvt/__init__.py,sha256=wxkffT1tVLlQ14D466ickBR_-mjAZaV0vRLDkwKWBmE,1027 +transformers/models/pvt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/pvt/__pycache__/configuration_pvt.cpython-310.pyc,, +transformers/models/pvt/__pycache__/convert_pvt_to_pytorch.cpython-310.pyc,, +transformers/models/pvt/__pycache__/image_processing_pvt.cpython-310.pyc,, +transformers/models/pvt/__pycache__/modeling_pvt.cpython-310.pyc,, +transformers/models/pvt/configuration_pvt.py,sha256=H0cHrBRM-Ex9XbxEE3oHBRYDd1iMP58OpBKac7NOv6E,6962 +transformers/models/pvt/convert_pvt_to_pytorch.py,sha256=BLoYbECmvvKnWQQqMjM3zlm8lMjYc6L8xrcYwionges,9737 +transformers/models/pvt/image_processing_pvt.py,sha256=LeZI5PBVq_2_y-SrMPT-qoYVpbTzr2qwLlvdv2BrNsg,13864 +transformers/models/pvt/modeling_pvt.py,sha256=w6w5Ty-6ikJjvcXBHHu0ONNJGN_-o55irN-WQLmMyFc,28491 +transformers/models/pvt_v2/__init__.py,sha256=LkmqeLd7cZGKTFX_2d9_jU0sj_bDlML042kr_vMJTLw,993 +transformers/models/pvt_v2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/pvt_v2/__pycache__/configuration_pvt_v2.cpython-310.pyc,, +transformers/models/pvt_v2/__pycache__/convert_pvt_v2_to_pytorch.cpython-310.pyc,, +transformers/models/pvt_v2/__pycache__/modeling_pvt_v2.cpython-310.pyc,, +transformers/models/pvt_v2/configuration_pvt_v2.py,sha256=3UNQlRykqWBvv1gmg_t4EFFw8YkQyHKeRxWon0dvLxc,7991 +transformers/models/pvt_v2/convert_pvt_v2_to_pytorch.py,sha256=OqYTYB1bssEh4C-AwCFG0VDDcEWZa1Su5kUkrn_UcOo,12077 +transformers/models/pvt_v2/modeling_pvt_v2.py,sha256=6QVe1Pi2BFVyFrdl7GQrseSYD-MpGqU_LGYHKW0io7w,29516 +transformers/models/qwen2/__init__.py,sha256=qoTTnT8A-pEg5kXdtnX0NgkIszex-35xul2PvJ3ab48,2434 +transformers/models/qwen2/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/qwen2/__pycache__/configuration_qwen2.cpython-310.pyc,, +transformers/models/qwen2/__pycache__/modeling_qwen2.cpython-310.pyc,, +transformers/models/qwen2/__pycache__/modular_qwen2.cpython-310.pyc,, +transformers/models/qwen2/__pycache__/tokenization_qwen2.cpython-310.pyc,, +transformers/models/qwen2/__pycache__/tokenization_qwen2_fast.cpython-310.pyc,, +transformers/models/qwen2/configuration_qwen2.py,sha256=skdfTz4sLCb1z5BYiXoiTK0-fPoR5xq6QgF0fdNsKxc,10517 +transformers/models/qwen2/modeling_qwen2.py,sha256=AewCRtIJCS_XlQcnCl1Y0lOSIx6Los_YZhQzMnuA6Jc,50822 +transformers/models/qwen2/modular_qwen2.py,sha256=o33e_M4VQ-WLSowSZqQwducKplA9sVFdYMp1fN-D9sw,5269 +transformers/models/qwen2/tokenization_qwen2.py,sha256=y9hRJ6oYYRa_4UyoQUPU_BlsrnTPKoEByiCQ3zelSmE,13913 +transformers/models/qwen2/tokenization_qwen2_fast.py,sha256=NL0QjEs36hiJUo0yu6X3-kp74LAjioKyoJeqnxhdsY8,5182 +transformers/models/qwen2_audio/__init__.py,sha256=KaUmP3FK3GdeWvbunzyp1QjBki0USS4E80NlvhaJ3D8,1045 +transformers/models/qwen2_audio/__pycache__/__init__.cpython-310.pyc,, +transformers/models/qwen2_audio/__pycache__/configuration_qwen2_audio.cpython-310.pyc,, +transformers/models/qwen2_audio/__pycache__/modeling_qwen2_audio.cpython-310.pyc,, +transformers/models/qwen2_audio/__pycache__/processing_qwen2_audio.cpython-310.pyc,, +transformers/models/qwen2_audio/configuration_qwen2_audio.py,sha256=XIpu1XkgfrpKbbou17XQ8Rxa0KoHBIKJGk5CGY2SaGE,8649 +transformers/models/qwen2_audio/modeling_qwen2_audio.py,sha256=uSbLKs_aJCw-FzIi7592TJMH4G5z3iflqBLN8Xz0LRw,69918 +transformers/models/qwen2_audio/processing_qwen2_audio.py,sha256=bHnuweFNMDR1u3_fnwqi89b9nmN6b0VExdZpcU2bf_I,11959 +transformers/models/qwen2_moe/__init__.py,sha256=TZM20WtUr1UyV-hDDgq5B-qFT4aUulMpjWwSUNdUs2w,999 +transformers/models/qwen2_moe/__pycache__/__init__.cpython-310.pyc,, +transformers/models/qwen2_moe/__pycache__/configuration_qwen2_moe.cpython-310.pyc,, 
+transformers/models/qwen2_moe/__pycache__/modeling_qwen2_moe.cpython-310.pyc,, +transformers/models/qwen2_moe/configuration_qwen2_moe.py,sha256=z6wUlAyAq8V2DIt8P1-p9VQhwnrszzZn_We1-prxMLU,12768 +transformers/models/qwen2_moe/modeling_qwen2_moe.py,sha256=D9NC6WBr_xNzZ3V-C2xR1hzith9kpn7CfoUiWVfe0fM,76610 +transformers/models/qwen2_vl/__init__.py,sha256=WNh5c1me4R5A8wBzbBVRc5scK9TwNPBoBzL0rES0HYw,1081 +transformers/models/qwen2_vl/__pycache__/__init__.cpython-310.pyc,, +transformers/models/qwen2_vl/__pycache__/configuration_qwen2_vl.cpython-310.pyc,, +transformers/models/qwen2_vl/__pycache__/image_processing_qwen2_vl.cpython-310.pyc,, +transformers/models/qwen2_vl/__pycache__/modeling_qwen2_vl.cpython-310.pyc,, +transformers/models/qwen2_vl/__pycache__/processing_qwen2_vl.cpython-310.pyc,, +transformers/models/qwen2_vl/configuration_qwen2_vl.py,sha256=VD7kZGvM6OC2jP1Qkvc_xR_cXe1Ewn9QydEO0NjXt9o,12180 +transformers/models/qwen2_vl/image_processing_qwen2_vl.py,sha256=Nc6JtTUeCH1kavoEzSeMUA582LATmVIDaqqjXPTOsFg,22398 +transformers/models/qwen2_vl/modeling_qwen2_vl.py,sha256=_Z8SJ6C1N6DSu0S75A0X0895keooOf4haYwj550pcUo,87266 +transformers/models/qwen2_vl/processing_qwen2_vl.py,sha256=tZjwLokt0gv5ju1sc59uhXClsiFfXnoSemPu6nVgGBU,9516 +transformers/models/rag/__init__.py,sha256=89sLlT4QJ96h0U-X6FmTdfSNJ8NjDjTpqyI1yK0L1Cw,1091 +transformers/models/rag/__pycache__/__init__.cpython-310.pyc,, +transformers/models/rag/__pycache__/configuration_rag.cpython-310.pyc,, +transformers/models/rag/__pycache__/modeling_rag.cpython-310.pyc,, +transformers/models/rag/__pycache__/modeling_tf_rag.cpython-310.pyc,, +transformers/models/rag/__pycache__/retrieval_rag.cpython-310.pyc,, +transformers/models/rag/__pycache__/tokenization_rag.cpython-310.pyc,, +transformers/models/rag/configuration_rag.py,sha256=i6XOIi_KQ2Lft2GzrrlQQHnn8LJhhINqhEzMC5SgLPw,8513 +transformers/models/rag/modeling_rag.py,sha256=W_0--XKi9KZVqzd58gEw62HmQnRId4zd7AtbSn_6Zjg,86376 
+transformers/models/rag/modeling_tf_rag.py,sha256=hwmDt8BCPOeWjh2lVfmhwcxONd0f8WgVH4TDQED7Y8I,88917 +transformers/models/rag/retrieval_rag.py,sha256=Jmz6sr0QI-ailZ7CYQguZjTTK18kQxPGbDDxQckGZe4,29951 +transformers/models/rag/tokenization_rag.py,sha256=3ZOzFZ1PV9sxaBkTt8F0tg5IFim-vwoyqH3ekeuHqIk,4606 +transformers/models/recurrent_gemma/__init__.py,sha256=i86Cydx-eAdwsVMjNc0yG9hGxe_amyfAdvF5Eg-UCGM,1011 +transformers/models/recurrent_gemma/__pycache__/__init__.cpython-310.pyc,, +transformers/models/recurrent_gemma/__pycache__/configuration_recurrent_gemma.cpython-310.pyc,, +transformers/models/recurrent_gemma/__pycache__/convert_recurrent_gemma_to_hf.cpython-310.pyc,, +transformers/models/recurrent_gemma/__pycache__/modeling_recurrent_gemma.cpython-310.pyc,, +transformers/models/recurrent_gemma/configuration_recurrent_gemma.py,sha256=ZsGiKPvFxUwJzd5xVUxJ2OkfapLGdNKJMbFEJFMcX9U,7750 +transformers/models/recurrent_gemma/convert_recurrent_gemma_to_hf.py,sha256=jZGkZ2FmNFWsZXz37gf86NjLRFbgLTK6C-ZO6-JChks,7965 +transformers/models/recurrent_gemma/modeling_recurrent_gemma.py,sha256=Jxf9kbDaXvwtb-0FAp9hxt6axq6imkAcqERYR9LFsZ0,41747 +transformers/models/reformer/__init__.py,sha256=zjiMjHIRPssQ8pVa4fQ0zMCCn0ee_mtJt6wc9J23QYQ,1084 +transformers/models/reformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/reformer/__pycache__/configuration_reformer.cpython-310.pyc,, +transformers/models/reformer/__pycache__/convert_reformer_trax_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/reformer/__pycache__/modeling_reformer.cpython-310.pyc,, +transformers/models/reformer/__pycache__/tokenization_reformer.cpython-310.pyc,, +transformers/models/reformer/__pycache__/tokenization_reformer_fast.cpython-310.pyc,, +transformers/models/reformer/configuration_reformer.py,sha256=ewfu4yWtk_TwV6MMzrkAtYcP9nkB-5Wv3Deh442Cb7M,13196 +transformers/models/reformer/convert_reformer_trax_checkpoint_to_pytorch.py,sha256=pwYPRNmvEk0FRpbR53-pOACHGqv3nzexvtlHaABQIrw,7950 
+transformers/models/reformer/modeling_reformer.py,sha256=Bcj35PWQKVU3xt7o0hHxNU7Mbag84fLAoA6tALGgvoE,115785 +transformers/models/reformer/tokenization_reformer.py,sha256=BqwwxreW56tOP7QBPUJY_f0yRnQUOv_e1v-ZLLK2keA,6760 +transformers/models/reformer/tokenization_reformer_fast.py,sha256=nfNUC7uZtvyKAQfQql5OpEz9ColY6m6VH-qE4wL56Q8,4283 +transformers/models/regnet/__init__.py,sha256=X_FU3wnZJ5KkCmRi4EyHk6ZUm_f0--YyyTS8lrknS9Y,1071 +transformers/models/regnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/regnet/__pycache__/configuration_regnet.cpython-310.pyc,, +transformers/models/regnet/__pycache__/convert_regnet_seer_10b_to_pytorch.cpython-310.pyc,, +transformers/models/regnet/__pycache__/convert_regnet_to_pytorch.cpython-310.pyc,, +transformers/models/regnet/__pycache__/modeling_flax_regnet.cpython-310.pyc,, +transformers/models/regnet/__pycache__/modeling_regnet.cpython-310.pyc,, +transformers/models/regnet/__pycache__/modeling_tf_regnet.cpython-310.pyc,, +transformers/models/regnet/configuration_regnet.py,sha256=5_p_leo8Cvb4ZiHJGISKq_rGcTnNaw98LAG0sEBz_Pg,3974 +transformers/models/regnet/convert_regnet_seer_10b_to_pytorch.py,sha256=PH1ePsaNgxZZu9YWob3FTbWZkxnLKoN9bcJCIVK2pYI,11754 +transformers/models/regnet/convert_regnet_to_pytorch.py,sha256=kyfKbY-kwlaj-VsXnMZJZwaqZPbuyR-MU58lFDG9F_Y,18702 +transformers/models/regnet/modeling_flax_regnet.py,sha256=g0LNoW8SlhmHZ34MUOpewyAfycwvSSNXiv7_Ia4pNeY,28507 +transformers/models/regnet/modeling_regnet.py,sha256=laDinWyMxbVrs-kYtFJ9UzAoFr3BBSt4mXqE8xwMuxY,17772 +transformers/models/regnet/modeling_tf_regnet.py,sha256=n-24MtY8UVuuYKLVwkEaRAIv0QFxCpHXI_YD2pY_LK4,24391 +transformers/models/rembert/__init__.py,sha256=Gif9TX1kvmD5iVWqsViSjxKYIDhR3FiBfp_QfA7U7i4,1119 +transformers/models/rembert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/rembert/__pycache__/configuration_rembert.cpython-310.pyc,, +transformers/models/rembert/__pycache__/convert_rembert_tf_checkpoint_to_pytorch.cpython-310.pyc,, 
+transformers/models/rembert/__pycache__/modeling_rembert.cpython-310.pyc,, +transformers/models/rembert/__pycache__/modeling_tf_rembert.cpython-310.pyc,, +transformers/models/rembert/__pycache__/tokenization_rembert.cpython-310.pyc,, +transformers/models/rembert/__pycache__/tokenization_rembert_fast.cpython-310.pyc,, +transformers/models/rembert/configuration_rembert.py,sha256=VvcwZWc3akBef7VeiDAGMx0inuob_zilhrGsKvl9smA,7291 +transformers/models/rembert/convert_rembert_tf_checkpoint_to_pytorch.py,sha256=_FwapBBnk_xv1QPby_PGnFvIZfGe9vooclAUwU3Ve10,2207 +transformers/models/rembert/modeling_rembert.py,sha256=02Jq7AMeoK3DKX2egbTmE5SQxa4NM9GsrMbGQsM2wx8,67504 +transformers/models/rembert/modeling_tf_rembert.py,sha256=99WKl_IgABQlt1O58QnCnGInogXh0HSzlvTOrmCedzY,77981 +transformers/models/rembert/tokenization_rembert.py,sha256=Y-sstejHDNDw-Cb3kJbvEN_j3g2zaGkzIpx0QntkHII,10625 +transformers/models/rembert/tokenization_rembert_fast.py,sha256=t_YjcmDD5MzFoQ9Wl2aH2IBUUTSRJFcy4EHtZJYZu_Y,10032 +transformers/models/resnet/__init__.py,sha256=NCgMoczDbEI_XDWkWNWKIKGPYeohOC95f0o2X-Vh2vA,1071 +transformers/models/resnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/resnet/__pycache__/configuration_resnet.cpython-310.pyc,, +transformers/models/resnet/__pycache__/convert_resnet_to_pytorch.cpython-310.pyc,, +transformers/models/resnet/__pycache__/modeling_flax_resnet.cpython-310.pyc,, +transformers/models/resnet/__pycache__/modeling_resnet.cpython-310.pyc,, +transformers/models/resnet/__pycache__/modeling_tf_resnet.cpython-310.pyc,, +transformers/models/resnet/configuration_resnet.py,sha256=K8n9ba6A2OiIqmFAfTJKQUF4q8o9xgq14xdZisEpEqc,6067 +transformers/models/resnet/convert_resnet_to_pytorch.py,sha256=fVar-ifk_-_sENkZiTA93wIaEunPMInlp0mmDdH5KOQ,7286 +transformers/models/resnet/modeling_flax_resnet.py,sha256=RsRBMcXQ7NDoMx3L0ip7Tfu8eNOCwrDI6KUkCpxccsg,24704 +transformers/models/resnet/modeling_resnet.py,sha256=DpNfzcFFTi8r4YHsmzNQGX8ZB0QcUUV8iGwEEFlWBwY,19891 
+transformers/models/resnet/modeling_tf_resnet.py,sha256=nSUzzdq6HBE1qhxMvI5tUc3O2_Hyjj2tvNACMQgKPFE,23741 +transformers/models/roberta/__init__.py,sha256=p1qYu_9qpmxsxMfXuoxK-VrmRQMEshwiM8Ekoij2J1M,1160 +transformers/models/roberta/__pycache__/__init__.cpython-310.pyc,, +transformers/models/roberta/__pycache__/configuration_roberta.cpython-310.pyc,, +transformers/models/roberta/__pycache__/convert_roberta_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/roberta/__pycache__/modeling_flax_roberta.cpython-310.pyc,, +transformers/models/roberta/__pycache__/modeling_roberta.cpython-310.pyc,, +transformers/models/roberta/__pycache__/modeling_tf_roberta.cpython-310.pyc,, +transformers/models/roberta/__pycache__/tokenization_roberta.cpython-310.pyc,, +transformers/models/roberta/__pycache__/tokenization_roberta_fast.cpython-310.pyc,, +transformers/models/roberta/configuration_roberta.py,sha256=r1rJghjnlXorwp-ZqR45IAKaZJMMORD_or7GHfk8dgY,7311 +transformers/models/roberta/convert_roberta_original_pytorch_checkpoint_to_pytorch.py,sha256=5sX5PtUseHWXH78xTBXqGsrzn8YttrZsuN0E0H9CWi4,8001 +transformers/models/roberta/modeling_flax_roberta.py,sha256=lPTxEfaLr4zkIq_Jm404y8fQCR3szkxTVgA1QAXBO3w,57270 +transformers/models/roberta/modeling_roberta.py,sha256=h_Ozez5RydUgUtts19t-QQeLmhPoGVLQgIVUfnvvBNE,78033 +transformers/models/roberta/modeling_tf_roberta.py,sha256=4UTC7zcqsFTqHzUQjPL0Mfh8A1-SDEzkIxeB2T3RyCk,80179 +transformers/models/roberta/tokenization_roberta.py,sha256=5wuB8fCNXGXk4svqPNOcXkR9mLA12JvYqr9TlpVECrg,16484 +transformers/models/roberta/tokenization_roberta_fast.py,sha256=Ay29BGVvDcsd91D4yVPhYLNI21-DLWlGweeegTjZ-vU,10991 +transformers/models/roberta_prelayernorm/__init__.py,sha256=QsVJJaoujnLHyCgwSsz53MV88vI183tTGJNXHDCHCAc,1127 +transformers/models/roberta_prelayernorm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/roberta_prelayernorm/__pycache__/configuration_roberta_prelayernorm.cpython-310.pyc,, 
+transformers/models/roberta_prelayernorm/__pycache__/convert_roberta_prelayernorm_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/roberta_prelayernorm/__pycache__/modeling_flax_roberta_prelayernorm.cpython-310.pyc,, +transformers/models/roberta_prelayernorm/__pycache__/modeling_roberta_prelayernorm.cpython-310.pyc,, +transformers/models/roberta_prelayernorm/__pycache__/modeling_tf_roberta_prelayernorm.cpython-310.pyc,, +transformers/models/roberta_prelayernorm/configuration_roberta_prelayernorm.py,sha256=DM0trohLskvy5OYLcDjpEa5ri-htNy5dgISllI3b0og,7883 +transformers/models/roberta_prelayernorm/convert_roberta_prelayernorm_original_pytorch_checkpoint_to_pytorch.py,sha256=DWhNuDUo_dZsWSjJIPzNqxAY9KzWlAWnpgNxE88qpWQ,2974 +transformers/models/roberta_prelayernorm/modeling_flax_roberta_prelayernorm.py,sha256=HiEkWv_mTjRU6eT9er8jlJmLROt0RMafp8MJKct3C2A,60927 +transformers/models/roberta_prelayernorm/modeling_roberta_prelayernorm.py,sha256=Wbw9glGv8NweQFSO0S1ChvQu3THUGGmWr9qVQYO3RPU,72939 +transformers/models/roberta_prelayernorm/modeling_tf_roberta_prelayernorm.py,sha256=9Ow8t-WqDGGDcUQB2KGyDZPOn1NCouxMmVAUsfL8FCE,83452 +transformers/models/roc_bert/__init__.py,sha256=4CveMGU-dY3nV4E6x-Xpb1jicRniwrPuSOrY8-SHIUI,1038 +transformers/models/roc_bert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/roc_bert/__pycache__/configuration_roc_bert.cpython-310.pyc,, +transformers/models/roc_bert/__pycache__/modeling_roc_bert.cpython-310.pyc,, +transformers/models/roc_bert/__pycache__/tokenization_roc_bert.cpython-310.pyc,, +transformers/models/roc_bert/configuration_roc_bert.py,sha256=XpKYrVUjci1ykruLmUKTUlUc7RjHXq7AW71w2wc6ars,8528 +transformers/models/roc_bert/modeling_roc_bert.py,sha256=MhsAZoJJi5jBft70m9XSg_mvxa7MqLovlmwmncGntQk,93641 +transformers/models/roc_bert/tokenization_roc_bert.py,sha256=5HdBmjNFHR3uJUoiTnU8WJS2zzrgMnmDFshlDeBLa6M,50739 
+transformers/models/roformer/__init__.py,sha256=v1CIjowYMq6aN-V9gyl-RWlMi_uQQxopuvEv76geFqk,1166 +transformers/models/roformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/roformer/__pycache__/configuration_roformer.cpython-310.pyc,, +transformers/models/roformer/__pycache__/convert_roformer_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/roformer/__pycache__/modeling_flax_roformer.cpython-310.pyc,, +transformers/models/roformer/__pycache__/modeling_roformer.cpython-310.pyc,, +transformers/models/roformer/__pycache__/modeling_tf_roformer.cpython-310.pyc,, +transformers/models/roformer/__pycache__/tokenization_roformer.cpython-310.pyc,, +transformers/models/roformer/__pycache__/tokenization_roformer_fast.cpython-310.pyc,, +transformers/models/roformer/__pycache__/tokenization_utils.cpython-310.pyc,, +transformers/models/roformer/configuration_roformer.py,sha256=lqqzrsI95wVya5jcUoeUjQmTy76AEtTEEwJ-1TLNIcE,6856 +transformers/models/roformer/convert_roformer_original_tf_checkpoint_to_pytorch.py,sha256=TS6-r9GV2DJXof9onFihBgvXXdn08al0_kJutS8cwEQ,2239 +transformers/models/roformer/modeling_flax_roformer.py,sha256=MsQ62YShLRpfv_DA9twbYm4k12JCiKlWQDHdpICSElw,39370 +transformers/models/roformer/modeling_roformer.py,sha256=HIsxlIjmZAnIbWLXg9qcvqB47h-6Py-FYNLiYocOlQ4,68678 +transformers/models/roformer/modeling_tf_roformer.py,sha256=F28YCrWA8MamBsLo0oEA557uatj4Oj1SPhT7gnMA2Xw,66222 +transformers/models/roformer/tokenization_roformer.py,sha256=T5xTBpn2oJicg8O6Ooivi-n2Sw-vrlhmSp6tNdZQ_qw,22011 +transformers/models/roformer/tokenization_roformer_fast.py,sha256=b2p3BdYUv-AcUpG-6zbUK5XRliuqtd8trcwuORkg5XU,6717 +transformers/models/roformer/tokenization_utils.py,sha256=0ciH13qW2kCa5my1rPwfwAuSXX-jGzN0nzemvGvOBxw,2652 +transformers/models/rt_detr/__init__.py,sha256=c9Y3NeKQwBP46tyFF99kjqTngoIWhLMq7XvzEJOfLaY,1181 +transformers/models/rt_detr/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/rt_detr/__pycache__/configuration_rt_detr.cpython-310.pyc,, +transformers/models/rt_detr/__pycache__/configuration_rt_detr_resnet.cpython-310.pyc,, +transformers/models/rt_detr/__pycache__/convert_rt_detr_original_pytorch_checkpoint_to_hf.cpython-310.pyc,, +transformers/models/rt_detr/__pycache__/image_processing_rt_detr.cpython-310.pyc,, +transformers/models/rt_detr/__pycache__/image_processing_rt_detr_fast.cpython-310.pyc,, +transformers/models/rt_detr/__pycache__/modeling_rt_detr.cpython-310.pyc,, +transformers/models/rt_detr/__pycache__/modeling_rt_detr_resnet.cpython-310.pyc,, +transformers/models/rt_detr/__pycache__/modular_rt_detr.cpython-310.pyc,, +transformers/models/rt_detr/configuration_rt_detr.py,sha256=-nrNyKF-E1k_sWYe29bD54Ab51MKcZc2CN4pVXaIygc,18070 +transformers/models/rt_detr/configuration_rt_detr_resnet.py,sha256=SRxquPIXRdu5Xs6YWQgtzYyT3cyoIEig2KKpNeSFfKQ,5557 +transformers/models/rt_detr/convert_rt_detr_original_pytorch_checkpoint_to_hf.py,sha256=L2tEnPp4NNJlXsI-fBBpib0Vg76qL3xN-OLvrd429Jo,32779 +transformers/models/rt_detr/image_processing_rt_detr.py,sha256=G7AEnUq7Gs835nKR1i-tNHoUx0rO4PpM63QdSIfbHpQ,51648 +transformers/models/rt_detr/image_processing_rt_detr_fast.py,sha256=wbOBCeIrhWdDJO8-1HPqSeDnMYLUUco0Nrgce9WyEQY,38583 +transformers/models/rt_detr/modeling_rt_detr.py,sha256=9k65LHKanCcq98iL19cm_SGSdVEeqV9j6qEcEFWei_0,104651 +transformers/models/rt_detr/modeling_rt_detr_resnet.py,sha256=r8jDRnj_U6iq2ddtoh2Ay2kHHZFRbmnSk-inIb0yzeg,16462 +transformers/models/rt_detr/modular_rt_detr.py,sha256=-fShpivFvVJO8SceHAcHavPWRjdnpOV0tsw4juFhNVw,29338 +transformers/models/rwkv/__init__.py,sha256=HAiwEvW1j_xuHj_PbmN25srY9RtA1gLmN_0RWvAyG78,989 +transformers/models/rwkv/__pycache__/__init__.cpython-310.pyc,, +transformers/models/rwkv/__pycache__/configuration_rwkv.cpython-310.pyc,, +transformers/models/rwkv/__pycache__/convert_rwkv_checkpoint_to_hf.cpython-310.pyc,, +transformers/models/rwkv/__pycache__/modeling_rwkv.cpython-310.pyc,, 
+transformers/models/rwkv/configuration_rwkv.py,sha256=SEMdtDGoCzSlG7sgHlc-I2WbIFsIBz00qrJdSUZ1xkY,5203 +transformers/models/rwkv/convert_rwkv_checkpoint_to_hf.py,sha256=-Eg98Q3TrpFV8SvSTdKoYzZCYOwGa1xiEVvOkkx2Keo,7371 +transformers/models/rwkv/modeling_rwkv.py,sha256=lVoLTAxFZlZ_eotI-u96fpCgJZvzBTYg3Bo3d0YaO6Y,37083 +transformers/models/sam/__init__.py,sha256=vLpuKLgQZgbv3WGjn6Kr4bawb_4ZmYsrpNg2ojKkHiE,1096 +transformers/models/sam/__pycache__/__init__.cpython-310.pyc,, +transformers/models/sam/__pycache__/configuration_sam.cpython-310.pyc,, +transformers/models/sam/__pycache__/convert_sam_to_hf.cpython-310.pyc,, +transformers/models/sam/__pycache__/image_processing_sam.cpython-310.pyc,, +transformers/models/sam/__pycache__/modeling_sam.cpython-310.pyc,, +transformers/models/sam/__pycache__/modeling_tf_sam.cpython-310.pyc,, +transformers/models/sam/__pycache__/processing_sam.cpython-310.pyc,, +transformers/models/sam/configuration_sam.py,sha256=nBf9jKHgrI_OS49j-1laQ-IloCTPb12dWXHffcCYaL4,14165 +transformers/models/sam/convert_sam_to_hf.py,sha256=Ter22qOhmN1eQyKi387HL0KCBrirnPnr1asU4BobQkk,8543 +transformers/models/sam/image_processing_sam.py,sha256=JgTBgRFpM5OnYm9GVol4gLYgzCgN0YDlVBYy_k4CMSY,66752 +transformers/models/sam/modeling_sam.py,sha256=o_9LYX8IQk8ZqM0DN4G-Aab_JmMhQaGNLR87S9IPnxs,71324 +transformers/models/sam/modeling_tf_sam.py,sha256=Gx3NwM484-V3uBUeIV0qvcT7tcBLJhBrLVE-RUr91mA,75501 +transformers/models/sam/processing_sam.py,sha256=BhxmbHKBC27rWGe23WUSZn9SNTTLgt4qdYXxHBkadbk,12930 +transformers/models/seamless_m4t/__init__.py,sha256=Y5c_W1E83fh8ToTMqF4NcReXzKZiTDv3A4ePoNUxXDg,1194 +transformers/models/seamless_m4t/__pycache__/__init__.cpython-310.pyc,, +transformers/models/seamless_m4t/__pycache__/configuration_seamless_m4t.cpython-310.pyc,, +transformers/models/seamless_m4t/__pycache__/convert_fairseq2_to_hf.cpython-310.pyc,, +transformers/models/seamless_m4t/__pycache__/feature_extraction_seamless_m4t.cpython-310.pyc,, 
+transformers/models/seamless_m4t/__pycache__/modeling_seamless_m4t.cpython-310.pyc,, +transformers/models/seamless_m4t/__pycache__/processing_seamless_m4t.cpython-310.pyc,, +transformers/models/seamless_m4t/__pycache__/tokenization_seamless_m4t.cpython-310.pyc,, +transformers/models/seamless_m4t/__pycache__/tokenization_seamless_m4t_fast.cpython-310.pyc,, +transformers/models/seamless_m4t/configuration_seamless_m4t.py,sha256=P7ZjMZwVx_BXv6Thl_qeWHrW5yVLKsRKgecZAgCPZW8,23497 +transformers/models/seamless_m4t/convert_fairseq2_to_hf.py,sha256=AQ75kQXD0Yv1Xn5y_mf3FhrHShAkO66wqg_Y-o93qy4,15958 +transformers/models/seamless_m4t/feature_extraction_seamless_m4t.py,sha256=q1tQ_fL_rO89GaohPfh9fUVXmxvGEuYc6rVEZL_k4k8,13643 +transformers/models/seamless_m4t/modeling_seamless_m4t.py,sha256=0I4GgU9kiRPF0wvt1gvn11H3AS5KK3wyC_9Mbi34sgc,199258 +transformers/models/seamless_m4t/processing_seamless_m4t.py,sha256=voYavxsi-yG0I8lSsGmevP2zKW-qB2CP-kk3-jMDwPo,5930 +transformers/models/seamless_m4t/tokenization_seamless_m4t.py,sha256=cf1SIsG5Cxw368x7uMN58scaFS_aq2se5E3PSMnl0UE,26013 +transformers/models/seamless_m4t/tokenization_seamless_m4t_fast.py,sha256=UMmq_S9I2tGjVWc9dYATqF-wenc3pxA7gy7oV7CPhC8,19926 +transformers/models/seamless_m4t_v2/__init__.py,sha256=mMY04PBMrOwTIQLq01RHqZjssvrSYl3UDhP5Y5vFifs,1011 +transformers/models/seamless_m4t_v2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/seamless_m4t_v2/__pycache__/configuration_seamless_m4t_v2.cpython-310.pyc,, +transformers/models/seamless_m4t_v2/__pycache__/convert_fairseq2_to_hf.cpython-310.pyc,, +transformers/models/seamless_m4t_v2/__pycache__/modeling_seamless_m4t_v2.cpython-310.pyc,, +transformers/models/seamless_m4t_v2/configuration_seamless_m4t_v2.py,sha256=AGu4RlVCJkC5iGAvJGJhMFzJ7i-uNSkgmMfJHbdXS7w,24356 +transformers/models/seamless_m4t_v2/convert_fairseq2_to_hf.py,sha256=3fuW2IGoJpNKVGCE9YXrWbXZvdoko6cfQLyLMn5wycg,15082 
+transformers/models/seamless_m4t_v2/modeling_seamless_m4t_v2.py,sha256=b8gybvvLjIQULbFtsbEw5_E1nJPkvBHIhxQbI7ixj9Q,225709 +transformers/models/segformer/__init__.py,sha256=ITklna1wOGVI09TgGcRxn-rc2tYosLRov_Un0n5XHPo,1134 +transformers/models/segformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/segformer/__pycache__/configuration_segformer.cpython-310.pyc,, +transformers/models/segformer/__pycache__/convert_segformer_original_to_pytorch.cpython-310.pyc,, +transformers/models/segformer/__pycache__/feature_extraction_segformer.cpython-310.pyc,, +transformers/models/segformer/__pycache__/image_processing_segformer.cpython-310.pyc,, +transformers/models/segformer/__pycache__/modeling_segformer.cpython-310.pyc,, +transformers/models/segformer/__pycache__/modeling_tf_segformer.cpython-310.pyc,, +transformers/models/segformer/configuration_segformer.py,sha256=R4qcxjTrvHA3O2YyJ0MFPhR9FXGX1ynClDeK44loqU8,7420 +transformers/models/segformer/convert_segformer_original_to_pytorch.py,sha256=JxOpEDBJ_IQHNlvbXADlMg9PBQwPdSgI4ybc8kj2hPY,17091 +transformers/models/segformer/feature_extraction_segformer.py,sha256=dEAG-c8JoEQl2nvT9wXx_DasFq4pSHYHGZuxnc5ErWc,1249 +transformers/models/segformer/image_processing_segformer.py,sha256=9qk77S-AYykNLnAVUmO_WPfHKaWHt2jAoXNu7i1ynM8,22784 +transformers/models/segformer/modeling_segformer.py,sha256=eyY9KeHwf68MMmGZaJe3q1NwCqL6hGFl_GMMfFIWxoA,35550 +transformers/models/segformer/modeling_tf_segformer.py,sha256=6ggHaUQkVHRGWCp95lhKf55eGM2F51yGwyJ9c9v0SRM,43796 +transformers/models/seggpt/__init__.py,sha256=RzV8DKCX1lOWGqXv2BlE1R7T4QuEcdYAVy_csccLvEw,1036 +transformers/models/seggpt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/seggpt/__pycache__/configuration_seggpt.cpython-310.pyc,, +transformers/models/seggpt/__pycache__/convert_seggpt_to_hf.cpython-310.pyc,, +transformers/models/seggpt/__pycache__/image_processing_seggpt.cpython-310.pyc,, +transformers/models/seggpt/__pycache__/modeling_seggpt.cpython-310.pyc,, 
+transformers/models/seggpt/configuration_seggpt.py,sha256=_v6RESyTwPgfynZR8xjyJKuB_mRXBbVylb7AJ8USaKg,6492 +transformers/models/seggpt/convert_seggpt_to_hf.py,sha256=BZLBrTnCLMLgbcRZbUk8sflo1R_jpMS2Ru90GwpQtxw,9778 +transformers/models/seggpt/image_processing_seggpt.py,sha256=rEy1Mcx8UD9dFvfU5KN8wCbY-_y9xp3kP8IrAMKpaeY,31509 +transformers/models/seggpt/modeling_seggpt.py,sha256=HPXlLXvHy1iMQEkuj2hhfd2OEijY3tHeeTUq63OIplk,45867 +transformers/models/sew/__init__.py,sha256=POCF36ZRa_dr7oQhkDU2X17bsZuLoWI5V8DSihqr_vU,987 +transformers/models/sew/__pycache__/__init__.cpython-310.pyc,, +transformers/models/sew/__pycache__/configuration_sew.cpython-310.pyc,, +transformers/models/sew/__pycache__/convert_sew_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/sew/__pycache__/modeling_sew.cpython-310.pyc,, +transformers/models/sew/configuration_sew.py,sha256=3gBVqNaBxEehYEU-MXTunHM6qmDqQub35vOmjiirDiA,14207 +transformers/models/sew/convert_sew_original_pytorch_checkpoint_to_pytorch.py,sha256=SOWT4r47np8zJwW6fdWqtgCfuQAHjXUVUgMHZ6STCDA,12744 +transformers/models/sew/modeling_sew.py,sha256=OoqcODgmBRz78H1wh80PbNI-ZobvrvPsU1w1eJhAd-M,67413 +transformers/models/sew_d/__init__.py,sha256=zE9sw10e_a1d-8-Jsb75z5frCjkFGD0dZMHAXiNgGwk,991 +transformers/models/sew_d/__pycache__/__init__.cpython-310.pyc,, +transformers/models/sew_d/__pycache__/configuration_sew_d.cpython-310.pyc,, +transformers/models/sew_d/__pycache__/convert_sew_d_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/sew_d/__pycache__/modeling_sew_d.cpython-310.pyc,, +transformers/models/sew_d/configuration_sew_d.py,sha256=QKAeDcDJfVbPCLuxqBrAkXrOU8BeGRaYNkot83YKt-4,16175 +transformers/models/sew_d/convert_sew_d_original_pytorch_checkpoint_to_pytorch.py,sha256=5Cy_FfZr1-7Kgmu8ipmETQt33BTyPFjX93tuaPV_tRw,13574 +transformers/models/sew_d/modeling_sew_d.py,sha256=6veDdr--1kTMM6Fhl2fzTbz8AxKxdUkWfKk-XAklvIw,72858 
+transformers/models/siglip/__init__.py,sha256=8Obnadb5YHIFfrJ3oGHfxDwV0R7IVMBbpTTZPnBMDow,1112 +transformers/models/siglip/__pycache__/__init__.cpython-310.pyc,, +transformers/models/siglip/__pycache__/configuration_siglip.cpython-310.pyc,, +transformers/models/siglip/__pycache__/convert_siglip_to_hf.cpython-310.pyc,, +transformers/models/siglip/__pycache__/image_processing_siglip.cpython-310.pyc,, +transformers/models/siglip/__pycache__/modeling_siglip.cpython-310.pyc,, +transformers/models/siglip/__pycache__/processing_siglip.cpython-310.pyc,, +transformers/models/siglip/__pycache__/tokenization_siglip.cpython-310.pyc,, +transformers/models/siglip/configuration_siglip.py,sha256=buZJADRSQs_QHesht7h6QxkFaZNDgI9EFtyCcr85oMA,11947 +transformers/models/siglip/convert_siglip_to_hf.py,sha256=JWlMzjGcS_OgIE7KGlVX1yWQJsVpWG7X3rLCoXkUJ90,20829 +transformers/models/siglip/image_processing_siglip.py,sha256=EVnwPqwBxkggPy_CKql6HrYx7C-EwzVEtzXkZ7RMYuc,11953 +transformers/models/siglip/modeling_siglip.py,sha256=NnCV3w0TmxqRdeIuZFuk2iCGwNgYiBLpw8FkMe7ovMQ,69422 +transformers/models/siglip/processing_siglip.py,sha256=ReTGUQAWrCNTT9gdw4_T0qAxcK5sn_56w27z3LgK1es,7334 +transformers/models/siglip/tokenization_siglip.py,sha256=Lrv4eyGkAKU5AZwaR-Wg6vzZyP-pTkB1q6LMwA9mhw0,15984 +transformers/models/speech_encoder_decoder/__init__.py,sha256=0MwevN904dCSAb0dvznhDH--q-m3-MzdCtx0B-T5hpk,1081 +transformers/models/speech_encoder_decoder/__pycache__/__init__.cpython-310.pyc,, +transformers/models/speech_encoder_decoder/__pycache__/configuration_speech_encoder_decoder.cpython-310.pyc,, +transformers/models/speech_encoder_decoder/__pycache__/convert_mbart_wav2vec2_seq2seq_original_to_pytorch.cpython-310.pyc,, +transformers/models/speech_encoder_decoder/__pycache__/convert_speech_to_text_wav2vec2_seq2seq_original_to_pytorch.cpython-310.pyc,, +transformers/models/speech_encoder_decoder/__pycache__/modeling_flax_speech_encoder_decoder.cpython-310.pyc,, 
+transformers/models/speech_encoder_decoder/__pycache__/modeling_speech_encoder_decoder.cpython-310.pyc,, +transformers/models/speech_encoder_decoder/configuration_speech_encoder_decoder.py,sha256=vdMSRuVo6eOhpM2JVkwhCd28NbdJHDm6OBTCA914cm0,4683 +transformers/models/speech_encoder_decoder/convert_mbart_wav2vec2_seq2seq_original_to_pytorch.py,sha256=T7TlN5DEbuMVgAtK0xwljBOEGjHGTzZ6Ndj7V-XVScA,14753 +transformers/models/speech_encoder_decoder/convert_speech_to_text_wav2vec2_seq2seq_original_to_pytorch.py,sha256=wyaXE2ICf7YBoaJxi4W4a9gbxjGW7zwpdOxJCPBsLHA,11970 +transformers/models/speech_encoder_decoder/modeling_flax_speech_encoder_decoder.py,sha256=MDoGAgEKzMfIehXrt1P9rJpax_2-L-Cy8fz-p2F4ySA,44688 +transformers/models/speech_encoder_decoder/modeling_speech_encoder_decoder.py,sha256=auRqA88dfmY9yo2j26Ksdk6c342QNooILZAI0cmGb0c,32125 +transformers/models/speech_to_text/__init__.py,sha256=qZzt5u1rbSsOjPVmX40R4b4pkL1mxOQZ66q8GPDKao8,1200 +transformers/models/speech_to_text/__pycache__/__init__.cpython-310.pyc,, +transformers/models/speech_to_text/__pycache__/configuration_speech_to_text.cpython-310.pyc,, +transformers/models/speech_to_text/__pycache__/convert_s2t_fairseq_to_tfms.cpython-310.pyc,, +transformers/models/speech_to_text/__pycache__/feature_extraction_speech_to_text.cpython-310.pyc,, +transformers/models/speech_to_text/__pycache__/modeling_speech_to_text.cpython-310.pyc,, +transformers/models/speech_to_text/__pycache__/modeling_tf_speech_to_text.cpython-310.pyc,, +transformers/models/speech_to_text/__pycache__/processing_speech_to_text.cpython-310.pyc,, +transformers/models/speech_to_text/__pycache__/tokenization_speech_to_text.cpython-310.pyc,, +transformers/models/speech_to_text/configuration_speech_to_text.py,sha256=akMZy_BM5rCB0g-DIVFsf5GwGTL1TxQ1eK4swvOK8pM,9809 +transformers/models/speech_to_text/convert_s2t_fairseq_to_tfms.py,sha256=v-5aSPwuCKCtqwU8gREj9wA2nm14Z97tg6wQ3S47gos,4478 
+transformers/models/speech_to_text/feature_extraction_speech_to_text.py,sha256=PkD2ItQrXUUDkmZN3wllBmj-UbQ5AiclIyb-ISlwM_Q,13232 +transformers/models/speech_to_text/modeling_speech_to_text.py,sha256=8SrsX3TMGrEu4iDjGbS07MadjaT1nNoI9IdESdRZwxY,63640 +transformers/models/speech_to_text/modeling_tf_speech_to_text.py,sha256=7iGWE1TPaKcb9E9G4aW6Nw-EDGiB9RTHn9CQmGH2Z1U,74420 +transformers/models/speech_to_text/processing_speech_to_text.py,sha256=cfHZTf5_O9jOXg9gAtqBc6diEpQw4OynmqgTkl-Lj2I,4856 +transformers/models/speech_to_text/tokenization_speech_to_text.py,sha256=CXVyp4Lze3PMbwmqmB0xVhtshsFVWfFri_zPlcRFBBU,11438 +transformers/models/speecht5/__init__.py,sha256=DploRLnZX4ZO40Z7BstCZ7aNWGuZE06tIeMo0GTyR60,1124 +transformers/models/speecht5/__pycache__/__init__.cpython-310.pyc,, +transformers/models/speecht5/__pycache__/configuration_speecht5.cpython-310.pyc,, +transformers/models/speecht5/__pycache__/convert_hifigan.cpython-310.pyc,, +transformers/models/speecht5/__pycache__/convert_speecht5_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/speecht5/__pycache__/feature_extraction_speecht5.cpython-310.pyc,, +transformers/models/speecht5/__pycache__/modeling_speecht5.cpython-310.pyc,, +transformers/models/speecht5/__pycache__/number_normalizer.cpython-310.pyc,, +transformers/models/speecht5/__pycache__/processing_speecht5.cpython-310.pyc,, +transformers/models/speecht5/__pycache__/tokenization_speecht5.cpython-310.pyc,, +transformers/models/speecht5/configuration_speecht5.py,sha256=FaCOuh7O7jhrWQEaFWmj0WpGgrKLZ9ziSOAduEVkQAk,23434 +transformers/models/speecht5/convert_hifigan.py,sha256=CL9GSX_bimjm_hU2rE55MaNvTUjTtWD6qCtqNMaXy7I,4241 +transformers/models/speecht5/convert_speecht5_original_pytorch_checkpoint_to_pytorch.py,sha256=AyAjaeibe3002YZRT2maq1Yi8-iP1j7Ahs5qxYMjiJ0,17194 +transformers/models/speecht5/feature_extraction_speecht5.py,sha256=9PHkKDOahe7l0m-AnAFpiNfSBW30jmT6YXuPUVJHADM,17850 
+transformers/models/speecht5/modeling_speecht5.py,sha256=4IlEVg_SYgF4VEMRQGxpId5rESA3NE7rYiW9GJcB7lo,154653 +transformers/models/speecht5/number_normalizer.py,sha256=cxnEUdHSISW5eAo15cLuVkZa65zMFuMFaJ8zAOQCsAA,7019 +transformers/models/speecht5/processing_speecht5.py,sha256=lp8lCue0tNo3xQVqlHpzruReD0iGUZeNz4KRsXP12rg,7596 +transformers/models/speecht5/tokenization_speecht5.py,sha256=eeIqgfTtt_OTdg_uPl6G2il1zvmtLoMI728DroM-pUg,8946 +transformers/models/splinter/__init__.py,sha256=N3tdgJIqZRPK0g3pfLE3p3-HkGJMRf-GQ189anQ51to,1084 +transformers/models/splinter/__pycache__/__init__.cpython-310.pyc,, +transformers/models/splinter/__pycache__/configuration_splinter.cpython-310.pyc,, +transformers/models/splinter/__pycache__/modeling_splinter.cpython-310.pyc,, +transformers/models/splinter/__pycache__/tokenization_splinter.cpython-310.pyc,, +transformers/models/splinter/__pycache__/tokenization_splinter_fast.cpython-310.pyc,, +transformers/models/splinter/configuration_splinter.py,sha256=ZajZPX6f9K7gBqp2PbOtmJg-_fAU8h72tKdTNjyQV0M,5625 +transformers/models/splinter/modeling_splinter.py,sha256=unKeBbKPkZ1-yMDOGxAUCPBkNTlo1ENK4TWeuhGj1qs,53454 +transformers/models/splinter/tokenization_splinter.py,sha256=a-Z2ZgRffsIVGn122hHRjnkab5KDx900uPbCzzKzKUs,20981 +transformers/models/splinter/tokenization_splinter_fast.py,sha256=Y4D08-btQwQepNw4ZDAr-y-lkHbqRIcKn8HeL4zaXW8,8603 +transformers/models/squeezebert/__init__.py,sha256=_kzQtfoJetCK99e_FICGZl5DN8S2VVcOUFioGyN0sLI,1096 +transformers/models/squeezebert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/squeezebert/__pycache__/configuration_squeezebert.cpython-310.pyc,, +transformers/models/squeezebert/__pycache__/modeling_squeezebert.cpython-310.pyc,, +transformers/models/squeezebert/__pycache__/tokenization_squeezebert.cpython-310.pyc,, +transformers/models/squeezebert/__pycache__/tokenization_squeezebert_fast.cpython-310.pyc,, 
+transformers/models/squeezebert/configuration_squeezebert.py,sha256=24rAypu_QmOVu_CTO_e6hos_xEtnPVQZHmsEVv-F3mk,7303 +transformers/models/squeezebert/modeling_squeezebert.py,sha256=ERtHnCeYCB-PTacSp7HANHxFxxDn0Rbf-BmQD1SPXYU,45342 +transformers/models/squeezebert/tokenization_squeezebert.py,sha256=gNsEVyb_FE47-orxA4xDVidgTGEUoLODjdK38H9XD6Y,21248 +transformers/models/squeezebert/tokenization_squeezebert_fast.py,sha256=uZvcCVHG-ObsxQlwlcMTAL6sTgBAjFtMjKJvP42ga3o,7860 +transformers/models/stablelm/__init__.py,sha256=aVgWTcwBuuiGJDp8H_ZU6BvhYqjmNEqCukU7jEfwd_I,997 +transformers/models/stablelm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/stablelm/__pycache__/configuration_stablelm.cpython-310.pyc,, +transformers/models/stablelm/__pycache__/modeling_stablelm.cpython-310.pyc,, +transformers/models/stablelm/configuration_stablelm.py,sha256=qAiB-_tQupM4gJBL3Fslpww60FMGyAgRsxGSi8PpttA,10837 +transformers/models/stablelm/modeling_stablelm.py,sha256=t93Yj0DiLVOAIRAmQiwiSfOHuqD_N07wSXZ-zvXXUeI,64051 +transformers/models/starcoder2/__init__.py,sha256=fZ8HHZCGjxRfVgROe7zuoi9ADIAa4SeqxGHkvKUQiQM,1001 +transformers/models/starcoder2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/starcoder2/__pycache__/configuration_starcoder2.cpython-310.pyc,, +transformers/models/starcoder2/__pycache__/modeling_starcoder2.cpython-310.pyc,, +transformers/models/starcoder2/__pycache__/modular_starcoder2.cpython-310.pyc,, +transformers/models/starcoder2/configuration_starcoder2.py,sha256=3Y4CDpQPOKWtkj0Qe8_uJthZSCw2jF9w429ZFVHdQgA,10667 +transformers/models/starcoder2/modeling_starcoder2.py,sha256=Gt30pOGXb8-8U3YC_mk00w92up37VqW3kQ7jtA1bwhc,49272 +transformers/models/starcoder2/modular_starcoder2.py,sha256=TUZIcLaG90eVy1V12XTMMr-1UQPwSVmpKpdsQTr5fso,11496 +transformers/models/superpoint/__init__.py,sha256=CeDGkon6FhcDhbdXs9IlLKFmS1d3THdAB5p4mH6gZ_M,1048 +transformers/models/superpoint/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/superpoint/__pycache__/configuration_superpoint.cpython-310.pyc,, +transformers/models/superpoint/__pycache__/convert_superpoint_to_pytorch.cpython-310.pyc,, +transformers/models/superpoint/__pycache__/image_processing_superpoint.cpython-310.pyc,, +transformers/models/superpoint/__pycache__/modeling_superpoint.cpython-310.pyc,, +transformers/models/superpoint/configuration_superpoint.py,sha256=F6qo1YZWmIv83xPGzhvMpBDD7Kfk8EVJJg39CnkEF6g,4072 +transformers/models/superpoint/convert_superpoint_to_pytorch.py,sha256=tO1P6yqW46LY1hnWIJPOs4KjW0uZWkiVWW-GTOXbJGg,7243 +transformers/models/superpoint/image_processing_superpoint.py,sha256=q0YCviuC2au7ooom2s8091MNI02p9rKcQ6UwruBC0FQ,15217 +transformers/models/superpoint/modeling_superpoint.py,sha256=_2whda8SKT-T5hBU3LrFqG9Z-sN6JnxXhtzC3JuKi3c,21654 +transformers/models/swiftformer/__init__.py,sha256=cW3-9efPxdjZV1KziM8j1S8e8wH3wJQhWqMXlULhG6c,1046 +transformers/models/swiftformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/swiftformer/__pycache__/configuration_swiftformer.cpython-310.pyc,, +transformers/models/swiftformer/__pycache__/convert_swiftformer_original_to_hf.cpython-310.pyc,, +transformers/models/swiftformer/__pycache__/modeling_swiftformer.cpython-310.pyc,, +transformers/models/swiftformer/__pycache__/modeling_tf_swiftformer.cpython-310.pyc,, +transformers/models/swiftformer/configuration_swiftformer.py,sha256=0_PbPpI5DyuAukHnqq3vy-urxe4dS_qATNhC_MDp-fM,5858 +transformers/models/swiftformer/convert_swiftformer_original_to_hf.py,sha256=f3WE1QJGHW6Ak8fu3n37S69m92xgICfsbYYKDvibn6c,6238 +transformers/models/swiftformer/modeling_swiftformer.py,sha256=m5XuKIrQCjUbYbDmeok-FqyQMICyJwN5xDh2uLenTkY,22846 +transformers/models/swiftformer/modeling_tf_swiftformer.py,sha256=eFDCLFNvEQ3PEKxcVqQuaJ3-U1P5tukZCw7J_IvCb1g,34966 +transformers/models/swin/__init__.py,sha256=7pcdahUG9WcEkEDRoUcMVxdonKglhOpXaQLo8xI6KTg,1025 +transformers/models/swin/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/swin/__pycache__/configuration_swin.cpython-310.pyc,, +transformers/models/swin/__pycache__/convert_swin_simmim_to_pytorch.cpython-310.pyc,, +transformers/models/swin/__pycache__/convert_swin_timm_to_pytorch.cpython-310.pyc,, +transformers/models/swin/__pycache__/modeling_swin.cpython-310.pyc,, +transformers/models/swin/__pycache__/modeling_tf_swin.cpython-310.pyc,, +transformers/models/swin/configuration_swin.py,sha256=8xaMjjRiwRngEhBf5yklrd67PekE6RVsz3a1j4EzQp0,7949 +transformers/models/swin/convert_swin_simmim_to_pytorch.py,sha256=35aBdY_dHVSumVv-vl6qS9wuJeNrqWT5EkMTvo8zVfo,6631 +transformers/models/swin/convert_swin_timm_to_pytorch.py,sha256=UE5XxkTNJZjTKwfK_upI-bOqZwE5fIgQjJDSoPKkT-g,5809 +transformers/models/swin/modeling_swin.py,sha256=QukH6wd6jEz5CorSsUEN7uSuqzgR-FJlZcVjLA774n0,63190 +transformers/models/swin/modeling_tf_swin.py,sha256=O4NGM4AotiGgoDZ_ww0rkQg19eTKe5gUlwvJHBa0buY,70907 +transformers/models/swin2sr/__init__.py,sha256=xEgE9PSRZ7w4fMZeHQ42QyfS1xua7kNne-K7ADvGRn0,1039 +transformers/models/swin2sr/__pycache__/__init__.cpython-310.pyc,, +transformers/models/swin2sr/__pycache__/configuration_swin2sr.cpython-310.pyc,, +transformers/models/swin2sr/__pycache__/convert_swin2sr_original_to_pytorch.cpython-310.pyc,, +transformers/models/swin2sr/__pycache__/image_processing_swin2sr.cpython-310.pyc,, +transformers/models/swin2sr/__pycache__/modeling_swin2sr.cpython-310.pyc,, +transformers/models/swin2sr/configuration_swin2sr.py,sha256=6ZRVIyo6z1oQvPm13QvkrWcKpf1qjMf0QqwmdHMdvto,6841 +transformers/models/swin2sr/convert_swin2sr_original_to_pytorch.py,sha256=xv4DkXMn4keBRk8lLaG5Kq91DdQMn4q_25WudPCYhyo,11363 +transformers/models/swin2sr/image_processing_swin2sr.py,sha256=T5JpOohG19DOgjlUHgtw06vOv1Q5FHg-oK6ImXPL2zQ,9247 +transformers/models/swin2sr/modeling_swin2sr.py,sha256=wehbrkmY8y4BW3d5zQne25AqilW6PmUZNoUO38Ln-c4,50887 +transformers/models/swinv2/__init__.py,sha256=njM902tlEQ82mYRN9ZTMOiXpJn1NHnxKbm_LCvn2I-M,993 
+transformers/models/swinv2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/swinv2/__pycache__/configuration_swinv2.cpython-310.pyc,, +transformers/models/swinv2/__pycache__/convert_swinv2_timm_to_pytorch.cpython-310.pyc,, +transformers/models/swinv2/__pycache__/modeling_swinv2.cpython-310.pyc,, +transformers/models/swinv2/configuration_swinv2.py,sha256=2ELV-6mQG8Jbmkkd-E5YVKXlr08d6RL-3HZUmNwW4D8,7547 +transformers/models/swinv2/convert_swinv2_timm_to_pytorch.py,sha256=Eg5ASHw46_huYnDn-QCdVXJnSb_U5QfzrktX__nP_D0,7693 +transformers/models/swinv2/modeling_swinv2.py,sha256=RJvpsxmMm98DgkSclLKN80deIjyPYdBI9ni_XN5WPh0,67006 +transformers/models/switch_transformers/__init__.py,sha256=Iw38A9kfIT5mJ0G00YE-TVN-M_b1DBHYQqb0pEyTZMY,1019 +transformers/models/switch_transformers/__pycache__/__init__.cpython-310.pyc,, +transformers/models/switch_transformers/__pycache__/configuration_switch_transformers.cpython-310.pyc,, +transformers/models/switch_transformers/__pycache__/convert_big_switch.cpython-310.pyc,, +transformers/models/switch_transformers/__pycache__/convert_switch_transformers_original_flax_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/switch_transformers/__pycache__/modeling_switch_transformers.cpython-310.pyc,, +transformers/models/switch_transformers/configuration_switch_transformers.py,sha256=oIUAs_6pAQ1ExMl2uFAYiUMA-l8GJvqpChiPkIFYYq4,9046 +transformers/models/switch_transformers/convert_big_switch.py,sha256=wjMGjHXAqVool6fZQhdG_Av2Ujx9EDoZrtHC8RdDLk4,7659 +transformers/models/switch_transformers/convert_switch_transformers_original_flax_checkpoint_to_pytorch.py,sha256=AAJNkPcr_THjPN_8RUnOdBYbbYc6GOqXdgdjhx9FZyw,7593 +transformers/models/switch_transformers/modeling_switch_transformers.py,sha256=BDMt0dqLWW-5lFpNhlwBaLC_o93IwW5EZlj0XjyX5qg,94437 +transformers/models/t5/__init__.py,sha256=hCQO8nkKAJqFgMOwC7nxhyDYOUA9fcDT0pDb7DAHt5Y,1130 +transformers/models/t5/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/t5/__pycache__/configuration_t5.cpython-310.pyc,, +transformers/models/t5/__pycache__/convert_t5_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/t5/__pycache__/convert_t5x_checkpoint_to_flax.cpython-310.pyc,, +transformers/models/t5/__pycache__/convert_t5x_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/t5/__pycache__/modeling_flax_t5.cpython-310.pyc,, +transformers/models/t5/__pycache__/modeling_t5.cpython-310.pyc,, +transformers/models/t5/__pycache__/modeling_tf_t5.cpython-310.pyc,, +transformers/models/t5/__pycache__/tokenization_t5.cpython-310.pyc,, +transformers/models/t5/__pycache__/tokenization_t5_fast.cpython-310.pyc,, +transformers/models/t5/configuration_t5.py,sha256=DfwNeewoBtzp1VsB84uLW9k0N_nwPk7L66SVEDVLQwA,7372 +transformers/models/t5/convert_t5_original_tf_checkpoint_to_pytorch.py,sha256=LEibHPdlDdKdyB6XHB5s7pHRsqB5qQxUWN93H8G_q5k,2119 +transformers/models/t5/convert_t5x_checkpoint_to_flax.py,sha256=PLgfe5u_gcFjBduCmAeuKaDW4VjJtP6KKsx4zIRX8hs,10580 +transformers/models/t5/convert_t5x_checkpoint_to_pytorch.py,sha256=GTF0FYHDDDBl2tcYgHcirqHOI2KOE2YkDG4ekzjh_Ao,10483 +transformers/models/t5/modeling_flax_t5.py,sha256=37D-nHUepd3Fo93JT4rXLTXCZbXGYRlRcTYiXq2Deog,74273 +transformers/models/t5/modeling_t5.py,sha256=z0F0R81pW2q5U7Ls2lk5hOvY-RgErWS-0-e3kfIta60,115335 +transformers/models/t5/modeling_tf_t5.py,sha256=VZzl32hiqK1L9pZBw9OJzqpHGotBY5t9IpKWT3a_BNI,77180 +transformers/models/t5/tokenization_t5.py,sha256=1g994QkBag1ht5dKWdKjbB9wfLusIWGTFd2Jmkp_Rz8,20019 +transformers/models/t5/tokenization_t5_fast.py,sha256=wNerBtP7B5cB3o6UhVQB2b1W4NjNXwkvMFHLY3H8m9I,10200 +transformers/models/table_transformer/__init__.py,sha256=VT-KM0_6LZ6fdOAglbfA8zEhCQuYa6He10Div7WEcD8,1015 +transformers/models/table_transformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/table_transformer/__pycache__/configuration_table_transformer.cpython-310.pyc,, 
+transformers/models/table_transformer/__pycache__/convert_table_transformer_to_hf.cpython-310.pyc,, +transformers/models/table_transformer/__pycache__/convert_table_transformer_to_hf_no_timm.cpython-310.pyc,, +transformers/models/table_transformer/__pycache__/modeling_table_transformer.cpython-310.pyc,, +transformers/models/table_transformer/configuration_table_transformer.py,sha256=eqmUeH6hf0B8WHidmq1Ca8DG2CQf4TCZol5u12ooAys,13373 +transformers/models/table_transformer/convert_table_transformer_to_hf.py,sha256=Fw7-BfEEli4_Qk4AXZ9sNfiitz6IJwyTGPdtCGlkBg8,15094 +transformers/models/table_transformer/convert_table_transformer_to_hf_no_timm.py,sha256=9A3hwdQWayhc8RoxUWpLoPyhhpHDuVgheIN_MOkLnYI,21185 +transformers/models/table_transformer/modeling_table_transformer.py,sha256=LzvOJure8I8YPA3TuI5QHB4-sJcEF2kpYl8Xy6wM6J8,70029 +transformers/models/tapas/__init__.py,sha256=DQTmog2nYukVsXxARy8v35SitI0Iv4ZLCGl7zUlLDuI,1066 +transformers/models/tapas/__pycache__/__init__.cpython-310.pyc,, +transformers/models/tapas/__pycache__/configuration_tapas.cpython-310.pyc,, +transformers/models/tapas/__pycache__/convert_tapas_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/tapas/__pycache__/modeling_tapas.cpython-310.pyc,, +transformers/models/tapas/__pycache__/modeling_tf_tapas.cpython-310.pyc,, +transformers/models/tapas/__pycache__/tokenization_tapas.cpython-310.pyc,, +transformers/models/tapas/configuration_tapas.py,sha256=ICAyq4RBVAgph30XEKBixFCQolEGQQ_wZk9o0DXl2pk,12293 +transformers/models/tapas/convert_tapas_original_tf_checkpoint_to_pytorch.py,sha256=K69dSJ-h4XbiYh_4pd7UX7KErXThx8dtDORaiCcBKTM,5048 +transformers/models/tapas/modeling_tapas.py,sha256=dI6cKtkUOwOQDom1DiPovgUUzbdXNfs_LnHgakSfWd8,110377 +transformers/models/tapas/modeling_tf_tapas.py,sha256=WNYBNH4uyA1bERtL5ObV2dWEQKN6lmZdbLE-_kyzhIw,112395 +transformers/models/tapas/tokenization_tapas.py,sha256=OPyboBARXxk-QPTK5fY0jb16_uiFp0EuGZI0kXNWejk,118424 
+transformers/models/textnet/__init__.py,sha256=WCPdGs5LWKGDk5UvZm4wA0G76bIXMOhBr1M3x-WmE3s,1039 +transformers/models/textnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/textnet/__pycache__/configuration_textnet.cpython-310.pyc,, +transformers/models/textnet/__pycache__/convert_textnet_to_hf.cpython-310.pyc,, +transformers/models/textnet/__pycache__/image_processing_textnet.cpython-310.pyc,, +transformers/models/textnet/__pycache__/modeling_textnet.cpython-310.pyc,, +transformers/models/textnet/configuration_textnet.py,sha256=kW_lRsSSythpV0dltKhkG74LJh_zPCSvTmXsMT-y47g,6212 +transformers/models/textnet/convert_textnet_to_hf.py,sha256=fRgKy9UvBq5oyWwGI8bvIcDt91FnvRBsWQo3tqB9U7A,8050 +transformers/models/textnet/image_processing_textnet.py,sha256=ZVkHCdJGeWtFz_j1uLkzBUypBbHW5v_pUgXUV5CfBjc,17613 +transformers/models/textnet/modeling_textnet.py,sha256=snpUjqS6AcHhd0cywTYaSdgFSABOzZe8A9IpAbFGrfc,19010 +transformers/models/time_series_transformer/__init__.py,sha256=3A_3Wog-6NDwCoBIMtkzJv9slc_wXpzDzsOo-xBQ8hE,1027 +transformers/models/time_series_transformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/time_series_transformer/__pycache__/configuration_time_series_transformer.cpython-310.pyc,, +transformers/models/time_series_transformer/__pycache__/modeling_time_series_transformer.cpython-310.pyc,, +transformers/models/time_series_transformer/configuration_time_series_transformer.py,sha256=okwA_lr2uUmiXn6ETjLAn36lZ0tb22Yc8YOE9UMd_4M,11701 +transformers/models/time_series_transformer/modeling_time_series_transformer.py,sha256=lhfG_jX0PxkjN8hol6UwyyhcPy2JfdcHAeQdKg8J0aU,88674 +transformers/models/timesformer/__init__.py,sha256=4ODuyNRrYkbgpSbMHJX8XmpJdekHlu__zWey-plUSgI,1003 +transformers/models/timesformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/timesformer/__pycache__/configuration_timesformer.cpython-310.pyc,, +transformers/models/timesformer/__pycache__/convert_timesformer_to_pytorch.cpython-310.pyc,, 
+transformers/models/timesformer/__pycache__/modeling_timesformer.cpython-310.pyc,, +transformers/models/timesformer/configuration_timesformer.py,sha256=GilCKil_40B_hqjh0-02CWrBupbwEfHhOZ3b5bUpTPI,5568 +transformers/models/timesformer/convert_timesformer_to_pytorch.py,sha256=TjOfPbEC4oVb5tlOgU2m9g36OBizDEEjm0bbcZz6Mq8,10176 +transformers/models/timesformer/modeling_timesformer.py,sha256=aofWD4rJQcLqXHOFGb6qLd0q7AOy8x9GWTJiCut_qqw,35293 +transformers/models/timm_backbone/__init__.py,sha256=s0GlTaJ43Yt9ZdzG9-qjJNlp0Ol4vjN-14S6N7gXLsA,1007 +transformers/models/timm_backbone/__pycache__/__init__.cpython-310.pyc,, +transformers/models/timm_backbone/__pycache__/configuration_timm_backbone.cpython-310.pyc,, +transformers/models/timm_backbone/__pycache__/modeling_timm_backbone.cpython-310.pyc,, +transformers/models/timm_backbone/configuration_timm_backbone.py,sha256=2TXijKvoZeRZzvZxGEsoVVTb0kNcMuoYoVfYQKYhZco,3186 +transformers/models/timm_backbone/modeling_timm_backbone.py,sha256=xyVXDBWWzRl0wPSX4mv7vOCsGlLH5L7LlGRIpjBwuuI,6649 +transformers/models/timm_wrapper/__init__.py,sha256=nO3xlv8KQmYCoxKqDteADLkli16cLqdLkfTY_G73O6k,1048 +transformers/models/timm_wrapper/__pycache__/__init__.cpython-310.pyc,, +transformers/models/timm_wrapper/__pycache__/configuration_timm_wrapper.cpython-310.pyc,, +transformers/models/timm_wrapper/__pycache__/image_processing_timm_wrapper.cpython-310.pyc,, +transformers/models/timm_wrapper/__pycache__/modeling_timm_wrapper.cpython-310.pyc,, +transformers/models/timm_wrapper/configuration_timm_wrapper.py,sha256=i9zuXwUCSwJGewQB5rP3iSVJJgK6b_PKXY6dqzjZ0R0,4775 +transformers/models/timm_wrapper/image_processing_timm_wrapper.py,sha256=-jHV_6GOwmcgCmzyhn4R9LKpTTQQCsL5k-puh3LFoh4,5288 +transformers/models/timm_wrapper/modeling_timm_wrapper.py,sha256=yETdcsOveAe3BZDjhb9Ibm1n4dx5G3NwKrRAtWMVLhY,15815 +transformers/models/trocr/__init__.py,sha256=Hllbq_42XbGRZyXsGOzYHcb33MOA5_yfijMRKEXJ4n4,1027 
+transformers/models/trocr/__pycache__/__init__.cpython-310.pyc,, +transformers/models/trocr/__pycache__/configuration_trocr.cpython-310.pyc,, +transformers/models/trocr/__pycache__/convert_trocr_unilm_to_pytorch.cpython-310.pyc,, +transformers/models/trocr/__pycache__/modeling_trocr.cpython-310.pyc,, +transformers/models/trocr/__pycache__/processing_trocr.cpython-310.pyc,, +transformers/models/trocr/configuration_trocr.py,sha256=7W4gKEwd5ZdCf3ovSjO9OJbY-VG-ebwl04WkSEdCUgI,6550 +transformers/models/trocr/convert_trocr_unilm_to_pytorch.py,sha256=FO6U6L51DYBxCxvalXRVpsfZZWKK7PHZV-6nJgt4icI,10165 +transformers/models/trocr/modeling_trocr.py,sha256=MRlODhiywnVx9_lobqSBI3qWxMh-TrgdMCbyrCtMPts,44940 +transformers/models/trocr/processing_trocr.py,sha256=_am3vHsTzD0DrmQbwgzb3CtRWMqT8p0IxatDNAGK-nA,6354 +transformers/models/tvp/__init__.py,sha256=CMKadZ9nKrh8p6u4Z-k6014a9LqDJY7KpyL009s3kpo,1061 +transformers/models/tvp/__pycache__/__init__.cpython-310.pyc,, +transformers/models/tvp/__pycache__/configuration_tvp.cpython-310.pyc,, +transformers/models/tvp/__pycache__/image_processing_tvp.cpython-310.pyc,, +transformers/models/tvp/__pycache__/modeling_tvp.cpython-310.pyc,, +transformers/models/tvp/__pycache__/processing_tvp.cpython-310.pyc,, +transformers/models/tvp/configuration_tvp.py,sha256=DLhpoGcH2Sj9I-0etRbAlWFXhSedn3IiKt4NSQYgnN4,9932 +transformers/models/tvp/image_processing_tvp.py,sha256=YtrQRYqJZO84ht-izgd2SFD3565jC6v6gvZMqftXxQU,22582 +transformers/models/tvp/modeling_tvp.py,sha256=6XgJHOEWEoxLpx0rm1M1Cd0ZjfqNBRtQdFOWEhmR_FI,43675 +transformers/models/tvp/processing_tvp.py,sha256=COYfa1VIKEH4Yo20wQ9tzpNcKvH4-tjngdAzWNpRoTA,7009 +transformers/models/udop/__init__.py,sha256=CqFpHruzC__VtxEcVz31QxxMpBI1mjO77-Lj0RqW4Eo,1103 +transformers/models/udop/__pycache__/__init__.cpython-310.pyc,, +transformers/models/udop/__pycache__/configuration_udop.cpython-310.pyc,, +transformers/models/udop/__pycache__/convert_udop_to_hf.cpython-310.pyc,, 
+transformers/models/udop/__pycache__/modeling_udop.cpython-310.pyc,, +transformers/models/udop/__pycache__/processing_udop.cpython-310.pyc,, +transformers/models/udop/__pycache__/tokenization_udop.cpython-310.pyc,, +transformers/models/udop/__pycache__/tokenization_udop_fast.cpython-310.pyc,, +transformers/models/udop/configuration_udop.py,sha256=T0ZtIom_dWCvj9_lYwZ-stuGkQC3XUR5w8tfF5t0hwU,7675 +transformers/models/udop/convert_udop_to_hf.py,sha256=3HkMdxV39MaBHOzIHQnAzBnA7yxoz7Zl87pspmORr3Q,33524 +transformers/models/udop/modeling_udop.py,sha256=44rm4erBgkZcFzFkCa0DPGNRASMAQ9dILZiwvQN0Pyo,100933 +transformers/models/udop/processing_udop.py,sha256=CoT7oSAEPKegNPiA95lp9u46vKDpqNvQU9q7UIVF3WE,10034 +transformers/models/udop/tokenization_udop.py,sha256=MP2OjSQ406KmtBPsCnaqCp2VGs4inJq5jJrr-FDNoBs,71722 +transformers/models/udop/tokenization_udop_fast.py,sha256=q4nmSOPUZDvyxqZfcLyP7lgp-GX8x4Hc-3Cx2ctHp50,49794 +transformers/models/umt5/__init__.py,sha256=FKt6Ap3AvOCIKoeOM-5qY84lNEML9IujaDaYROINJMs,989 +transformers/models/umt5/__pycache__/__init__.cpython-310.pyc,, +transformers/models/umt5/__pycache__/configuration_umt5.cpython-310.pyc,, +transformers/models/umt5/__pycache__/convert_umt5_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/umt5/__pycache__/modeling_umt5.cpython-310.pyc,, +transformers/models/umt5/configuration_umt5.py,sha256=n5AuXtgmuknmr7_wbWdmzfA6-9t355hFj1jD_B7XsMA,7740 +transformers/models/umt5/convert_umt5_checkpoint_to_pytorch.py,sha256=cSB6TobLxWoeNNqPXPiH4YOKwj0ji9phK8gA4vzt-jo,12072 +transformers/models/umt5/modeling_umt5.py,sha256=kXDIxEWD4EhIYNnH3in48LHoG5vd6YKU8-QEfb7r_UE,94696 +transformers/models/unispeech/__init__.py,sha256=AXJMExDoYYI71OKNXhAt7lyqcFIvcLHEQ1Fsm171m5w,999 +transformers/models/unispeech/__pycache__/__init__.cpython-310.pyc,, +transformers/models/unispeech/__pycache__/configuration_unispeech.cpython-310.pyc,, 
+transformers/models/unispeech/__pycache__/convert_unispeech_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/unispeech/__pycache__/modeling_unispeech.cpython-310.pyc,, +transformers/models/unispeech/configuration_unispeech.py,sha256=7Z8nZdyHVm7VrFNdngRLdvvJ9xLm8TRwlspGT2CpoXo,17486 +transformers/models/unispeech/convert_unispeech_original_pytorch_checkpoint_to_pytorch.py,sha256=9Sy8RKspS_mb4rTl4t9IlqNaMiXQz31ATrWRFfM5xhA,11339 +transformers/models/unispeech/modeling_unispeech.py,sha256=XLUqkSmVJNV7Jt3BTkuf48nHnEkGk1Fgyj0cvKBzcFw,86866 +transformers/models/unispeech_sat/__init__.py,sha256=P9lCzMg01s4Gj_Pb8t1l36MRAeoOcxUa4d7dbQSe9N4,1007 +transformers/models/unispeech_sat/__pycache__/__init__.cpython-310.pyc,, +transformers/models/unispeech_sat/__pycache__/configuration_unispeech_sat.cpython-310.pyc,, +transformers/models/unispeech_sat/__pycache__/convert_unispeech_original_s3prl_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/unispeech_sat/__pycache__/convert_unispeech_sat_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/unispeech_sat/__pycache__/modeling_unispeech_sat.cpython-310.pyc,, +transformers/models/unispeech_sat/configuration_unispeech_sat.py,sha256=jyLGIqtOHBLZQ-lw8XSuG6ZrOOvvU-4aifWxulNoPSw,18831 +transformers/models/unispeech_sat/convert_unispeech_original_s3prl_checkpoint_to_pytorch.py,sha256=OLOISwA82PhBuqITPvfR7bP23Fx0Gxvb2c2SbCKk_XY,4869 +transformers/models/unispeech_sat/convert_unispeech_sat_original_pytorch_checkpoint_to_pytorch.py,sha256=aqmh0Am1E26ifRHt8MujjdUO9CHkVZ6ziqoOIr3n6-o,9288 +transformers/models/unispeech_sat/modeling_unispeech_sat.py,sha256=C0iUmdP0gp477d1cBBoVnKWUaoEo7-JQtLz2IyPgrXA,101126 +transformers/models/univnet/__init__.py,sha256=hfHyxyKGEfd58p1fUSA3IxK2q6JkVatkGceVaoKuODk,1041 +transformers/models/univnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/univnet/__pycache__/configuration_univnet.cpython-310.pyc,, 
+transformers/models/univnet/__pycache__/convert_univnet.cpython-310.pyc,, +transformers/models/univnet/__pycache__/feature_extraction_univnet.cpython-310.pyc,, +transformers/models/univnet/__pycache__/modeling_univnet.cpython-310.pyc,, +transformers/models/univnet/configuration_univnet.py,sha256=le3W3tTVtbIpdHEuAGSiQw67ZlKcmeFh7jdu4ESEoiI,6758 +transformers/models/univnet/convert_univnet.py,sha256=R2gqXfz8Oq2rwIUU01V7T_oSoDGG2A4Gety-R80Yn24,6364 +transformers/models/univnet/feature_extraction_univnet.py,sha256=eKmWqN-2p0DQ-HSaWDp6T0QpLicH2BQERSUjFVJj-HY,22861 +transformers/models/univnet/modeling_univnet.py,sha256=IFAldSQ4Ubq_qxXkOuxfYVg91EDDNi8KRuRLtaMMxiA,27572 +transformers/models/upernet/__init__.py,sha256=Wq3u7yXJul5PLmjalxKgx451sa_WuSXbEM45bZsRv3E,995 +transformers/models/upernet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/upernet/__pycache__/configuration_upernet.cpython-310.pyc,, +transformers/models/upernet/__pycache__/convert_convnext_upernet_to_pytorch.cpython-310.pyc,, +transformers/models/upernet/__pycache__/convert_swin_upernet_to_pytorch.cpython-310.pyc,, +transformers/models/upernet/__pycache__/modeling_upernet.cpython-310.pyc,, +transformers/models/upernet/configuration_upernet.py,sha256=cVM6QHwf0DcfGV9tA4ejaQs6GHZZaSoHCC3ssZHiOpM,6643 +transformers/models/upernet/convert_convnext_upernet_to_pytorch.py,sha256=l_CJoXwANEE9rm5mwpHwbusIoJLmN8jNGjxsj6WhZrk,10271 +transformers/models/upernet/convert_swin_upernet_to_pytorch.py,sha256=lHV8SE_bZnxOo-zEJ21S2nY449uPVc3bpcl2JGKNEjA,14026 +transformers/models/upernet/modeling_upernet.py,sha256=_91OAqgT054ITpI9sP9_ZvqL3Q0UMqyxOd5xeTNG0Xg,17208 +transformers/models/video_llava/__init__.py,sha256=bsLGp1WBBO_AvNVRxzOn5k7OYQIbX9SqFhESd24FImc,1093 +transformers/models/video_llava/__pycache__/__init__.cpython-310.pyc,, +transformers/models/video_llava/__pycache__/configuration_video_llava.cpython-310.pyc,, 
+transformers/models/video_llava/__pycache__/convert_video_llava_weights_to_hf.cpython-310.pyc,, +transformers/models/video_llava/__pycache__/image_processing_video_llava.cpython-310.pyc,, +transformers/models/video_llava/__pycache__/modeling_video_llava.cpython-310.pyc,, +transformers/models/video_llava/__pycache__/processing_video_llava.cpython-310.pyc,, +transformers/models/video_llava/configuration_video_llava.py,sha256=OFfUqRQTXVRxeFAkhe4lco0MEFL898NzO3s1cmNvHbk,6332 +transformers/models/video_llava/convert_video_llava_weights_to_hf.py,sha256=2ubkFfdHTRc-6BPLYG1A82Daki0EBnSkmVPLQyRc7eo,6078 +transformers/models/video_llava/image_processing_video_llava.py,sha256=pMsOMzaJcYeUvI06QWE1l_fW8gM_riHo9rwdPn0r-iY,19352 +transformers/models/video_llava/modeling_video_llava.py,sha256=nZK2ndX7Myo1R6BFTQyoo95fLt6vokVPES2_43w78lU,34604 +transformers/models/video_llava/processing_video_llava.py,sha256=sS2WFIxWoRaf-edpH9WnG7YvWkhzD9C6-MO8h72As3s,11590 +transformers/models/videomae/__init__.py,sha256=IYw3qXj1-PDmBAp---YaZyqdBsIjdMZQI37xT_-9SgY,1089 +transformers/models/videomae/__pycache__/__init__.cpython-310.pyc,, +transformers/models/videomae/__pycache__/configuration_videomae.cpython-310.pyc,, +transformers/models/videomae/__pycache__/convert_videomae_to_pytorch.cpython-310.pyc,, +transformers/models/videomae/__pycache__/feature_extraction_videomae.cpython-310.pyc,, +transformers/models/videomae/__pycache__/image_processing_videomae.cpython-310.pyc,, +transformers/models/videomae/__pycache__/modeling_videomae.cpython-310.pyc,, +transformers/models/videomae/configuration_videomae.py,sha256=O0BwqYZnc9Q5Kpemmel6rOxeDBSj7KKCxgpHfMVCVGE,6600 +transformers/models/videomae/convert_videomae_to_pytorch.py,sha256=rq2nT2ZJekra1G38kM2DH_qOvcZBDQFNgbCvH3mKZjY,13989 +transformers/models/videomae/feature_extraction_videomae.py,sha256=ipQNgym9IqJFNUgI0tDxLmhXQTsXGnmUXHEEQnAG1BI,1241 
+transformers/models/videomae/image_processing_videomae.py,sha256=1bwAavDxV0gpVZf9U9xt7a7j9ZHljpBqTxmuaB2JJMw,16547 +transformers/models/videomae/modeling_videomae.py,sha256=58VIORwAmCubzJUSrSc9e55hqD_5s7RTz5lMP82V_4s,49409 +transformers/models/vilt/__init__.py,sha256=xi3VCXhgvk-7vjOrInBT3x_tucu7dkQ1oaJPdAQGCME,1108 +transformers/models/vilt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vilt/__pycache__/configuration_vilt.cpython-310.pyc,, +transformers/models/vilt/__pycache__/convert_vilt_original_to_pytorch.cpython-310.pyc,, +transformers/models/vilt/__pycache__/feature_extraction_vilt.cpython-310.pyc,, +transformers/models/vilt/__pycache__/image_processing_vilt.cpython-310.pyc,, +transformers/models/vilt/__pycache__/modeling_vilt.cpython-310.pyc,, +transformers/models/vilt/__pycache__/processing_vilt.cpython-310.pyc,, +transformers/models/vilt/configuration_vilt.py,sha256=_ClQWl9Srq_ozXsB-gSdsy5gaTbhwNrP5bMcp6tIz4A,6815 +transformers/models/vilt/convert_vilt_original_to_pytorch.py,sha256=PE1IIC0UxmzENP_L_Ev-2mOw3Q43N7Hpog-I8qQN_Yc,12881 +transformers/models/vilt/feature_extraction_vilt.py,sha256=2dLqlDY3SUnKf-MypHrgpH5scJLJj_AtMWn-d3DrhUQ,1209 +transformers/models/vilt/image_processing_vilt.py,sha256=1tgD7TQ7PyWFHidZEpMNuPw4OSceWYAQT71n7TEgTKU,23191 +transformers/models/vilt/modeling_vilt.py,sha256=4owEuEq6X7oBNIUGnyvyqcWzEoj6Oi_9wGfECi-tXjA,65132 +transformers/models/vilt/processing_vilt.py,sha256=0S5qkrduxIpeQHJsJv5G5T7YG4tB-mLZRl6tTVsxNeY,6109 +transformers/models/vipllava/__init__.py,sha256=HJ5mZUNdt_bmaC9l-GycD7mVT2r1oN15prmnlBtz6oA,997 +transformers/models/vipllava/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vipllava/__pycache__/configuration_vipllava.cpython-310.pyc,, +transformers/models/vipllava/__pycache__/convert_vipllava_weights_to_hf.cpython-310.pyc,, +transformers/models/vipllava/__pycache__/modeling_vipllava.cpython-310.pyc,, 
+transformers/models/vipllava/configuration_vipllava.py,sha256=grbmy497QCQS6V3BbPP0qCuj5Bl3QSm-Zr7zhvTtAUo,5213 +transformers/models/vipllava/convert_vipllava_weights_to_hf.py,sha256=u64-lOXDE0JMGhkGYJEtyrOh3gpeJtxSDC_dC08mc2c,4794 +transformers/models/vipllava/modeling_vipllava.py,sha256=YKGEc6oHkkVUiU22fut7p-p3GVJRspnbKBC77_kjvzc,28300 +transformers/models/vision_encoder_decoder/__init__.py,sha256=xK5xKVeIOZSN1d9Y2nDa3NYkLdGidbwgQ6Es8JhzKzA,1135 +transformers/models/vision_encoder_decoder/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vision_encoder_decoder/__pycache__/configuration_vision_encoder_decoder.cpython-310.pyc,, +transformers/models/vision_encoder_decoder/__pycache__/modeling_flax_vision_encoder_decoder.cpython-310.pyc,, +transformers/models/vision_encoder_decoder/__pycache__/modeling_tf_vision_encoder_decoder.cpython-310.pyc,, +transformers/models/vision_encoder_decoder/__pycache__/modeling_vision_encoder_decoder.cpython-310.pyc,, +transformers/models/vision_encoder_decoder/configuration_vision_encoder_decoder.py,sha256=No1MOjy6fx-oA0Kngdn3rNeusBUzN-K0Ee2hrjmgpq4,8415 +transformers/models/vision_encoder_decoder/modeling_flax_vision_encoder_decoder.py,sha256=tZ8n9UFZ68aq4h99apWiuCaQoVAsSOvY_YwHmLYBppw,41579 +transformers/models/vision_encoder_decoder/modeling_tf_vision_encoder_decoder.py,sha256=E0PLKardMAzd7oQLDXXBIIp0FjlUPUBIEkjMoAyxzAA,36281 +transformers/models/vision_encoder_decoder/modeling_vision_encoder_decoder.py,sha256=PFSb3CdMAaeRWMLcouZYSKMvoQJ0JeNVk77oz_7j0K8,34477 +transformers/models/vision_text_dual_encoder/__init__.py,sha256=LRXs5oXk4_8AaHuIVaj1IgBO4X1vwP-ehQC1T1xEiAI,1198 +transformers/models/vision_text_dual_encoder/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vision_text_dual_encoder/__pycache__/configuration_vision_text_dual_encoder.cpython-310.pyc,, +transformers/models/vision_text_dual_encoder/__pycache__/modeling_flax_vision_text_dual_encoder.cpython-310.pyc,, 
+transformers/models/vision_text_dual_encoder/__pycache__/modeling_tf_vision_text_dual_encoder.cpython-310.pyc,, +transformers/models/vision_text_dual_encoder/__pycache__/modeling_vision_text_dual_encoder.cpython-310.pyc,, +transformers/models/vision_text_dual_encoder/__pycache__/processing_vision_text_dual_encoder.cpython-310.pyc,, +transformers/models/vision_text_dual_encoder/configuration_vision_text_dual_encoder.py,sha256=QVBDhO2nISzfOpwAE5oRoW973Ff0C29u9QSQufpH2vY,5014 +transformers/models/vision_text_dual_encoder/modeling_flax_vision_text_dual_encoder.py,sha256=I9IkA7fnzzuW9vyPigPXhbpSI_6l9T1JBh1IA5dSVLo,26359 +transformers/models/vision_text_dual_encoder/modeling_tf_vision_text_dual_encoder.py,sha256=tVIG_VSYiPtCkkHabB_ZZpNLUWcyWrPRnujeSUe9hXc,28685 +transformers/models/vision_text_dual_encoder/modeling_vision_text_dual_encoder.py,sha256=hH4cQG6_7GJMhWXLOYh_rVNg47d2cvM0Y_F-PdgsOhg,25242 +transformers/models/vision_text_dual_encoder/processing_vision_text_dual_encoder.py,sha256=7KMnZ_wyP62ORmCjUJdYIQKQmtI0-aGduzY_Jg8Ocjc,6976 +transformers/models/visual_bert/__init__.py,sha256=zZFHfkE7OUMZUwYvB7v4ZIBXVUW9Mboqoa1QdTQURWM,1003 +transformers/models/visual_bert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/visual_bert/__pycache__/configuration_visual_bert.cpython-310.pyc,, +transformers/models/visual_bert/__pycache__/convert_visual_bert_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/visual_bert/__pycache__/modeling_visual_bert.cpython-310.pyc,, +transformers/models/visual_bert/configuration_visual_bert.py,sha256=4U17YnlSjbOpzsAPdGH_EfvBjv7jppbHWlmLBrchGM4,6767 +transformers/models/visual_bert/convert_visual_bert_original_pytorch_checkpoint_to_pytorch.py,sha256=jaKViPxrFh302Z0muKnQovpjLlenroT1RwXeXBGVh5Q,5157 +transformers/models/visual_bert/modeling_visual_bert.py,sha256=eGGQL3438HYVy5svXZGw09QyE1ElEYThswMh8mlwm_k,69195 +transformers/models/vit/__init__.py,sha256=uTQRjeWgJLHyXfc7yVOEyv7wnr42Jhy-8p9k5UUbxAM,1186 
+transformers/models/vit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vit/__pycache__/configuration_vit.cpython-310.pyc,, +transformers/models/vit/__pycache__/convert_dino_to_pytorch.cpython-310.pyc,, +transformers/models/vit/__pycache__/convert_vit_timm_to_pytorch.cpython-310.pyc,, +transformers/models/vit/__pycache__/feature_extraction_vit.cpython-310.pyc,, +transformers/models/vit/__pycache__/image_processing_vit.cpython-310.pyc,, +transformers/models/vit/__pycache__/image_processing_vit_fast.cpython-310.pyc,, +transformers/models/vit/__pycache__/modeling_flax_vit.cpython-310.pyc,, +transformers/models/vit/__pycache__/modeling_tf_vit.cpython-310.pyc,, +transformers/models/vit/__pycache__/modeling_vit.cpython-310.pyc,, +transformers/models/vit/configuration_vit.py,sha256=OjdcPt4dfH1Jxqli5OxqAVg2qu-hpTTZWfvOYQwdMRc,5655 +transformers/models/vit/convert_dino_to_pytorch.py,sha256=_lGR0qDnyjBxr0BtxibxxpSwo71za6iKt-CJ2cKXpHI,8853 +transformers/models/vit/convert_vit_timm_to_pytorch.py,sha256=vZPsceKZQo8OOVaVEMUoy5HAM7KJFW4vG7DSqynVp_c,10889 +transformers/models/vit/feature_extraction_vit.py,sha256=sHO5n66s6PACZUTDxNTNS4p4zQmqLQxt7KByxQLF1ks,1201 +transformers/models/vit/image_processing_vit.py,sha256=O5h-venZuX2zCZpw99dGWgYZhdwhwCrL3tjl0l7NJw0,14356 +transformers/models/vit/image_processing_vit_fast.py,sha256=qmPc-swi3Dg25PVeR-_6eltV1CPCV_kOEPRIj7KevD8,13809 +transformers/models/vit/modeling_flax_vit.py,sha256=qJHCa7wmFrYscxoMp2UkBy0d5mprUtLjXZ6kqwrzh_o,25428 +transformers/models/vit/modeling_tf_vit.py,sha256=lUnqxfGpXgvXydp99uWwY8qeyXMqeR7RCWIrgGalzPQ,37408 +transformers/models/vit/modeling_vit.py,sha256=gsCAsbmlRo8e1KwaI1WNqLlAC9ujwhalSmO643cE2Pc,38082 +transformers/models/vit_mae/__init__.py,sha256=C8NcxWwzXlNMeMOA9DNHfDYvRF9biIuUduuwhoaTTD8,1034 +transformers/models/vit_mae/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vit_mae/__pycache__/configuration_vit_mae.cpython-310.pyc,, 
+transformers/models/vit_mae/__pycache__/convert_vit_mae_to_pytorch.cpython-310.pyc,, +transformers/models/vit_mae/__pycache__/modeling_tf_vit_mae.cpython-310.pyc,, +transformers/models/vit_mae/__pycache__/modeling_vit_mae.cpython-310.pyc,, +transformers/models/vit_mae/configuration_vit_mae.py,sha256=3nnWDAbp6WLfOHLO3taJUNEuGRlk3oAa0qaLEEJgjHQ,6372 +transformers/models/vit_mae/convert_vit_mae_to_pytorch.py,sha256=Nj4Y5LS8H7xbyWNeLE9Vn0NFyXSQQYEcj1QQMzN1Hdg,7516 +transformers/models/vit_mae/modeling_tf_vit_mae.py,sha256=aUDcL9w1V-k4rCfJa-CM7Dd6j-GV9Zes2JezW_-F2do,58028 +transformers/models/vit_mae/modeling_vit_mae.py,sha256=KSPDdAbqEDYIKZJSTNvtuFM-Wjoa8he1rVMFfKadC1c,51286 +transformers/models/vit_msn/__init__.py,sha256=Y1g56VRSNr-PxS-g4Cp2IlRR5M9CiaFGlhAQXwszGHo,995 +transformers/models/vit_msn/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vit_msn/__pycache__/configuration_vit_msn.cpython-310.pyc,, +transformers/models/vit_msn/__pycache__/convert_msn_to_pytorch.cpython-310.pyc,, +transformers/models/vit_msn/__pycache__/modeling_vit_msn.cpython-310.pyc,, +transformers/models/vit_msn/configuration_vit_msn.py,sha256=HeU0UloranISU9zLiPsK0CyooMacqogTNmwE4xp2N-o,4864 +transformers/models/vit_msn/convert_msn_to_pytorch.py,sha256=1xBjqvbviFkGxhi_xq2956R7qZpFEBdKPNOQYb-SoIA,9841 +transformers/models/vit_msn/modeling_vit_msn.py,sha256=7gdrXGWcp6D3ouGe2XoXpYcb7a9lm9fAidzEsS7XYlE,32419 +transformers/models/vitdet/__init__.py,sha256=13LNGZwvKK3tBrQWVs43rQbxbgqvxLfnM0uMqomHqhM,993 +transformers/models/vitdet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vitdet/__pycache__/configuration_vitdet.cpython-310.pyc,, +transformers/models/vitdet/__pycache__/modeling_vitdet.cpython-310.pyc,, +transformers/models/vitdet/configuration_vitdet.py,sha256=5p8B04eSluvXlpdMxyU6cRniCnMLTfYVyIqJW1iOwXc,7541 +transformers/models/vitdet/modeling_vitdet.py,sha256=EKwG9JwcT6LjWwoGd8CP3btbdKin7FxziEvnZKvLc4E,34895 
+transformers/models/vitmatte/__init__.py,sha256=UGc3nIiSmHnzVsRoJ0BYxbZPXPRj1bB2pbzaMg1H6hM,1042 +transformers/models/vitmatte/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vitmatte/__pycache__/configuration_vitmatte.cpython-310.pyc,, +transformers/models/vitmatte/__pycache__/convert_vitmatte_to_hf.cpython-310.pyc,, +transformers/models/vitmatte/__pycache__/image_processing_vitmatte.cpython-310.pyc,, +transformers/models/vitmatte/__pycache__/modeling_vitmatte.cpython-310.pyc,, +transformers/models/vitmatte/configuration_vitmatte.py,sha256=RGKk5EN1Z_v9MaBHxcHOdTKVfPYmIrI_Bq1eSVE_9mI,6269 +transformers/models/vitmatte/convert_vitmatte_to_hf.py,sha256=1xctm78nmCLelPMqGJepxSyq5saKgA4by5CTzyxRPvc,6404 +transformers/models/vitmatte/image_processing_vitmatte.py,sha256=cBkeqdnsvXG6c850RUBkYQNuvmRzTHKET2hZdECK1ig,13473 +transformers/models/vitmatte/modeling_vitmatte.py,sha256=EB7xOVC4jmJ3MqrxSWpikq3FV6nWfDnOMexOvlbwEm4,13053 +transformers/models/vitpose/__init__.py,sha256=VA7aRcVMgFJH46i6HurkXJS0Z38BotU3H3o3e2wgyXU,1039 +transformers/models/vitpose/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vitpose/__pycache__/configuration_vitpose.cpython-310.pyc,, +transformers/models/vitpose/__pycache__/convert_vitpose_to_hf.cpython-310.pyc,, +transformers/models/vitpose/__pycache__/image_processing_vitpose.cpython-310.pyc,, +transformers/models/vitpose/__pycache__/modeling_vitpose.cpython-310.pyc,, +transformers/models/vitpose/configuration_vitpose.py,sha256=pHi8AHS_s6_lyy6OfORNASj_s_fITxu2ABam-OZ3ad4,5741 +transformers/models/vitpose/convert_vitpose_to_hf.py,sha256=naiNFxChM1b6LU5178TCjKBtNEu8lwDKzK5iAEPRPIg,12545 +transformers/models/vitpose/image_processing_vitpose.py,sha256=r2eevEQkWAuY9owSNSwDbY_gbzTPaKyrlhzqZ0-bMXU,29538 +transformers/models/vitpose/modeling_vitpose.py,sha256=-yEaO0kTqa4qy4pUBlAZ9avgtro46psDWXd1fEGG_48,14702 +transformers/models/vitpose_backbone/__init__.py,sha256=AJzKeeuuhEYwBUijmeuCSaio_RysaVoWuLQbQDQvHsw,1773 
+transformers/models/vitpose_backbone/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vitpose_backbone/__pycache__/configuration_vitpose_backbone.cpython-310.pyc,, +transformers/models/vitpose_backbone/__pycache__/modeling_vitpose_backbone.cpython-310.pyc,, +transformers/models/vitpose_backbone/configuration_vitpose_backbone.py,sha256=44eEhv4XBnS9zsUU7JwFlGv97x32j9iUr_jYp6vnAsY,6613 +transformers/models/vitpose_backbone/modeling_vitpose_backbone.py,sha256=6rShmlG3jKDXGLTfEpT_kc5h_pPbbVGJL9X-bzHpAfo,22717 +transformers/models/vits/__init__.py,sha256=7baZcqGvFlYQxAl721XtMptMZKkzvBOa2ttyOhqhUtk,1026 +transformers/models/vits/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vits/__pycache__/configuration_vits.cpython-310.pyc,, +transformers/models/vits/__pycache__/convert_original_checkpoint.cpython-310.pyc,, +transformers/models/vits/__pycache__/modeling_vits.cpython-310.pyc,, +transformers/models/vits/__pycache__/tokenization_vits.cpython-310.pyc,, +transformers/models/vits/configuration_vits.py,sha256=mrh8QVlkYg0pj52N8fTivMjNxxyOSMnzBil9gGIt1rI,13884 +transformers/models/vits/convert_original_checkpoint.py,sha256=N6rRzBaJlMxRwT7u33kUyJKy-4fFTWTD6nu_RTTOGt0,18610 +transformers/models/vits/modeling_vits.py,sha256=vz_bVhhirAhRMZNWDsAJ-NOIki88nCQ7MPgKSSoLN2k,66560 +transformers/models/vits/tokenization_vits.py,sha256=UWa1DWO5hMEViHZ4DeUfoiLUaACzkkTKLK7cwA2lzb0,9388 +transformers/models/vivit/__init__.py,sha256=LT2FipIBdB69s9UY4viyuB5q2e0v3bCwtQMiOEOj2xg,1033 +transformers/models/vivit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vivit/__pycache__/configuration_vivit.cpython-310.pyc,, +transformers/models/vivit/__pycache__/convert_vivit_flax_to_pytorch.cpython-310.pyc,, +transformers/models/vivit/__pycache__/image_processing_vivit.cpython-310.pyc,, +transformers/models/vivit/__pycache__/modeling_vivit.cpython-310.pyc,, +transformers/models/vivit/configuration_vivit.py,sha256=9gBflSLQaYIF_hwzti8438fDtfHA8CCnzPdnG1aRf6Q,5142 
+transformers/models/vivit/convert_vivit_flax_to_pytorch.py,sha256=7lqPLrfC2tORMS69USynZGlVZBcPv1Ljzt7UO68Zu9w,9112 +transformers/models/vivit/image_processing_vivit.py,sha256=P4AP0K4x_hzENg7-6Z270pt-sTWrBNIJbnXnzD4WnZo,19075 +transformers/models/vivit/modeling_vivit.py,sha256=KYWG5dqYb9AS3usjalaD0SeQA8HaWBaytozUcqMxxKM,35579 +transformers/models/wav2vec2/__init__.py,sha256=5nXyY4dA0h9iNUQZrGAUXtjOnU6KbVq2B1gRzEGEUNI,1206 +transformers/models/wav2vec2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/wav2vec2/__pycache__/configuration_wav2vec2.cpython-310.pyc,, +transformers/models/wav2vec2/__pycache__/convert_wav2vec2_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/wav2vec2/__pycache__/convert_wav2vec2_original_s3prl_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/wav2vec2/__pycache__/feature_extraction_wav2vec2.cpython-310.pyc,, +transformers/models/wav2vec2/__pycache__/modeling_flax_wav2vec2.cpython-310.pyc,, +transformers/models/wav2vec2/__pycache__/modeling_tf_wav2vec2.cpython-310.pyc,, +transformers/models/wav2vec2/__pycache__/modeling_wav2vec2.cpython-310.pyc,, +transformers/models/wav2vec2/__pycache__/processing_wav2vec2.cpython-310.pyc,, +transformers/models/wav2vec2/__pycache__/tokenization_wav2vec2.cpython-310.pyc,, +transformers/models/wav2vec2/configuration_wav2vec2.py,sha256=7EFLuyhxCXZ6eYWY9KPu2ABrsHMHS8vNVC-cf4m-tCw,20076 +transformers/models/wav2vec2/convert_wav2vec2_original_pytorch_checkpoint_to_pytorch.py,sha256=d3WhsQr53BCVEpENSK0N0-lWCktnul2pTyx9O2n1KV8,15166 +transformers/models/wav2vec2/convert_wav2vec2_original_s3prl_checkpoint_to_pytorch.py,sha256=xtC7g9fzjpjN-Zeuk2MjhrJsSbn6hs1jhT5evZvtNUM,4837 +transformers/models/wav2vec2/feature_extraction_wav2vec2.py,sha256=PkgOaVsHGC3Jjoyl1nIekBoQxkR9-hq9N-q7B8JDTy0,11600 +transformers/models/wav2vec2/modeling_flax_wav2vec2.py,sha256=9Lb6qXTcGvMcMKXs9gCKZPnRIHJpCJXzpk6VRfcELSo,57447 
+transformers/models/wav2vec2/modeling_tf_wav2vec2.py,sha256=pJyCyiasTpCWL8nEfztUckvTytMy9VCK2QO9i0ku5lo,78734 +transformers/models/wav2vec2/modeling_wav2vec2.py,sha256=AJDolhXGzmbRR0k8ZW13ga5ewKb7YPCcBB7HCEWt66A,120912 +transformers/models/wav2vec2/processing_wav2vec2.py,sha256=d6G7AVeK3GyBXs-PEjVASfjHhSaiou4nPm5qsyviNZ0,7738 +transformers/models/wav2vec2/tokenization_wav2vec2.py,sha256=BPZBOhr9KVDBG3HeKE8CA_nUv1WULwZCmxfpp065PTA,38799 +transformers/models/wav2vec2_bert/__init__.py,sha256=DL010VL3ZV3lAugPH-BOTNSgIedotOEaoy8iHo0sC1Q,1051 +transformers/models/wav2vec2_bert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/wav2vec2_bert/__pycache__/configuration_wav2vec2_bert.cpython-310.pyc,, +transformers/models/wav2vec2_bert/__pycache__/convert_wav2vec2_seamless_checkpoint.cpython-310.pyc,, +transformers/models/wav2vec2_bert/__pycache__/modeling_wav2vec2_bert.cpython-310.pyc,, +transformers/models/wav2vec2_bert/__pycache__/processing_wav2vec2_bert.cpython-310.pyc,, +transformers/models/wav2vec2_bert/configuration_wav2vec2_bert.py,sha256=sr2kV9gOzUscvQy1cjMYhnVAlLvDyMK6P-mXxWpGR74,18110 +transformers/models/wav2vec2_bert/convert_wav2vec2_seamless_checkpoint.py,sha256=JU4IQi_3dgf_j_foK5JvpxCbk0ZMFbE9wAaA-WRnQ9s,7419 +transformers/models/wav2vec2_bert/modeling_wav2vec2_bert.py,sha256=TSXI3jTLEL5qwbmdPDxJCQClMIIzmcOIIrCsxAG5Ajo,74875 +transformers/models/wav2vec2_bert/processing_wav2vec2_bert.py,sha256=MZ0demC6dVjmKLjJofkllM8yDv1yQVavFBNOGc4MgiA,7882 +transformers/models/wav2vec2_conformer/__init__.py,sha256=JBpapW8VF3yck4Bk29xKyUiQZqB_CXLSYtYxXGXAu2Q,1017 +transformers/models/wav2vec2_conformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/wav2vec2_conformer/__pycache__/configuration_wav2vec2_conformer.cpython-310.pyc,, +transformers/models/wav2vec2_conformer/__pycache__/convert_wav2vec2_conformer_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, 
+transformers/models/wav2vec2_conformer/__pycache__/modeling_wav2vec2_conformer.cpython-310.pyc,, +transformers/models/wav2vec2_conformer/configuration_wav2vec2_conformer.py,sha256=cmbdw40JnLToNSMUttYGvE2r1EesHY5yZzSYJxxY4Wo,20914 +transformers/models/wav2vec2_conformer/convert_wav2vec2_conformer_original_pytorch_checkpoint_to_pytorch.py,sha256=gJvDTMypi4dwCM4RN4N7Zh8W1ZnIk2aYa_VtO4uarQo,13381 +transformers/models/wav2vec2_conformer/modeling_wav2vec2_conformer.py,sha256=YO79Lz8Itll7F8BuTu3h_1QbPROpzcf_qHs6TJ9bHVM,96436 +transformers/models/wav2vec2_phoneme/__init__.py,sha256=LV4FKcFYNt0GuJvfsUOwTYVFRVfuzUuclKRybFyN9lk,967 +transformers/models/wav2vec2_phoneme/__pycache__/__init__.cpython-310.pyc,, +transformers/models/wav2vec2_phoneme/__pycache__/tokenization_wav2vec2_phoneme.cpython-310.pyc,, +transformers/models/wav2vec2_phoneme/tokenization_wav2vec2_phoneme.py,sha256=HbJC1BoaOBTrPcMeRSudrumzKa8mxbFwapKux-FTax8,23205 +transformers/models/wav2vec2_with_lm/__init__.py,sha256=yZKHsma85j7AMLB8g8uNXL5D_E5Gc3Vqe-D-V2W15oY,965 +transformers/models/wav2vec2_with_lm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/wav2vec2_with_lm/__pycache__/processing_wav2vec2_with_lm.cpython-310.pyc,, +transformers/models/wav2vec2_with_lm/processing_wav2vec2_with_lm.py,sha256=pFA2QDKdr9eir1_BIdeLMXwKd73g8hkScWoI_2oRCPU,30040 +transformers/models/wavlm/__init__.py,sha256=wYnYuOpw2e95lauqDbD7u3OC-Pez8yoRsrgExSh_WJQ,991 +transformers/models/wavlm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/wavlm/__pycache__/configuration_wavlm.cpython-310.pyc,, +transformers/models/wavlm/__pycache__/convert_wavlm_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/wavlm/__pycache__/convert_wavlm_original_s3prl_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/wavlm/__pycache__/modeling_wavlm.cpython-310.pyc,, +transformers/models/wavlm/configuration_wavlm.py,sha256=GKWOh0w-KsVQLy34Cq3ly-anGbBjM6QSiCwdsXdcXeQ,18564 
+transformers/models/wavlm/convert_wavlm_original_pytorch_checkpoint_to_pytorch.py,sha256=dYZIX8q3-JQ-LkhggxjbMWPjixu_ZpONwXqQRu9ImjQ,8579 +transformers/models/wavlm/convert_wavlm_original_s3prl_checkpoint_to_pytorch.py,sha256=pMXFACce5UgAMK1uHdwI6ksoirFDr6G0OER6z1yFYFM,4813 +transformers/models/wavlm/modeling_wavlm.py,sha256=Gj6VxBbtmt-IY9oHufO1hlGl-AkMYQO18eMBLQH0Fng,79381 +transformers/models/whisper/__init__.py,sha256=qT70wGFDyOsAGuyaHe9if7kn8fxK2shCe6rovr3onw4,1244 +transformers/models/whisper/__pycache__/__init__.cpython-310.pyc,, +transformers/models/whisper/__pycache__/configuration_whisper.cpython-310.pyc,, +transformers/models/whisper/__pycache__/convert_openai_to_hf.cpython-310.pyc,, +transformers/models/whisper/__pycache__/english_normalizer.cpython-310.pyc,, +transformers/models/whisper/__pycache__/feature_extraction_whisper.cpython-310.pyc,, +transformers/models/whisper/__pycache__/generation_whisper.cpython-310.pyc,, +transformers/models/whisper/__pycache__/modeling_flax_whisper.cpython-310.pyc,, +transformers/models/whisper/__pycache__/modeling_tf_whisper.cpython-310.pyc,, +transformers/models/whisper/__pycache__/modeling_whisper.cpython-310.pyc,, +transformers/models/whisper/__pycache__/processing_whisper.cpython-310.pyc,, +transformers/models/whisper/__pycache__/tokenization_whisper.cpython-310.pyc,, +transformers/models/whisper/__pycache__/tokenization_whisper_fast.cpython-310.pyc,, +transformers/models/whisper/configuration_whisper.py,sha256=Ljqle73Il6yvso4MfpxHBsPQwbkhEIxPKW6ZtSxmAzA,17042 +transformers/models/whisper/convert_openai_to_hf.py,sha256=e1E6mFHUwF4o3BvUhwNya-P5yGRGHcPCwdni4I0OdQo,14961 +transformers/models/whisper/english_normalizer.py,sha256=MTJ16OhstprR2X8owfEJmONqkoSHHyzztENejmEhSBM,22822 +transformers/models/whisper/feature_extraction_whisper.py,sha256=ICOyfQUO83ne8l8pP2G-ezftSf5J6izN9Xxy4V37VZo,14935 +transformers/models/whisper/generation_whisper.py,sha256=YjTm7BbR8Fko58uBNahustNsSdmX87ObLg3ZPuQPcfM,102445 
+transformers/models/whisper/modeling_flax_whisper.py,sha256=WURGm5l04PO1mfxsoLcIbHbrzNgSYZOXT89NFFgrYWI,73768 +transformers/models/whisper/modeling_tf_whisper.py,sha256=cXOrTKxsKUpU8nrXxhclN-wwX8f5EyiAmorTygKwiq8,84861 +transformers/models/whisper/modeling_whisper.py,sha256=N9vYhgDawL4Ekxh7qHVbrM70Dm13ra0Y0cqBLEApEwI,108303 +transformers/models/whisper/processing_whisper.py,sha256=OqcvUtgrBq1hJHeIZDrgh5d6epo4U4W6yI-sbjomZiw,3923 +transformers/models/whisper/tokenization_whisper.py,sha256=zz5kIhhyeRfvk1BtwPGAKtcdqw9suJRZFxEojuAV09k,57326 +transformers/models/whisper/tokenization_whisper_fast.py,sha256=HhkV7bEBDgTCXGp1lJ_VGgREMshUG232wqDp-rsQkQg,30243 +transformers/models/x_clip/__init__.py,sha256=ufjh6w7SNuNAUjAHp_MK3yRcrHm22-SfhZ0ZfbiXhGw,1030 +transformers/models/x_clip/__pycache__/__init__.cpython-310.pyc,, +transformers/models/x_clip/__pycache__/configuration_x_clip.cpython-310.pyc,, +transformers/models/x_clip/__pycache__/convert_x_clip_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/x_clip/__pycache__/modeling_x_clip.cpython-310.pyc,, +transformers/models/x_clip/__pycache__/processing_x_clip.cpython-310.pyc,, +transformers/models/x_clip/configuration_x_clip.py,sha256=6g8IhFhbceFhELrUURHL2kYIGs1u6aPcnZIjQu7CvKk,18729 +transformers/models/x_clip/convert_x_clip_original_pytorch_to_hf.py,sha256=WzXe8IKqSz4Bi78EIvRA6C3QiLL4c-SpARggHjIWtt4,18066 +transformers/models/x_clip/modeling_x_clip.py,sha256=9OfV1xIUxEJwpjA0Nt6vKwj6F8eTjljAVzfUezgFp9A,73360 +transformers/models/x_clip/processing_x_clip.py,sha256=6V8_tO7mYPKO4dg9cy1rRvsEFVK01vegmfGWVbKkTp0,6928 +transformers/models/xglm/__init__.py,sha256=ZU7tQBmBXzr8wh9MJNDZ5uIrsCRQP8tuNrpGDd2W3OI,1142 +transformers/models/xglm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/xglm/__pycache__/configuration_xglm.cpython-310.pyc,, +transformers/models/xglm/__pycache__/convert_xglm_original_ckpt_to_trfms.cpython-310.pyc,, +transformers/models/xglm/__pycache__/modeling_flax_xglm.cpython-310.pyc,, 
+transformers/models/xglm/__pycache__/modeling_tf_xglm.cpython-310.pyc,, +transformers/models/xglm/__pycache__/modeling_xglm.cpython-310.pyc,, +transformers/models/xglm/__pycache__/tokenization_xglm.cpython-310.pyc,, +transformers/models/xglm/__pycache__/tokenization_xglm_fast.cpython-310.pyc,, +transformers/models/xglm/configuration_xglm.py,sha256=WvBIkxXt1Kv0-kpNDPPirmP6bkFn6juD5DKiruGPyKc,5873 +transformers/models/xglm/convert_xglm_original_ckpt_to_trfms.py,sha256=9fjXP40nMFbiI9H0VV66Buqk9JQrPhAFERCOBYHl_7g,2325 +transformers/models/xglm/modeling_flax_xglm.py,sha256=s52aHZ8L5WYF07UfPlXgTiXC2SXLp9-q990oTua52z8,33195 +transformers/models/xglm/modeling_tf_xglm.py,sha256=jBjFE1g5qCL8cHci4Vp31_XIIetBIyUZC6-fnyDs2lM,45349 +transformers/models/xglm/modeling_xglm.py,sha256=bqf0r0zhZioe9lkKikZ1eEQlVqabWCuGXowollPBEr8,37705 +transformers/models/xglm/tokenization_xglm.py,sha256=INwt_y3XTT9qbhCV3Q14hlX54D9WaEDN75_5DIGcr9A,12513 +transformers/models/xglm/tokenization_xglm_fast.py,sha256=bl4P5U6fic-V3r7d4jZ8btJT0CCcI5v2rprw129-aIM,7622 +transformers/models/xlm/__init__.py,sha256=QevE83gMJ5h41H7EKxRAUN-kmE0zgOsyGj6QzWcpjmk,1058 +transformers/models/xlm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/xlm/__pycache__/configuration_xlm.cpython-310.pyc,, +transformers/models/xlm/__pycache__/convert_xlm_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/xlm/__pycache__/modeling_tf_xlm.cpython-310.pyc,, +transformers/models/xlm/__pycache__/modeling_xlm.cpython-310.pyc,, +transformers/models/xlm/__pycache__/tokenization_xlm.cpython-310.pyc,, +transformers/models/xlm/configuration_xlm.py,sha256=M1S7atRiLWIsRmmSFnm-s179-_AWtcDm3B_LeqTnNok,11053 +transformers/models/xlm/convert_xlm_original_pytorch_checkpoint_to_pytorch.py,sha256=WUF2ZQtwzejYGtF54ToSahenIRvAFxgtUZ5ckzQl9Hc,2933 +transformers/models/xlm/modeling_tf_xlm.py,sha256=UH-lSaKroJ7dmVi7HDVL9wqe7Cq9bkOn6snQ4hR4ocU,56664 
+transformers/models/xlm/modeling_xlm.py,sha256=O_QfhZvrMMEzYBjtgICbhR6JPtA3QXpaRmUYiNPqzCc,55059 +transformers/models/xlm/tokenization_xlm.py,sha256=zK3o3R5z6TW-YWLtDNeMQXINYfOdplI9Q0Vwo5mJCS4,24476 +transformers/models/xlm_roberta/__init__.py,sha256=dhjej7PBi8UrfXRkTxh9CWXnw8wuLZHPT9FYFfCkIHg,1184 +transformers/models/xlm_roberta/__pycache__/__init__.cpython-310.pyc,, +transformers/models/xlm_roberta/__pycache__/configuration_xlm_roberta.cpython-310.pyc,, +transformers/models/xlm_roberta/__pycache__/modeling_flax_xlm_roberta.cpython-310.pyc,, +transformers/models/xlm_roberta/__pycache__/modeling_tf_xlm_roberta.cpython-310.pyc,, +transformers/models/xlm_roberta/__pycache__/modeling_xlm_roberta.cpython-310.pyc,, +transformers/models/xlm_roberta/__pycache__/tokenization_xlm_roberta.cpython-310.pyc,, +transformers/models/xlm_roberta/__pycache__/tokenization_xlm_roberta_fast.cpython-310.pyc,, +transformers/models/xlm_roberta/configuration_xlm_roberta.py,sha256=OMf77FAVM4tvv7RAOP9aOFi7IyVOqGvwhtchDQ0Mx_A,7571 +transformers/models/xlm_roberta/modeling_flax_xlm_roberta.py,sha256=0i0Ns1rlKIA7-118Nw-7hh6HeYVTn3Qj0mE82K7725U,58763 +transformers/models/xlm_roberta/modeling_tf_xlm_roberta.py,sha256=izOMRENtdeo9HyJIwnDcxmGN83ZhD0a3o0VC2mBm-Mk,82124 +transformers/models/xlm_roberta/modeling_xlm_roberta.py,sha256=61fZ1oW6ZLv-Ena7E6Js7Ko9V42SQd5fFUXWA3ktkms,79700 +transformers/models/xlm_roberta/tokenization_xlm_roberta.py,sha256=QSxB42IVqgU1fpaYc4f_kvRPfElUreCByCqr0Qo22MA,12741 +transformers/models/xlm_roberta/tokenization_xlm_roberta_fast.py,sha256=foyAhWec6tKr3fuvLBukYrE_K891Z-rhlnK87m7yQRg,7960 +transformers/models/xlm_roberta_xl/__init__.py,sha256=V0fXTKk2hQmf5dKogCJ0HSiRBxVX-rs7c414ZoZIh28,1009 +transformers/models/xlm_roberta_xl/__pycache__/__init__.cpython-310.pyc,, +transformers/models/xlm_roberta_xl/__pycache__/configuration_xlm_roberta_xl.cpython-310.pyc,, 
+transformers/models/xlm_roberta_xl/__pycache__/convert_xlm_roberta_xl_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/xlm_roberta_xl/__pycache__/modeling_xlm_roberta_xl.cpython-310.pyc,, +transformers/models/xlm_roberta_xl/configuration_xlm_roberta_xl.py,sha256=q_TJlvjrKd8W1OEDniUEhtFolirVRuseCReHfgM84jA,7323 +transformers/models/xlm_roberta_xl/convert_xlm_roberta_xl_original_pytorch_checkpoint_to_pytorch.py,sha256=zVa6azx9rd33D3JkH2uqJ6W20TosJyWi9eLm3LNtc5U,8228 +transformers/models/xlm_roberta_xl/modeling_xlm_roberta_xl.py,sha256=d7QC_4rcogBcu_6d3IJcEKMUFU0vt89YrxblE-EZVMw,77904 +transformers/models/xlnet/__init__.py,sha256=t-UvrFyorGF7VMuATzjrB_cUqKsM-8O9KqxiWjtJqhs,1109 +transformers/models/xlnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/xlnet/__pycache__/configuration_xlnet.cpython-310.pyc,, +transformers/models/xlnet/__pycache__/convert_xlnet_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/xlnet/__pycache__/modeling_tf_xlnet.cpython-310.pyc,, +transformers/models/xlnet/__pycache__/modeling_xlnet.cpython-310.pyc,, +transformers/models/xlnet/__pycache__/tokenization_xlnet.cpython-310.pyc,, +transformers/models/xlnet/__pycache__/tokenization_xlnet_fast.cpython-310.pyc,, +transformers/models/xlnet/configuration_xlnet.py,sha256=U_WpCoqALv86cbvTXgTVnJwOfl3nzcGTgZJd_9SDhvY,10953 +transformers/models/xlnet/convert_xlnet_original_tf_checkpoint_to_pytorch.py,sha256=vabPDgM_imZsTVqlUq_YrKElpWhEpiNf8xkzShmgH6o,3687 +transformers/models/xlnet/modeling_tf_xlnet.py,sha256=Ftt4x4FXax2ElbkZcU0jxIuc7c5U96V9ouiQgpL4EDY,77869 +transformers/models/xlnet/modeling_xlnet.py,sha256=U30V5DpX0s42WxzmFLH6YA0V_MC1RsBZMluA_MZDyI8,93259 +transformers/models/xlnet/tokenization_xlnet.py,sha256=iANzDlYVzbNkNNeT_j94W-ELSQ2i8cR2aBvXyiOK1Rw,15732 +transformers/models/xlnet/tokenization_xlnet_fast.py,sha256=0DTH5b_d6hkErbGQHu3avDkrC4CnZ-yEYNrYrinbo5E,9399 
+transformers/models/xmod/__init__.py,sha256=WLxIbzC8oCEkMrerWHTy7GLopz0mqocSaacdcyb_BhQ,989 +transformers/models/xmod/__pycache__/__init__.cpython-310.pyc,, +transformers/models/xmod/__pycache__/configuration_xmod.cpython-310.pyc,, +transformers/models/xmod/__pycache__/convert_xmod_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/xmod/__pycache__/modeling_xmod.cpython-310.pyc,, +transformers/models/xmod/configuration_xmod.py,sha256=68vgi2zopSNso3NQRi68srEAel5XdV5q8DLsawGg8AA,9155 +transformers/models/xmod/convert_xmod_original_pytorch_checkpoint_to_pytorch.py,sha256=yFSAtXjxbAy6uXBg2XinRbk3VSEBOsWj1ugBhVNrGjQ,9859 +transformers/models/xmod/modeling_xmod.py,sha256=F00Gu-6lKalVChpqR7KqljFAMvXyAdWMd60kun67ORM,75375 +transformers/models/yolos/__init__.py,sha256=HLs9o0p_jTtnkcC0gYaa3ns1nw2uv3FC07RMOfNqptk,1077 +transformers/models/yolos/__pycache__/__init__.cpython-310.pyc,, +transformers/models/yolos/__pycache__/configuration_yolos.cpython-310.pyc,, +transformers/models/yolos/__pycache__/convert_yolos_to_pytorch.cpython-310.pyc,, +transformers/models/yolos/__pycache__/feature_extraction_yolos.cpython-310.pyc,, +transformers/models/yolos/__pycache__/image_processing_yolos.cpython-310.pyc,, +transformers/models/yolos/__pycache__/modeling_yolos.cpython-310.pyc,, +transformers/models/yolos/configuration_yolos.py,sha256=IwkN8id70ZC9d5Pv6SHqvh0ycONeGBIle7yuRYdUKt4,7618 +transformers/models/yolos/convert_yolos_to_pytorch.py,sha256=dDZ8lJdoh8FEi6iujUPGhnr-ejbcTGQIIRUB2c7WA8c,11258 +transformers/models/yolos/feature_extraction_yolos.py,sha256=DJS0_Q0qjxNUGEWhQAWDX_iHyQbBcz7U00gmAf3hyIs,1519 +transformers/models/yolos/image_processing_yolos.py,sha256=iHWbHcKKfc5IN6wmiUntZCQSwC_PB6LQe1qBr7KyAHc,67927 +transformers/models/yolos/modeling_yolos.py,sha256=ehfvFwtSpVX11jkPVDAacfsq6YkiboL5cJ_-6N7vhp4,39204 +transformers/models/yoso/__init__.py,sha256=sCXsXYZuOQLFkZMexRb8qY7EJCftR54G_eO7qIUvdss,989 
+transformers/models/yoso/__pycache__/__init__.cpython-310.pyc,, +transformers/models/yoso/__pycache__/configuration_yoso.cpython-310.pyc,, +transformers/models/yoso/__pycache__/convert_yoso_pytorch_to_pytorch.cpython-310.pyc,, +transformers/models/yoso/__pycache__/modeling_yoso.cpython-310.pyc,, +transformers/models/yoso/configuration_yoso.py,sha256=6PQqt0OjHQBTNnnhDE761sdwlq9_tqG48UJ-pBV3rBM,6715 +transformers/models/yoso/convert_yoso_pytorch_to_pytorch.py,sha256=VjPOSLINfkiaHx8M3dTNMdC8hXh3M1yyhIQ9t4Vzqk0,4115 +transformers/models/yoso/modeling_yoso.py,sha256=vZTHSxKV5-xLEnI78UUoElfLoLvuTeMcENt4xHDArwA,54977 +transformers/models/zamba/__init__.py,sha256=iqZnf8BQ49TLcB4mYwIfuJeF4aGvYhOBRiGI6_74ZFk,991 +transformers/models/zamba/__pycache__/__init__.cpython-310.pyc,, +transformers/models/zamba/__pycache__/configuration_zamba.cpython-310.pyc,, +transformers/models/zamba/__pycache__/modeling_zamba.cpython-310.pyc,, +transformers/models/zamba/configuration_zamba.py,sha256=CUwysVd0zLr7HEkrcGMfyB6Utjt_p_DORQUr1wHT7NI,11284 +transformers/models/zamba/modeling_zamba.py,sha256=Vo1_Umtt-V_fbdidu4HIiE5XFiUZPWU_N9qtHrcJJVI,71163 +transformers/models/zoedepth/__init__.py,sha256=rNum7_sa_6TE8LkLh0LEarnQnByBVHAWj8Bgnd-28kQ,1042 +transformers/models/zoedepth/__pycache__/__init__.cpython-310.pyc,, +transformers/models/zoedepth/__pycache__/configuration_zoedepth.cpython-310.pyc,, +transformers/models/zoedepth/__pycache__/convert_zoedepth_to_hf.cpython-310.pyc,, +transformers/models/zoedepth/__pycache__/image_processing_zoedepth.cpython-310.pyc,, +transformers/models/zoedepth/__pycache__/modeling_zoedepth.cpython-310.pyc,, +transformers/models/zoedepth/configuration_zoedepth.py,sha256=UlEOSvaf754FvsbP2ZHXm_BAFe9JyBRaiolPKqK3GcM,12757 +transformers/models/zoedepth/convert_zoedepth_to_hf.py,sha256=STtF2DGuZvFgSGpeDj8uNQRadiLQ0h17HvoKDD5TdFQ,18075 +transformers/models/zoedepth/image_processing_zoedepth.py,sha256=w02YxQfV5lTrfi889-5E48KweLprqL4PyeoosObGLV8,28076 
+transformers/models/zoedepth/modeling_zoedepth.py,sha256=bwx8YgmIFoFxp1h3ZpjNzixsx5Lsr-jUlWdugcFaNrk,57258 +transformers/onnx/__init__.py,sha256=wALLY4TPOK2iPrFcfZf_WiEmTRAU6dAWHElxGdexr58,1548 +transformers/onnx/__main__.py,sha256=JZ9ZmeRsnDitwTMWb-dFT8W9AEmMoMKLQ3SvbyCkY0w,9497 +transformers/onnx/__pycache__/__init__.cpython-310.pyc,, +transformers/onnx/__pycache__/__main__.cpython-310.pyc,, +transformers/onnx/__pycache__/config.cpython-310.pyc,, +transformers/onnx/__pycache__/convert.cpython-310.pyc,, +transformers/onnx/__pycache__/features.cpython-310.pyc,, +transformers/onnx/__pycache__/utils.cpython-310.pyc,, +transformers/onnx/config.py,sha256=zPDgC_HSLmMeqPkcLv_Y8EfbfLLEDLqPrvrfQCRyhl8,32556 +transformers/onnx/convert.py,sha256=ZSh9jQE6B6cCxhlSbKLHxNmj48HkXXdl-HF7iGtZy5k,19369 +transformers/onnx/features.py,sha256=GSuwZj760THxAkDmJYROt43La0GaY-bA19j2bE-XYVI,28264 +transformers/onnx/utils.py,sha256=39Uw_GkFBsTb6ZvMIHRTnI289aQDhc6hwfEapaBGE-o,3625 +transformers/optimization.py,sha256=pOi-3ADMoveZKUqQfi3yRbzyplmi_x5z-gaUNhGXbBk,39133 +transformers/optimization_tf.py,sha256=UPtbbeR__ZoPG7eBD5XMBBiYfjAZR8a5L2zWJsCLL_8,16854 +transformers/pipelines/__init__.py,sha256=O72BzwH-2MgV0feRqSiRber8vsBTsUccGIEd3seEy3I,55211 +transformers/pipelines/__pycache__/__init__.cpython-310.pyc,, +transformers/pipelines/__pycache__/audio_classification.cpython-310.pyc,, +transformers/pipelines/__pycache__/audio_utils.cpython-310.pyc,, +transformers/pipelines/__pycache__/automatic_speech_recognition.cpython-310.pyc,, +transformers/pipelines/__pycache__/base.cpython-310.pyc,, +transformers/pipelines/__pycache__/depth_estimation.cpython-310.pyc,, +transformers/pipelines/__pycache__/document_question_answering.cpython-310.pyc,, +transformers/pipelines/__pycache__/feature_extraction.cpython-310.pyc,, +transformers/pipelines/__pycache__/fill_mask.cpython-310.pyc,, +transformers/pipelines/__pycache__/image_classification.cpython-310.pyc,, 
+transformers/pipelines/__pycache__/image_feature_extraction.cpython-310.pyc,, +transformers/pipelines/__pycache__/image_segmentation.cpython-310.pyc,, +transformers/pipelines/__pycache__/image_text_to_text.cpython-310.pyc,, +transformers/pipelines/__pycache__/image_to_image.cpython-310.pyc,, +transformers/pipelines/__pycache__/image_to_text.cpython-310.pyc,, +transformers/pipelines/__pycache__/mask_generation.cpython-310.pyc,, +transformers/pipelines/__pycache__/object_detection.cpython-310.pyc,, +transformers/pipelines/__pycache__/pt_utils.cpython-310.pyc,, +transformers/pipelines/__pycache__/question_answering.cpython-310.pyc,, +transformers/pipelines/__pycache__/table_question_answering.cpython-310.pyc,, +transformers/pipelines/__pycache__/text2text_generation.cpython-310.pyc,, +transformers/pipelines/__pycache__/text_classification.cpython-310.pyc,, +transformers/pipelines/__pycache__/text_generation.cpython-310.pyc,, +transformers/pipelines/__pycache__/text_to_audio.cpython-310.pyc,, +transformers/pipelines/__pycache__/token_classification.cpython-310.pyc,, +transformers/pipelines/__pycache__/video_classification.cpython-310.pyc,, +transformers/pipelines/__pycache__/visual_question_answering.cpython-310.pyc,, +transformers/pipelines/__pycache__/zero_shot_audio_classification.cpython-310.pyc,, +transformers/pipelines/__pycache__/zero_shot_classification.cpython-310.pyc,, +transformers/pipelines/__pycache__/zero_shot_image_classification.cpython-310.pyc,, +transformers/pipelines/__pycache__/zero_shot_object_detection.cpython-310.pyc,, +transformers/pipelines/audio_classification.py,sha256=ler2iJmBnYihqAYD-UuiUF0ytLGFwfipkI1zkBhccUA,10021 +transformers/pipelines/audio_utils.py,sha256=5kg9oeQf5jhOKgMvcbiLMBqa5mzHwfXCb_DVd5gjpBA,12276 +transformers/pipelines/automatic_speech_recognition.py,sha256=PG7D35He1CidDAsR4csDb9a7HJMb_mYuiVPzHrOB1YU,39330 +transformers/pipelines/base.py,sha256=eg5BefjRYByvIEzdNDTRP4fg-pOXtwgywPZXB7aw4nQ,63053 
+transformers/pipelines/depth_estimation.py,sha256=b-S2up5TFQO5OaiB--ZqE6UwXHfJKtjrS7P1-DeW_Ac,5748 +transformers/pipelines/document_question_answering.py,sha256=fl8holx_UUDc5aoXau9c3IXX-w5q1DKkat4tRB1XGv0,24286 +transformers/pipelines/feature_extraction.py,sha256=Ar_hPljY1Fa_xAsRYX4cCCss1vf-iC5uuKYHp3rejd0,3374 +transformers/pipelines/fill_mask.py,sha256=DfU3fNqd-u1b8gdV1BcP46dToDFLpjkRrIpX7PizgPU,11641 +transformers/pipelines/image_classification.py,sha256=rOHtVdjajnTtVrBogBZpeEAuZ5QdDTxj4gDCDqhprnY,9781 +transformers/pipelines/image_feature_extraction.py,sha256=KIn3yldAUTxFmal-SNF7CPkcx1h3q_KMvyjUsHzsx9o,4732 +transformers/pipelines/image_segmentation.py,sha256=j68l-F6zFJqjaVXurVH6JnUvIIRzBVMBkG1JwokBeFE,9614 +transformers/pipelines/image_text_to_text.py,sha256=7viIvEtjQuvtUPfaUoTFCQyIJSMuHwlsZzO6haxcmsk,20484 +transformers/pipelines/image_to_image.py,sha256=VHB7ElQIYrBGPQsD5QtTZ0p4piXJ_r13KU4fkWgbLms,5022 +transformers/pipelines/image_to_text.py,sha256=i0M6APAlw2CAZoKT0_XQt-xAqZGrCHH0X68bOYJdPis,9505 +transformers/pipelines/mask_generation.py,sha256=HRmUx-H9pl85go26t70Th7lsPQu_nDdnHgi62FkKL-s,13204 +transformers/pipelines/object_detection.py,sha256=AYXkT4ItG1krxzRUNcXKLEkGqElVetA8EED3qkUGA0I,8219 +transformers/pipelines/pt_utils.py,sha256=D-cFFKAaVtn3jaZGPKFr-U3JF3_YR5H3kO4QD1jrqQY,12762 +transformers/pipelines/question_answering.py,sha256=3swwedMRxAUuOd-p-bl9QWu5R6h9uYN042brq-xYAAk,30355 +transformers/pipelines/table_question_answering.py,sha256=j6jLgTZiT075nnl2G5hese2uH1Gc8Q3_yJVh81XoP78,20358 +transformers/pipelines/text2text_generation.py,sha256=Fhu5MnZ6dj51OwuhPXzj1mU1c6MmXDv-tQY7Ybfuy34,17706 +transformers/pipelines/text_classification.py,sha256=x2aqpPTMHnlxl7UaCiF7svNYFvGYQaLXFck0wjuREkM,11044 +transformers/pipelines/text_generation.py,sha256=vZyAnyaYK4RLNsHp_81xwX2QO2bSZwUiXQhyGTzsero,22918 +transformers/pipelines/text_to_audio.py,sha256=Wtwt4TGuTHfTH8MY7am9lgRLmJ874GZl137F4RSNa3M,8804 
+transformers/pipelines/token_classification.py,sha256=h0GzOec0wAEofnGukueLcP8QHB14r3b4ne9UaFVpcSc,26943 +transformers/pipelines/video_classification.py,sha256=H7Utj2UBVjRIblmJlF86koR8LYj9NMgcH24L4Qrvspc,7833 +transformers/pipelines/visual_question_answering.py,sha256=QfDj2u3s4jxkfwdpFWZiuZTZZ16GRAsjLk7n04vPQ4w,9192 +transformers/pipelines/zero_shot_audio_classification.py,sha256=yxjBnd1f99GoJYOE51n8JhUiMRmkotMiFn-2uLnSsPo,6869 +transformers/pipelines/zero_shot_classification.py,sha256=fBqB7-aNsKCk0nKMQPuiGFVb6HWxzAp8K-geg89-F9Q,12500 +transformers/pipelines/zero_shot_image_classification.py,sha256=5poR-cQ-PgpLpN4TlHJpBFg6LsYgh9GZn0adKm0NG5Y,7818 +transformers/pipelines/zero_shot_object_detection.py,sha256=A46-lyloP6XLyXCzuoYMgP0TDhchIZYr8IjiKNbJhos,10251 +transformers/processing_utils.py,sha256=bBzHveEpVdBu70DpFM8pRr23KAKqC8j5MHhQXIg6TBw,60580 +transformers/pytorch_utils.py,sha256=2Kl6kR8x5bhILHDm3O9qKh4fx-E5Cpzzd6KXphYqH7o,14122 +transformers/quantizers/__init__.py,sha256=hCprQnoI20-O1FSMSRgD-P9_NKEzN7kEfY66_BrQxz0,699 +transformers/quantizers/__pycache__/__init__.cpython-310.pyc,, +transformers/quantizers/__pycache__/auto.cpython-310.pyc,, +transformers/quantizers/__pycache__/base.cpython-310.pyc,, +transformers/quantizers/__pycache__/quantizer_aqlm.cpython-310.pyc,, +transformers/quantizers/__pycache__/quantizer_awq.cpython-310.pyc,, +transformers/quantizers/__pycache__/quantizer_bitnet.cpython-310.pyc,, +transformers/quantizers/__pycache__/quantizer_bnb_4bit.cpython-310.pyc,, +transformers/quantizers/__pycache__/quantizer_bnb_8bit.cpython-310.pyc,, +transformers/quantizers/__pycache__/quantizer_compressed_tensors.cpython-310.pyc,, +transformers/quantizers/__pycache__/quantizer_eetq.cpython-310.pyc,, +transformers/quantizers/__pycache__/quantizer_fbgemm_fp8.cpython-310.pyc,, +transformers/quantizers/__pycache__/quantizer_gptq.cpython-310.pyc,, +transformers/quantizers/__pycache__/quantizer_higgs.cpython-310.pyc,, 
+transformers/quantizers/__pycache__/quantizer_hqq.cpython-310.pyc,, +transformers/quantizers/__pycache__/quantizer_quanto.cpython-310.pyc,, +transformers/quantizers/__pycache__/quantizer_torchao.cpython-310.pyc,, +transformers/quantizers/__pycache__/quantizer_vptq.cpython-310.pyc,, +transformers/quantizers/__pycache__/quantizers_utils.cpython-310.pyc,, +transformers/quantizers/auto.py,sha256=FSeHATC5UUDwBtkskSTXcLunyEG715D1oYKe0ijzWq0,8015 +transformers/quantizers/base.py,sha256=nzciGVtvMbzp6otpGhQ8opO4fOHV3OXyjtt67pLXO2s,10502 +transformers/quantizers/quantizer_aqlm.py,sha256=khCb8mQ2T0tYPwCJqA7wkO6_59R2ChtHHnwQJzhfABc,3692 +transformers/quantizers/quantizer_awq.py,sha256=T-6LZIw2dmP03iQx_fYLmN72SYiZcds0pK68tlywqQs,6902 +transformers/quantizers/quantizer_bitnet.py,sha256=CemDWbQrsFSlay_VK0fumF5EKPo6QsCEzJihkbC8PZg,4302 +transformers/quantizers/quantizer_bnb_4bit.py,sha256=Fv9CvyaPKSDd_U7lMpYuUU3Jp-hUbbOTKrnZOi-Vpsc,16331 +transformers/quantizers/quantizer_bnb_8bit.py,sha256=KE4Vj-l1gFzmL1dIiOpCvgEFPtg6v3Bsa1JwoSpbndc,13894 +transformers/quantizers/quantizer_compressed_tensors.py,sha256=hBvOaMv78pwnnaRj3yUhP7MbsWs4580ZcMHRgBV1aMk,5367 +transformers/quantizers/quantizer_eetq.py,sha256=9Vw168CJV2GTyzA_hIuwTtPEyFArhrjA6ZQQqMBTbzo,7326 +transformers/quantizers/quantizer_fbgemm_fp8.py,sha256=i2Ad9lj4jZtwjd3nDRgLXy5I35qYYgItuZ8AwNSQlSo,8142 +transformers/quantizers/quantizer_gptq.py,sha256=YqNPg0OXznMEpmbK9tY33wk6ktie4JJ3Q4LrT2bETe0,5431 +transformers/quantizers/quantizer_higgs.py,sha256=xN-7WmQ2LWbAwEdbrK8qULJ86mlOUQiFc9AaArkNCY4,9577 +transformers/quantizers/quantizer_hqq.py,sha256=-9GP6MPqtRDdLLundZmpgkppjWZ9WFWFAO13xCttDHw,11438 +transformers/quantizers/quantizer_quanto.py,sha256=6kba9vt6-q7zUh68HaMzLoGKEX0k0JvNhjjVe5OJXDY,8107 +transformers/quantizers/quantizer_torchao.py,sha256=KkJw-Q3Zj87SlThx37nfbM1_VeYslfvalctYt13eKB0,10120 +transformers/quantizers/quantizer_vptq.py,sha256=qGiJrDQHuYTb5AQw7TuapZwwhsV--NAtBfLC1ntBPSY,3720 
+transformers/quantizers/quantizers_utils.py,sha256=6bgmf8mLxow6gXonTFX7PLfqFsf6plUj7DOeXnXhwMM,1066 +transformers/safetensors_conversion.py,sha256=LjnFRVfXRsOhIHdyiw6pevDJcMdsKwc3kvQ6csPs9wA,4074 +transformers/sagemaker/__init__.py,sha256=fKtKAHamz_CLL9jPGCa2E-1n8RmuS-58qGtzZuKc3qg,730 +transformers/sagemaker/__pycache__/__init__.cpython-310.pyc,, +transformers/sagemaker/__pycache__/trainer_sm.cpython-310.pyc,, +transformers/sagemaker/__pycache__/training_args_sm.cpython-310.pyc,, +transformers/sagemaker/trainer_sm.py,sha256=7GsKLtjdMfKp98OwHD7RcBsl745OOwHAaBswkfLkfsE,1044 +transformers/sagemaker/training_args_sm.py,sha256=4ZnQhITfMwT0y2Y2MvkI11PEB_yfTX5Z7WrPKt0VXD8,5389 +transformers/testing_utils.py,sha256=jSySMI5h_wWIzuHSaVKswYCjBDfAeyvHrGVOPUmS2Uc,100079 +transformers/tf_utils.py,sha256=v4iybFTb3eRDgGzhAUTVYim-qNZvYF2k6rlHk7wTii4,11386 +transformers/time_series_utils.py,sha256=MT780YtbZhdZcz7I9WJ9XVpmZgCVUT2eJ4-g8snaYvQ,7521 +transformers/tokenization_utils.py,sha256=-poyf1T1zu46IYClocA-w3wxZYf2hRcxdfEb2tEMrlQ,47793 +transformers/tokenization_utils_base.py,sha256=J5gKmMytFCZNzYtkT1pJKHwpdmIJj0Tt2KvlBswC0k4,207082 +transformers/tokenization_utils_fast.py,sha256=WdT86ZfZ6asp_sa15JpiPjogmyM-VgIDUjgchEMs9E0,40724 +transformers/trainer.py,sha256=muPa90eWSENaG9cNzIXerZp3XZ4BPRYC4aJuAG5bTvE,252742 +transformers/trainer_callback.py,sha256=Xo9Rvwlvy1_GuapxIttRGV_XKZTarCvjA4vFxQ8Wo14,31867 +transformers/trainer_pt_utils.py,sha256=5Zo4ZExIJwnzS7d1Mft-tWJfREhSmNx_wrsnhlCEzOE,60567 +transformers/trainer_seq2seq.py,sha256=MIcblxlkEhfVMwxCIWNE9es3pDPwiY5E2_BqZHpC_bQ,18372 +transformers/trainer_utils.py,sha256=m-Vj5VKKEVfSttQ8Jd6oItiModIHtnWhZWXBQE-_S10,32881 +transformers/training_args.py,sha256=kya6inNPtmEHIhsMQfpnKFV1VlUwpSQE9p5sqLC6Css,158449 +transformers/training_args_seq2seq.py,sha256=J9_vJQR4VxWAHWVbRmxjXHSRLd6KSe8inisIVezlbXI,3896 +transformers/training_args_tf.py,sha256=dfMUa9jVO-rVxvImlQZr_NVqpVgtgBafBUbR_EcbkPY,14572 
+transformers/utils/__init__.py,sha256=jmq1sh3gzRM1Pz-POIqdhJMxUN7NJEwpsu9RwRSh1IE,9397 +transformers/utils/__pycache__/__init__.cpython-310.pyc,, +transformers/utils/__pycache__/backbone_utils.cpython-310.pyc,, +transformers/utils/__pycache__/bitsandbytes.cpython-310.pyc,, +transformers/utils/__pycache__/chat_template_utils.cpython-310.pyc,, +transformers/utils/__pycache__/constants.cpython-310.pyc,, +transformers/utils/__pycache__/deprecation.cpython-310.pyc,, +transformers/utils/__pycache__/doc.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_detectron2_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_essentia_and_librosa_and_pretty_midi_and_scipy_and_torch_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_flax_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_keras_nlp_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_music_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_pt_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_sentencepiece_and_tokenizers_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_sentencepiece_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_speech_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_tensorflow_text_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_tf_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_timm_and_torchvision_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_tokenizers_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_torchaudio_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_torchvision_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_vision_objects.cpython-310.pyc,, +transformers/utils/__pycache__/fx.cpython-310.pyc,, +transformers/utils/__pycache__/generic.cpython-310.pyc,, +transformers/utils/__pycache__/hp_naming.cpython-310.pyc,, +transformers/utils/__pycache__/hub.cpython-310.pyc,, 
+transformers/utils/__pycache__/import_utils.cpython-310.pyc,, +transformers/utils/__pycache__/logging.cpython-310.pyc,, +transformers/utils/__pycache__/model_parallel_utils.cpython-310.pyc,, +transformers/utils/__pycache__/notebook.cpython-310.pyc,, +transformers/utils/__pycache__/peft_utils.cpython-310.pyc,, +transformers/utils/__pycache__/quantization_config.cpython-310.pyc,, +transformers/utils/__pycache__/sentencepiece_model_pb2.cpython-310.pyc,, +transformers/utils/__pycache__/sentencepiece_model_pb2_new.cpython-310.pyc,, +transformers/utils/__pycache__/versions.cpython-310.pyc,, +transformers/utils/backbone_utils.py,sha256=BZJOavniwDKDkz_f7yD-m8ZGDUx-li5FwqVZtJjm3rM,17431 +transformers/utils/bitsandbytes.py,sha256=LzOKwcHWAxxZZv-7Ts9Q0vlEYvHd18affVgVbiR3Tzs,1040 +transformers/utils/chat_template_utils.py,sha256=GiTmUnYMmnCU7bh6P7j-tNJDISZZD0BGzNfAORCFW90,17232 +transformers/utils/constants.py,sha256=sZsUwOnA3CbtN1svs9YoaNLTTsAc9RVaITsgpf8K4iI,282 +transformers/utils/deprecation.py,sha256=1XHELEE0sqQkrB_FhtBcwAjHk5ETcLVCsEeakZa8Jdw,7614 +transformers/utils/doc.py,sha256=1oAZT_lRqLlvI-tL9BCd_whVOvGCN1dub8b5Z85n3GA,41001 +transformers/utils/dummy_detectron2_objects.py,sha256=n7Pt_7sbVBNfohKGcOARB-ZcPcJRbjEAcoLd2vTXndU,340 +transformers/utils/dummy_essentia_and_librosa_and_pretty_midi_and_scipy_and_torch_objects.py,sha256=n6pY4s7zCII3dzo7Ejd0RviHa_pMateuDEwbbHgsTUY,902 +transformers/utils/dummy_flax_objects.py,sha256=-iOHQAjQUx2S0_lpUA2REHQUULUmox2iEINyBjM3xn0,34003 +transformers/utils/dummy_keras_nlp_objects.py,sha256=AVWt2orICCUXi754bkavvqPzYO91PjER-FlUZAw2jZc,294 +transformers/utils/dummy_music_objects.py,sha256=1lxIebYUOdHJWMQ_T5IQgPgcO_wp_8YM_HGc3skuGVg,458 +transformers/utils/dummy_pt_objects.py,sha256=e4RZvfFwTbNy_u9MOXdFfWx9t49ou8u9Ky0_ptKZ_Ok,252498 +transformers/utils/dummy_sentencepiece_and_tokenizers_objects.py,sha256=BgPLr8Wz8A-17K86x04N21CKXtWNQLJEWx2c4aZRqaA,286 
+transformers/utils/dummy_sentencepiece_objects.py,sha256=pBykNNg9IPDeshVOeaw4sxHvgmt3by9X4rIQtz0ONYg,6455 +transformers/utils/dummy_speech_objects.py,sha256=9eFm1cjdsYOPBoAz9JTgP35Bg8WF2C9AZ_y1hFpKZdQ,465 +transformers/utils/dummy_tensorflow_text_objects.py,sha256=43V0IA2kb9gtuL0S1OL1eRFFxzQwKg4pPjMVuXUB5qg,306 +transformers/utils/dummy_tf_objects.py,sha256=q_GvL4tCtlJjSpsuLbBt3uIw_4xdE9eU0V0x7rslRq4,66262 +transformers/utils/dummy_timm_and_torchvision_objects.py,sha256=EFuC5z6IsKOqqowoUGviJ3KgTjzvdTTN7gGQ3it-4t0,324 +transformers/utils/dummy_tokenizers_objects.py,sha256=HW_eUXlwV3VPXxfSHSX3l4taOLbrajkziGUKTx8PCtE,11456 +transformers/utils/dummy_torchaudio_objects.py,sha256=9A7Y4643_hTaqqZKlL-O524wRnrmNtODxisuDdO_7kU,488 +transformers/utils/dummy_torchvision_objects.py,sha256=_mol1E5LfKc4h-pMW5BmCRbx0Om_zcAwtyTbqWhIbYA,1209 +transformers/utils/dummy_vision_objects.py,sha256=J5rebtuBjd9ZM-OjcCNHV7PYMv9ie6N9rYViwANdRs0,17759 +transformers/utils/fx.py,sha256=_8340YkdY7oPlhCEFeo8TQTCjMfNDSzSNtZX5j3-aOg,57316 +transformers/utils/generic.py,sha256=KgDoJBemQ_WSHrvhpzolJrhia6beKllCLOXwodz6IvQ,29585 +transformers/utils/hp_naming.py,sha256=vqcOXcDOyqbISWo8-ClUJUOBVbZM1h08EcymTwcRthc,4979 +transformers/utils/hub.py,sha256=GUfEXtdFG6FVAIswTgqBMNiUVaTyCFYJUBc9PPG2un8,58153 +transformers/utils/import_utils.py,sha256=SNJAot-C_t5TmzC-xTLnK974R5lScLvSRuTcQ8Hh-l4,81440 +transformers/utils/logging.py,sha256=jhMbECpCDaMMjd6LEZC7Qys3wpFtsmZ0K3dCVB5ZLCo,12197 +transformers/utils/model_parallel_utils.py,sha256=XbGU9IlFF59K_aplRxUGVnTfIZ9mpbLomKqQ08ooTew,2272 +transformers/utils/notebook.py,sha256=u-0gxSgdQ6Ow1jdVljOcWMFJ4roll-6nEsS9lsYPkKc,15828 +transformers/utils/peft_utils.py,sha256=Jw6MjvVLtQ7booot0zK6X_xqRl_PAOh3lFZj1A2Guc8,5207 +transformers/utils/quantization_config.py,sha256=4KOEXYkM2YUFBldY0Fo7lM9o5JBCR4LuSD7C8Rxbif0,69717 +transformers/utils/sentencepiece_model_pb2.py,sha256=XiQs9uMEusfAZP6t6IBuTTX9yl7LiOyJEi7Ib-Wzmq0,50677 
+transformers/utils/sentencepiece_model_pb2_new.py,sha256=Is_lMJU8MlmXGTkRL-Ut9hDWJwEmYeXedPCHPFaqlwM,6622 +transformers/utils/versions.py,sha256=C-Tqr4qGSHH64ygIBCSo8gA6azz7Dbzh8zdc_yjMkX8,4337 diff --git a/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/REQUESTED b/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/WHEEL b/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..1eb3c49d99559863120cfb8433fc8738fba43ba9 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (78.1.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/direct_url.json b/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/direct_url.json new file mode 100644 index 0000000000000000000000000000000000000000..7aeeee178aa351c066f6b4d321e56bc2c730bc89 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/direct_url.json @@ -0,0 +1 @@ +{"url": "https://github.com/huggingface/transformers.git", "vcs_info": {"commit_id": "02a492a8384abc2fa22516f859e17e8528dd1aa6", "requested_revision": "02a492a8384abc2fa22516f859e17e8528dd1aa6", "vcs": "git"}} \ No newline at end of file diff --git a/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/entry_points.txt b/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..8a7995ed6f21261a78509c57d57daba51ecf1a7d --- /dev/null +++ 
b/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +transformers-cli = transformers.commands.transformers_cli:main diff --git a/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/licenses/LICENSE b/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..68b7d66c97d66c58de883ed0c451af2b3183e6f3 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/licenses/LICENSE @@ -0,0 +1,203 @@ +Copyright 2018- The Hugging Face team. All rights reserved. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/top_level.txt b/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..976a2b1f3998279c10c413279a095be86bf69167 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/transformers-4.49.0.dev0.dist-info/top_level.txt @@ -0,0 +1 @@ +transformers diff --git a/vlmpy310/lib/python3.10/site-packages/websockets-11.0.3.dist-info/INSTALLER b/vlmpy310/lib/python3.10/site-packages/websockets-11.0.3.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/websockets-11.0.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/vlmpy310/lib/python3.10/site-packages/websockets-11.0.3.dist-info/LICENSE b/vlmpy310/lib/python3.10/site-packages/websockets-11.0.3.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..5d61ece22a75a759aed8e52af280eca28d35d6bf --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/websockets-11.0.3.dist-info/LICENSE @@ -0,0 +1,24 @@ +Copyright (c) Aymeric Augustin and contributors + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 
* Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of the copyright holder nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/vlmpy310/lib/python3.10/site-packages/websockets-11.0.3.dist-info/METADATA b/vlmpy310/lib/python3.10/site-packages/websockets-11.0.3.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..33c816ade9c73d1837333d82ebbf7e52e044538a --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/websockets-11.0.3.dist-info/METADATA @@ -0,0 +1,176 @@ +Metadata-Version: 2.1 +Name: websockets +Version: 11.0.3 +Summary: An implementation of the WebSocket Protocol (RFC 6455 & 7692) +Author-email: Aymeric Augustin +License: BSD-3-Clause +Project-URL: homepage, https://github.com/aaugustin/websockets +Project-URL: changelog, https://websockets.readthedocs.io/en/stable/project/changelog.html +Project-URL: documentation, https://websockets.readthedocs.io/ +Project-URL: funding, https://tidelift.com/subscription/pkg/pypi-websockets?utm_source=pypi-websockets&utm_medium=referral&utm_campaign=readme +Project-URL: tracker, https://github.com/aaugustin/websockets/issues +Keywords: WebSocket +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Requires-Python: >=3.7 +License-File: LICENSE + +.. image:: logo/horizontal.svg + :width: 480px + :alt: websockets + +|licence| |version| |pyversions| |tests| |docs| |openssf| + +.. |licence| image:: https://img.shields.io/pypi/l/websockets.svg + :target: https://pypi.python.org/pypi/websockets + +.. 
|version| image:: https://img.shields.io/pypi/v/websockets.svg + :target: https://pypi.python.org/pypi/websockets + +.. |pyversions| image:: https://img.shields.io/pypi/pyversions/websockets.svg + :target: https://pypi.python.org/pypi/websockets + +.. |tests| image:: https://img.shields.io/github/checks-status/aaugustin/websockets/main?label=tests + :target: https://github.com/aaugustin/websockets/actions/workflows/tests.yml + +.. |docs| image:: https://img.shields.io/readthedocs/websockets.svg + :target: https://websockets.readthedocs.io/ + +.. |openssf| image:: https://bestpractices.coreinfrastructure.org/projects/6475/badge + :target: https://bestpractices.coreinfrastructure.org/projects/6475 + +What is ``websockets``? +----------------------- + +websockets is a library for building WebSocket_ servers and clients in Python +with a focus on correctness, simplicity, robustness, and performance. + +.. _WebSocket: https://developer.mozilla.org/en-US/docs/Web/API/WebSockets_API + +Built on top of ``asyncio``, Python's standard asynchronous I/O framework, the +default implementation provides an elegant coroutine-based API. + +An implementation on top of ``threading`` and a Sans-I/O implementation are also +available. + +`Documentation is available on Read the Docs. `_ + +.. copy-pasted because GitHub doesn't support the include directive + +Here's an echo server with the ``asyncio`` API: + +.. code:: python + + #!/usr/bin/env python + + import asyncio + from websockets.server import serve + + async def echo(websocket): + async for message in websocket: + await websocket.send(message) + + async def main(): + async with serve(echo, "localhost", 8765): + await asyncio.Future() # run forever + + asyncio.run(main()) + +Here's how a client sends and receives messages with the ``threading`` API: + +.. 
code:: python + + #!/usr/bin/env python + + import asyncio + from websockets.sync.client import connect + + def hello(): + with connect("ws://localhost:8765") as websocket: + websocket.send("Hello world!") + message = websocket.recv() + print(f"Received: {message}") + + hello() + + +Does that look good? + +`Get started with the tutorial! `_ + +Why should I use ``websockets``? +-------------------------------- + +The development of ``websockets`` is shaped by four principles: + +1. **Correctness**: ``websockets`` is heavily tested for compliance with + :rfc:`6455`. Continuous integration fails under 100% branch coverage. + +2. **Simplicity**: all you need to understand is ``msg = await ws.recv()`` and + ``await ws.send(msg)``. ``websockets`` takes care of managing connections + so you can focus on your application. + +3. **Robustness**: ``websockets`` is built for production. For example, it was + the only library to `handle backpressure correctly`_ before the issue + became widely known in the Python community. + +4. **Performance**: memory usage is optimized and configurable. A C extension + accelerates expensive operations. It's pre-compiled for Linux, macOS and + Windows and packaged in the wheel format for each system and Python version. + +Documentation is a first class concern in the project. Head over to `Read the +Docs`_ and see for yourself. + +.. _Read the Docs: https://websockets.readthedocs.io/ +.. _handle backpressure correctly: https://vorpus.org/blog/some-thoughts-on-asynchronous-api-design-in-a-post-asyncawait-world/#websocket-servers + +Why shouldn't I use ``websockets``? +----------------------------------- + +* If you prefer callbacks over coroutines: ``websockets`` was created to + provide the best coroutine-based API to manage WebSocket connections in + Python. Pick another library for a callback-based API. 
+ +* If you're looking for a mixed HTTP / WebSocket library: ``websockets`` aims + at being an excellent implementation of :rfc:`6455`: The WebSocket Protocol + and :rfc:`7692`: Compression Extensions for WebSocket. Its support for HTTP + is minimal — just enough for an HTTP health check. + + If you want to do both in the same server, look at HTTP frameworks that + build on top of ``websockets`` to support WebSocket connections, like + Sanic_. + +.. _Sanic: https://sanicframework.org/en/ + +What else? +---------- + +Bug reports, patches and suggestions are welcome! + +To report a security vulnerability, please use the `Tidelift security +contact`_. Tidelift will coordinate the fix and disclosure. + +.. _Tidelift security contact: https://tidelift.com/security + +For anything else, please open an issue_ or send a `pull request`_. + +.. _issue: https://github.com/aaugustin/websockets/issues/new +.. _pull request: https://github.com/aaugustin/websockets/compare/ + +Participants must uphold the `Contributor Covenant code of conduct`_. + +.. _Contributor Covenant code of conduct: https://github.com/aaugustin/websockets/blob/main/CODE_OF_CONDUCT.md + +``websockets`` is released under the `BSD license`_. + +.. 
_BSD license: https://github.com/aaugustin/websockets/blob/main/LICENSE diff --git a/vlmpy310/lib/python3.10/site-packages/websockets-11.0.3.dist-info/RECORD b/vlmpy310/lib/python3.10/site-packages/websockets-11.0.3.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..0efc5fcc0fc4444445ad2a13b2137d7db36ddbfe --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/websockets-11.0.3.dist-info/RECORD @@ -0,0 +1,88 @@ +websockets-11.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +websockets-11.0.3.dist-info/LICENSE,sha256=PWoMBQ2L7FL6utUC5F-yW9ArytvXDeo01Ee2oP9Obag,1514 +websockets-11.0.3.dist-info/METADATA,sha256=p679f2CwWdb-87gsrSEpFWqlue5gFCyNJbYqCucVpmg,6619 +websockets-11.0.3.dist-info/RECORD,, +websockets-11.0.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +websockets-11.0.3.dist-info/WHEEL,sha256=24QVFBJBvF4a1ZC352AVuqHCJgY3xUfAV-_i6gkjtuc,225 +websockets-11.0.3.dist-info/top_level.txt,sha256=CMpdKklxKsvZgCgyltxUWOHibZXZ1uYIVpca9xsQ8Hk,11 +websockets/__init__.py,sha256=kezN0NeaLAYBNP8eNgIadwCf-X4qAuQjdqXVrdLm5AM,3426 +websockets/__main__.py,sha256=8Dtga-XePHQ4jqgMMuXHT8XRH_hSvs8bEZ7-v49vTKg,4744 +websockets/__pycache__/__init__.cpython-310.pyc,, +websockets/__pycache__/__main__.cpython-310.pyc,, +websockets/__pycache__/auth.cpython-310.pyc,, +websockets/__pycache__/client.cpython-310.pyc,, +websockets/__pycache__/connection.cpython-310.pyc,, +websockets/__pycache__/datastructures.cpython-310.pyc,, +websockets/__pycache__/exceptions.cpython-310.pyc,, +websockets/__pycache__/frames.cpython-310.pyc,, +websockets/__pycache__/headers.cpython-310.pyc,, +websockets/__pycache__/http.cpython-310.pyc,, +websockets/__pycache__/http11.cpython-310.pyc,, +websockets/__pycache__/imports.cpython-310.pyc,, +websockets/__pycache__/protocol.cpython-310.pyc,, +websockets/__pycache__/server.cpython-310.pyc,, +websockets/__pycache__/streams.cpython-310.pyc,, 
+websockets/__pycache__/typing.cpython-310.pyc,, +websockets/__pycache__/uri.cpython-310.pyc,, +websockets/__pycache__/utils.cpython-310.pyc,, +websockets/__pycache__/version.cpython-310.pyc,, +websockets/auth.py,sha256=VObSo1Q61jh0XFXpeL6-1ir0OXlqA8OJjoChsB_01k8,139 +websockets/client.py,sha256=dfEJWRlGLaSfssMr3Lss-02n-IVosorFMD6Ub9JAI3E,12418 +websockets/connection.py,sha256=UivBmLaKmEOGpL1bU8uwh1PXIqMFiOUTVRi_gM7w5Rg,333 +websockets/datastructures.py,sha256=pcT7RdCI6ZfYddHWMcwPR-1A89GRpj26xgdtmZsRgiA,5738 +websockets/exceptions.py,sha256=DUzr1GdPO6FDAl9C5Wb3IhjSYSomVbvWo_NTM46zWm4,10143 +websockets/extensions/__init__.py,sha256=QkZsxaJVllVSp1uhdD5uPGibdbx_091GrVVfS5LXcpw,98 +websockets/extensions/__pycache__/__init__.cpython-310.pyc,, +websockets/extensions/__pycache__/base.cpython-310.pyc,, +websockets/extensions/__pycache__/permessage_deflate.cpython-310.pyc,, +websockets/extensions/base.py,sha256=5shEU7lqmsLC7-y3OCWih1VdS_wOImmZwuAaEKl9kDU,3271 +websockets/extensions/permessage_deflate.py,sha256=bPFOAyTUDU7IIJyCGnWfr5yZF_J8dhCwJWt7jWuYM6c,24782 +websockets/frames.py,sha256=jSHawlqpEDrVcnGrKDawlINoelU9Hg5Wb0p4B_3SEl0,12537 +websockets/headers.py,sha256=RYryH2zqB_2Y02BTF2KsQFfYxAM6-Kh-A3Dv_32opAA,16120 +websockets/http.py,sha256=HR_IIij3xpoKkiLzGp4h5_NkVr2a8ZeCqGUopo6U4Rs,644 +websockets/http11.py,sha256=QcZ7u-UYbO98xQXrUz43qgaBXk-AyoQBHJBR0J9qYRE,12565 +websockets/imports.py,sha256=SXXs0glid-UHcwla5yjR72DIbGeUTrS9VFagPvPvRNY,2790 +websockets/legacy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +websockets/legacy/__pycache__/__init__.cpython-310.pyc,, +websockets/legacy/__pycache__/async_timeout.cpython-310.pyc,, +websockets/legacy/__pycache__/auth.cpython-310.pyc,, +websockets/legacy/__pycache__/client.cpython-310.pyc,, +websockets/legacy/__pycache__/compatibility.cpython-310.pyc,, +websockets/legacy/__pycache__/framing.cpython-310.pyc,, +websockets/legacy/__pycache__/handshake.cpython-310.pyc,, 
+websockets/legacy/__pycache__/http.cpython-310.pyc,, +websockets/legacy/__pycache__/protocol.cpython-310.pyc,, +websockets/legacy/__pycache__/server.cpython-310.pyc,, +websockets/legacy/async_timeout.py,sha256=nHW_nJYnxtuprwPduZMTl789KAymwmv0ukLbzm2Z8yU,8540 +websockets/legacy/auth.py,sha256=WP68nZ1KAS0YCfNRyYG2M6LrNmT6xa430YnAjoeAP3g,6287 +websockets/legacy/client.py,sha256=cEa1xlsuUhJk9T0RKdRSWeYm8WGV2M7M8JZeqwp2tpE,26555 +websockets/legacy/compatibility.py,sha256=HRmodUeop_0hT7TG8_qIZrXmfGYDFioSmg3jCoPs0Ow,758 +websockets/legacy/framing.py,sha256=M4J6ZPRK-zNqY_UgPQ4Qppc4R64aSMftO7FR_0VpG-Q,4998 +websockets/legacy/handshake.py,sha256=RggPKl-w8oFJZQYZR0IdIOTrsz040pYp0Gu4L_D7_4U,5479 +websockets/legacy/http.py,sha256=qmrM7pa0kuuJIroMVahBAH8_ZVqkD91YhwVux_xpfeI,6938 +websockets/legacy/protocol.py,sha256=W_et77gFzuJ4IWtoROWt45v_Nx4Auq552HrQPakdGps,63339 +websockets/legacy/server.py,sha256=BjiJELoOfY7KspN9RpqquxIMVDgQZw4IFQscHdH82lM,45232 +websockets/protocol.py,sha256=sKb7pl8k5TFTBwZPB9_kBzvBaxSzR9ExHTPrgUbW6lU,23822 +websockets/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +websockets/server.py,sha256=uYaKhW6y01WCSegDVAwKhFv__OAhG6_-lwbLIBXq0lw,20857 +websockets/speedups.c,sha256=ghPq-NF35VLVNkMv0uFDIruNpVISyW-qvoZgPpE65qw,5834 +websockets/speedups.cpython-310-x86_64-linux-gnu.so,sha256=tFuYvXgP4zHqpDcekn8lGpbSHaMwApSUvtfoH-TDtAs,33736 +websockets/streams.py,sha256=8nv62HYyS74t_JSWGie4SoYAz8-jMcQacaHnD0RkK90,4038 +websockets/sync/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +websockets/sync/__pycache__/__init__.cpython-310.pyc,, +websockets/sync/__pycache__/client.cpython-310.pyc,, +websockets/sync/__pycache__/compatibility.cpython-310.pyc,, +websockets/sync/__pycache__/connection.cpython-310.pyc,, +websockets/sync/__pycache__/messages.cpython-310.pyc,, +websockets/sync/__pycache__/server.cpython-310.pyc,, +websockets/sync/__pycache__/utils.cpython-310.pyc,, 
+websockets/sync/client.py,sha256=-9we3AHtE25pcT6EHGQ0oIyGzfYs18AzLpyDn4RLi94,11265 +websockets/sync/compatibility.py,sha256=1k-EUGSz-tpDdj4c65uIgbzpET5ZRWdQtRTPbZ8kFvI,555 +websockets/sync/connection.py,sha256=3Fe1BRNr4AdXs5j8UAdrSODomnfNrI460T0nTyn_2N0,29284 +websockets/sync/messages.py,sha256=pTcWhwD-uwA0l4a26_xgPHgP8pjRYk5xrX5Vhq-JuCo,9484 +websockets/sync/server.py,sha256=Wi306IkixafVw0JqeFEXUE7WWgIU_Go_FG3UQ9bz0HA,18661 +websockets/sync/utils.py,sha256=yUDxjeM4yVeXOZ_Go4ajgTUDOy-0rEWkjcR_RZDqcYY,1151 +websockets/typing.py,sha256=yx0SxSmil5JfG4fUtj-dgyR1UcW5wwmvgqtEOmcJxm4,1384 +websockets/uri.py,sha256=oymYUo7bX8LofYzXpT3UqTZfkCt2y4s680Xr-qw88qk,3215 +websockets/utils.py,sha256=QBhgbXn9ZvvLEzj-X8-zSHWVMkUqc6Wm-_HBjga5RNM,1150 +websockets/version.py,sha256=1txnm49P_pXG7EmoPowwuhzYwUJVkR1iaD-ZTyLuvrc,2723 diff --git a/vlmpy310/lib/python3.10/site-packages/websockets-11.0.3.dist-info/REQUESTED b/vlmpy310/lib/python3.10/site-packages/websockets-11.0.3.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/vlmpy310/lib/python3.10/site-packages/websockets-11.0.3.dist-info/WHEEL b/vlmpy310/lib/python3.10/site-packages/websockets-11.0.3.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..125bf43522bd62b864a314f40419ae3fdf7a2969 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/websockets-11.0.3.dist-info/WHEEL @@ -0,0 +1,8 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.40.0) +Root-Is-Purelib: false +Tag: cp310-cp310-manylinux_2_5_x86_64 +Tag: cp310-cp310-manylinux1_x86_64 +Tag: cp310-cp310-manylinux_2_17_x86_64 +Tag: cp310-cp310-manylinux2014_x86_64 + diff --git a/vlmpy310/lib/python3.10/site-packages/websockets-11.0.3.dist-info/top_level.txt b/vlmpy310/lib/python3.10/site-packages/websockets-11.0.3.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..14774b465e97f655dbcaa60d97c8a9aa72e7d51b --- 
/dev/null +++ b/vlmpy310/lib/python3.10/site-packages/websockets-11.0.3.dist-info/top_level.txt @@ -0,0 +1 @@ +websockets diff --git a/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/AUTHORS b/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/AUTHORS new file mode 100644 index 0000000000000000000000000000000000000000..b5e878e26cb65f57fd27827aae3a55638cfee098 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/AUTHORS @@ -0,0 +1,9 @@ +# This is the official list of YAPF authors for copyright purposes. +# This file is distinct from the CONTRIBUTORS files. +# See the latter for an explanation. + +# Names should be added to this file as: +# Name or Organization +# The email address is not required for organizations. + +Google Inc. diff --git a/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/INSTALLER b/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/LICENSE b/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..d645695673349e3947e8e5ae42332d0ac3164cd7 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/METADATA b/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..8cd29025b9a7719b29df5f938fc9b594b90b6860 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/METADATA @@ -0,0 +1,1305 @@ +Metadata-Version: 2.1 +Name: yapf +Version: 0.43.0 +Summary: A formatter for Python code +Author: Google Inc. +Maintainer-email: Bill Wendling +License: + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +Project-URL: Home, https://github.com/google/yapf +Project-URL: Changelog, https://github.com/google/yapf/blob/main/CHANGELOG.md +Project-URL: Docs, https://github.com/google/yapf/blob/main/README.md#yapf +Project-URL: Issues, https://github.com/google/yapf/issues +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Software Development :: Quality Assurance +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +License-File: LICENSE +License-File: AUTHORS +Requires-Dist: platformdirs >=3.5.1 +Requires-Dist: tomli >=2.0.1 ; python_version < "3.11" + +# YAPF + +

+PyPI Version +Build Status +Actions Status +Coverage Status +

+ + +## Introduction + +YAPF is a Python formatter based on [`clang-format`](https://clang.llvm.org/docs/ClangFormat.html) +(developed by Daniel Jasper). In essence, the algorithm takes the code and +calculates the best formatting that conforms to the configured style. It takes +away a lot of the drudgery of maintaining your code. + +The ultimate goal is that the code YAPF produces is as good as the code that a +programmer would write if they were following the style guide. + +> **Note** +> YAPF is not an official Google product (experimental or otherwise), it is +> just code that happens to be owned by Google. + + +## Installation + +To install YAPF from PyPI: + +```bash +$ pip install yapf +``` + +YAPF is still considered in "beta" stage, and the released version may change +often; therefore, the best way to keep up-to-date with the latest development +is to clone this repository or install directly from github: + +```bash +$ pip install git+https://github.com/google/yapf.git +``` + +Note that if you intend to use YAPF as a command-line tool rather than as a +library, installation is not necessary. YAPF supports being run as a directory +by the Python interpreter. If you cloned/unzipped YAPF into `DIR`, it's +possible to run: + +```bash +$ PYTHONPATH=DIR python DIR/yapf [options] ... +``` + +## Using YAPF within your favorite editor +YAPF is supported by multiple editors via community extensions or plugins. See [Editor Support](EDITOR%20SUPPORT.md) for more info. + +## Required Python versions + +YAPF supports Python 3.7+. + + +## Usage + +```console +usage: yapf [-h] [-v] [-d | -i | -q] [-r | -l START-END] [-e PATTERN] + [--style STYLE] [--style-help] [--no-local-style] [-p] [-m] [-vv] + [files ...] + +Formatter for Python code. + +positional arguments: + files reads from stdin when no files are specified. 
+ +optional arguments: + -h, --help show this help message and exit + -v, --version show program's version number and exit + -d, --diff print the diff for the fixed source + -i, --in-place make changes to files in place + -q, --quiet output nothing and set return value + -r, --recursive run recursively over directories + -l START-END, --lines START-END + range of lines to reformat, one-based + -e PATTERN, --exclude PATTERN + patterns for files to exclude from formatting + --style STYLE specify formatting style: either a style name (for + example "pep8" or "google"), or the name of a file + with style settings. The default is pep8 unless a + .style.yapf or setup.cfg or pyproject.toml file + located in the same directory as the source or one of + its parent directories (for stdin, the current + directory is used). + --style-help show style settings and exit; this output can be saved + to .style.yapf to make your settings permanent + --no-local-style don't search for local style definition + -p, --parallel run YAPF in parallel when formatting multiple files. + -m, --print-modified print out file names of modified files + -vv, --verbose print out file names while processing +``` + + +### Return Codes + +Normally YAPF returns zero on successful program termination and non-zero +otherwise. + +If `--diff` is supplied, YAPF returns zero when no changes were necessary, +non-zero otherwise (including program error). You can use this in a CI workflow +to test that code has been YAPF-formatted. + +### Excluding files from formatting (.yapfignore or pyproject.toml) + +In addition to exclude patterns provided on commandline, YAPF looks for +additional patterns specified in a file named `.yapfignore` or `pyproject.toml` +located in the working directory from which YAPF is invoked. + +`.yapfignore`'s syntax is similar to UNIX's filename pattern matching: + +``` +* matches everything +? 
matches any single character +[seq] matches any character in seq +[!seq] matches any character not in seq +``` + +Note that no entry should begin with `./`. + +If you use `pyproject.toml`, exclude patterns are specified by `ignore_patterns` key +in `[tool.yapfignore]` section. For example: + +```ini +[tool.yapfignore] +ignore_patterns = [ + "temp/**/*.py", + "temp2/*.py" +] +``` + + +Formatting style +================ + +The formatting style used by YAPF is configurable and there are many "knobs" +that can be used to tune how YAPF does formatting. See the `style.py` module +for the full list. + +To control the style, run YAPF with the `--style` argument. It accepts one of +the predefined styles (e.g., `pep8` or `google`), a path to a configuration +file that specifies the desired style, or a dictionary of key/value pairs. + +The config file is a simple listing of (case-insensitive) `key = value` pairs +with a `[style]` heading. For example: + +```ini +[style] +based_on_style = pep8 +spaces_before_comment = 4 +split_before_logical_operator = true +``` + +The `based_on_style` setting determines which of the predefined styles this +custom style is based on (think of it like subclassing). Four +styles are predefined: + +- `pep8` (default) +- `google` (based off of the [Google Python Style Guide](https://github.com/google/styleguide/blob/gh-pages/pyguide.md)) +- `yapf` (for use with Google open source projects) +- `facebook` + +See `_STYLE_NAME_TO_FACTORY` in [`style.py`](https://github.com/google/yapf/blob/main/yapf/yapflib/style.py) for details. + +It's also possible to do the same on the command line with a dictionary. For +example: + +```bash +--style='{based_on_style: pep8, indent_width: 2}' +``` + +This will take the `pep8` base style and modify it to have two space +indentations. + +YAPF will search for the formatting style in the following manner: + +1. Specified on the command line +2. 
In the `[style]` section of a `.style.yapf` file in either the current + directory or one of its parent directories. +3. In the `[yapf]` section of a `setup.cfg` file in either the current + directory or one of its parent directories. +4. In the `[tool.yapf]` section of a `pyproject.toml` file in either the current + directory or one of its parent directories. +5. In the `[style]` section of a `~/.config/yapf/style` file in your home + directory. + +If none of those files are found, the default style PEP8 is used. + + +Example +======= + +An example of the type of formatting that YAPF can do, it will take this ugly +code: + +```python +x = { 'a':37,'b':42, + +'c':927} + +y = 'hello ''world' +z = 'hello '+'world' +a = 'hello {}'.format('world') +class foo ( object ): + def f (self ): + return 37*-+2 + def g(self, x,y=42): + return y +def f ( a ) : + return 37+-+a[42-x : y**3] +``` + +and reformat it into: + +```python +x = {'a': 37, 'b': 42, 'c': 927} + +y = 'hello ' 'world' +z = 'hello ' + 'world' +a = 'hello {}'.format('world') + + +class foo(object): + def f(self): + return 37 * -+2 + + def g(self, x, y=42): + return y + + +def f(a): + return 37 + -+a[42 - x:y**3] +``` + + +## Example as a module + +The two main APIs for calling YAPF are `FormatCode` and `FormatFile`, these +share several arguments which are described below: + +```python +>>> from yapf.yapflib.yapf_api import FormatCode # reformat a string of code + +>>> formatted_code, changed = FormatCode("f ( a = 1, b = 2 )") +>>> formatted_code +'f(a=1, b=2)\n' +>>> changed +True +``` + +A `style_config` argument: Either a style name or a path to a file that +contains formatting style settings. If None is specified, use the default style +as set in `style.DEFAULT_STYLE_FACTORY`. + +```python +>>> FormatCode("def g():\n return True", style_config='pep8')[0] +'def g():\n return True\n' +``` + +A `lines` argument: A list of tuples of lines (ints), [start, end], that we +want to format. 
The lines are 1-based indexed. It can be used by third-party
+code (e.g., IDEs) when reformatting a snippet of code rather than a whole file.
+
+```python
+>>> FormatCode("def g( ):\n a=1\n b = 2\n return a==b", lines=[(1, 1), (2, 3)])[0]
+'def g():\n a = 1\n b = 2\n return a==b\n'
+```
+
+A `print_diff` (bool): Instead of returning the reformatted source, return a
+diff that turns the original source into the reformatted source.
+
+```diff
+>>> print(FormatCode("a==b", filename="foo.py", print_diff=True)[0])
+--- foo.py (original)
++++ foo.py (reformatted)
+@@ -1 +1 @@
+-a==b
++a == b
+```
+
+Note: the `filename` argument for `FormatCode` is what is inserted into the
+diff, the default is ``.
+
+`FormatFile` returns reformatted code from the passed file along with its encoding:
+
+```python
+>>> from yapf.yapflib.yapf_api import FormatFile # reformat a file
+
+>>> print(open("foo.py").read()) # contents of file
+a==b
+
+>>> reformatted_code, encoding, changed = FormatFile("foo.py")
+>>> reformatted_code
+'a == b\n'
+>>> encoding
+'utf-8'
+>>> changed
+True
+```
+
+The `in_place` argument saves the reformatted code back to the file:
+
+```python
+>>> FormatFile("foo.py", in_place=True)[:2]
+(None, 'utf-8')
+
+>>> print(open("foo.py").read()) # contents of file (now fixed)
+a == b
+```
+
+
+## Formatting diffs
+
+Options:
+
+```console
+usage: yapf-diff [-h] [-i] [-p NUM] [--regex PATTERN] [--iregex PATTERN][-v]
+ [--style STYLE] [--binary BINARY]
+
+This script reads input from a unified diff and reformats all the changed
+lines. This is useful to reformat all the lines touched by a specific patch.
+Example usage for git/svn users:
+
+ git diff -U0 --no-color --relative HEAD^ | yapf-diff -i
+ svn diff --diff-cmd=diff -x-U0 | yapf-diff -p0 -i
+
+It should be noted that the filename contained in the diff is used
+unmodified to determine the source file to update. 
Users calling this script +directly should be careful to ensure that the path in the diff is correct +relative to the current working directory. + +optional arguments: + -h, --help show this help message and exit + -i, --in-place apply edits to files instead of displaying a diff + -p NUM, --prefix NUM strip the smallest prefix containing P slashes + --regex PATTERN custom pattern selecting file paths to reformat + (case sensitive, overrides -iregex) + --iregex PATTERN custom pattern selecting file paths to reformat + (case insensitive, overridden by -regex) + -v, --verbose be more verbose, ineffective without -i + --style STYLE specify formatting style: either a style name (for + example "pep8" or "google"), or the name of a file + with style settings. The default is pep8 unless a + .style.yapf or setup.cfg or pyproject.toml file + located in the same directory as the source or one of + its parent directories (for stdin, the current + directory is used). + --binary BINARY location of binary to use for YAPF +``` + +## Python features not yet supported +* Python 3.12 – [PEP 695 – Type Parameter Syntax](https://peps.python.org/pep-0695/) – [YAPF #1170](https://github.com/google/yapf/issues/1170) +* Python 3.12 – [PEP 701 – Syntactic formalization of f-strings](https://peps.python.org/pep-0701/) – [YAPF #1136](https://github.com/google/yapf/issues/1136) + +## Knobs + +#### `ALIGN_CLOSING_BRACKET_WITH_VISUAL_INDENT` + +> Align closing bracket with visual indentation. + +#### `ALLOW_MULTILINE_LAMBDAS` + +> Allow lambdas to be formatted on more than one line. + +#### `ALLOW_MULTILINE_DICTIONARY_KEYS` + +> Allow dictionary keys to exist on multiple lines. For example: + +```python + x = { + ('this is the first element of a tuple', + 'this is the second element of a tuple'): + value, + } +``` + +#### `ALLOW_SPLIT_BEFORE_DEFAULT_OR_NAMED_ASSIGNS` + +> Allow splitting before a default / named assignment in an argument list. 
+ +#### `ALLOW_SPLIT_BEFORE_DICT_VALUE` + +> Allow splits before the dictionary value. + +#### `ARITHMETIC_PRECEDENCE_INDICATION` + +> Let spacing indicate operator precedence. For example: + +```python + a = 1 * 2 + 3 / 4 + b = 1 / 2 - 3 * 4 + c = (1 + 2) * (3 - 4) + d = (1 - 2) / (3 + 4) + e = 1 * 2 - 3 + f = 1 + 2 + 3 + 4 +``` + +> will be formatted as follows to indicate precedence: + +```python + a = 1*2 + 3/4 + b = 1/2 - 3*4 + c = (1+2) * (3-4) + d = (1-2) / (3+4) + e = 1*2 - 3 + f = 1 + 2 + 3 + 4 +``` + +#### `BLANK_LINES_AROUND_TOP_LEVEL_DEFINITION` + +> Sets the number of desired blank lines surrounding top-level function and +> class definitions. For example: + +```python + class Foo: + pass + # <------ having two blank lines here + # <------ is the default setting + class Bar: + pass +``` + +#### `BLANK_LINE_BEFORE_CLASS_DOCSTRING` + +> Insert a blank line before a class-level docstring. + +#### `BLANK_LINE_BEFORE_MODULE_DOCSTRING` + +> Insert a blank line before a module docstring. + +#### `BLANK_LINE_BEFORE_NESTED_CLASS_OR_DEF` + +> Insert a blank line before a `def` or `class` immediately nested within +> another `def` or `class`. For example: + +```python + class Foo: + # <------ this blank line + def method(): + pass +``` + +#### `BLANK_LINES_BETWEEN_TOP_LEVEL_IMPORTS_AND_VARIABLES` + +> Sets the number of desired blank lines between top-level imports and +> variable definitions. Useful for compatibility with tools like isort. + +#### `COALESCE_BRACKETS` + +> Do not split consecutive brackets. Only relevant when +> `DEDENT_CLOSING_BRACKETS` or `INDENT_CLOSING_BRACKETS` is set. For example: + +```python + call_func_that_takes_a_dict( + { + 'key1': 'value1', + 'key2': 'value2', + } + ) +``` + +> would reformat to: + +```python + call_func_that_takes_a_dict({ + 'key1': 'value1', + 'key2': 'value2', + }) +``` + +#### `COLUMN_LIMIT` + +> The column limit (or max line-length) + +#### `CONTINUATION_ALIGN_STYLE` + +> The style for continuation alignment. 
Possible values are: + +> - `SPACE`: Use spaces for continuation alignment. This is default +> behavior. +> - `FIXED`: Use fixed number (`CONTINUATION_INDENT_WIDTH`) of columns +> (i.e. `CONTINUATION_INDENT_WIDTH`/`INDENT_WIDTH` tabs or +> `CONTINUATION_INDENT_WIDTH` spaces) for continuation alignment. +> - `VALIGN-RIGHT`: Vertically align continuation lines to multiple of +> `INDENT_WIDTH` columns. Slightly right (one tab or a few spaces) if cannot +> vertically align continuation lines with indent characters. + +#### `CONTINUATION_INDENT_WIDTH` + +> Indent width used for line continuations. + +#### `DEDENT_CLOSING_BRACKETS` + +> Put closing brackets on a separate line, dedented, if the bracketed +> expression can't fit in a single line. Applies to all kinds of brackets, +> including function definitions and calls. For example: + +```python + config = { + 'key1': 'value1', + 'key2': 'value2', + } # <--- this bracket is dedented and on a separate line + + time_series = self.remote_client.query_entity_counters( + entity='dev3246.region1', + key='dns.query_latency_tcp', + transform=Transformation.AVERAGE(window=timedelta(seconds=60)), + start_ts=now()-timedelta(days=3), + end_ts=now(), + ) # <--- this bracket is dedented and on a separate line +``` + +#### `DISABLE_ENDING_COMMA_HEURISTIC` + +> Disable the heuristic which places each list element on a separate line if +> the list is comma-terminated. +> +> Note: The behavior of this flag changed in v0.40.3. Before, if this flag +> was true, we would split lists that contained a trailing comma or a +> comment. Now, we have a separate flag, `DISABLE_SPLIT_LIST_WITH_COMMENT`, +> that controls splitting when a list contains a comment. To get the old +> behavior, set both flags to true. More information in +> [CHANGELOG.md](CHANGELOG.md#new-disable_split_list_with_comment-flag). + +#### `DISABLE_SPLIT_LIST_WITH_COMMENT` + +> Don't put every element on a new line within a list that contains +> interstitial comments. 
+> +> Without this flag (default): +> +> ``` +> [ +> a, +> b, # +> c +> ] +> ``` +> +> With this flag: +> +> ``` +> [ +> a, b, # +> c +> ] +> ``` +> +> This mirrors the behavior of clang-format and is useful for forming +> "logical groups" of elements in a list. It also works in function +> declarations. + +#### `EACH_DICT_ENTRY_ON_SEPARATE_LINE` + +> Place each dictionary entry onto its own line. + +#### `FORCE_MULTILINE_DICT` + +> Respect `EACH_DICT_ENTRY_ON_SEPARATE_LINE` even if the line is shorter than +> `COLUMN_LIMIT`. + +#### `I18N_COMMENT` + +> The regex for an internationalization comment. The presence of this comment +> stops reformatting of that line, because the comments are required to be +> next to the string they translate. + +#### `I18N_FUNCTION_CALL` + +> The internationalization function call names. The presence of this function +> stops reformatting on that line, because the string it has cannot be moved +> away from the i18n comment. + +#### `INDENT_BLANK_LINES` + +> Set to `True` to prefer indented blank lines rather than empty + +#### `INDENT_CLOSING_BRACKETS` + +> Put closing brackets on a separate line, indented, if the bracketed +> expression can't fit in a single line. Applies to all kinds of brackets, +> including function definitions and calls. For example: + +```python + config = { + 'key1': 'value1', + 'key2': 'value2', + } # <--- this bracket is indented and on a separate line + + time_series = self.remote_client.query_entity_counters( + entity='dev3246.region1', + key='dns.query_latency_tcp', + transform=Transformation.AVERAGE(window=timedelta(seconds=60)), + start_ts=now()-timedelta(days=3), + end_ts=now(), + ) # <--- this bracket is indented and on a separate line +``` + +#### `INDENT_DICTIONARY_VALUE` + +> Indent the dictionary value if it cannot fit on the same line as the +> dictionary key. 
For example: + +```python + config = { + 'key1': + 'value1', + 'key2': value1 + + value2, + } +``` + +#### `INDENT_WIDTH` + +> The number of columns to use for indentation. + +#### `JOIN_MULTIPLE_LINES` + +> Join short lines into one line. E.g., single line `if` statements. + +#### `NO_SPACES_AROUND_SELECTED_BINARY_OPERATORS` + +> Do not include spaces around selected binary operators. For example: + +```python + 1 + 2 * 3 - 4 / 5 +``` + +> will be formatted as follows when configured with `*`, `/`: + +```python + 1 + 2*3 - 4/5 +``` + +#### `SPACE_BETWEEN_ENDING_COMMA_AND_CLOSING_BRACKET` + +> Insert a space between the ending comma and closing bracket of a list, etc. + +#### `SPACE_INSIDE_BRACKETS` + + Use spaces inside brackets, braces, and parentheses. For example: + +```python + method_call( 1 ) + my_dict[ 3 ][ 1 ][ get_index( *args, **kwargs ) ] + my_set = { 1, 2, 3 } +``` + +#### `SPACES_AROUND_DEFAULT_OR_NAMED_ASSIGN` + +> Set to `True` to prefer spaces around the assignment operator for default +> or keyword arguments. + +#### `SPACES_AROUND_DICT_DELIMITERS` + +> Adds a space after the opening '{' and before the ending '}' dict delimiters. + +```python + {1: 2} +``` + +> will be formatted as: + +```python + { 1: 2 } +``` + +#### `SPACES_AROUND_LIST_DELIMITERS` + +> Adds a space after the opening '[' and before the ending ']' list delimiters. + +```python + [1, 2] +``` + +> will be formatted as: + +```python + [ 1, 2 ] +``` + +#### `SPACES_AROUND_POWER_OPERATOR` + +> Set to `True` to prefer using spaces around `**`. + +#### `SPACES_AROUND_SUBSCRIPT_COLON` + +> Use spaces around the subscript / slice operator. For example: + +```python + my_list[1 : 10 : 2] +``` + +##### `SPACES_AROUND_TUPLE_DELIMITERS` + +> Adds a space after the opening '(' and before the ending ')' tuple delimiters. 
+ +```python + (1, 2, 3) +``` + +> will be formatted as: + +```python + ( 1, 2, 3 ) +``` + +#### `SPACES_BEFORE_COMMENT` + +> The number of spaces required before a trailing comment. +> This can be a single value (representing the number of spaces +> before each trailing comment) or list of values (representing +> alignment column values; trailing comments within a block will +> be aligned to the first column value that is greater than the maximum +> line length within the block). + +> **Note:** Lists of values may need to be quoted in some contexts +> (eg. shells or editor config files). + +> For example, with `spaces_before_comment=5`: + +```python + 1 + 1 # Adding values +``` + +> will be formatted as: + +```python + 1 + 1 # Adding values <-- 5 spaces between the end of the statement and comment +``` + +> with `spaces_before_comment="15, 20"`: + +```python + 1 + 1 # Adding values + two + two # More adding + + longer_statement # This is a longer statement + short # This is a shorter statement + + a_very_long_statement_that_extends_beyond_the_final_column # Comment + short # This is a shorter statement +``` + +> will be formatted as: + +```python + 1 + 1 # Adding values <-- end of line comments in block aligned to col 15 + two + two # More adding + + longer_statement # This is a longer statement <-- end of line comments in block aligned to col 20 + short # This is a shorter statement + + a_very_long_statement_that_extends_beyond_the_final_column # Comment <-- the end of line comments are aligned based on the line length + short # This is a shorter statement +``` + +#### `SPLIT_ALL_COMMA_SEPARATED_VALUES` + +> If a comma separated list (`dict`, `list`, `tuple`, or function `def`) is +> on a line that is too long, split such that each element is on a separate +> line. 
+ +#### `SPLIT_ALL_TOP_LEVEL_COMMA_SEPARATED_VALUES` + +> Variation on `SPLIT_ALL_COMMA_SEPARATED_VALUES` in which, if a +> subexpression with a comma fits in its starting line, then the +> subexpression is not split. This avoids splits like the one for +> `b` in this code: + +```python + abcdef( + aReallyLongThing: int, + b: [Int, + Int]) +``` + +> with the new knob this is split as: + +```python + abcdef( + aReallyLongThing: int, + b: [Int, Int]) +``` + +#### `SPLIT_ARGUMENTS_WHEN_COMMA_TERMINATED` + +> Split before arguments if the argument list is terminated by a comma. + +#### `SPLIT_BEFORE_ARITHMETIC_OPERATOR` + +> Set to `True` to prefer splitting before `+`, `-`, `*`, `/`, `//`, or `@` +> rather than after. + +#### `SPLIT_BEFORE_BITWISE_OPERATOR` + +> Set to `True` to prefer splitting before `&`, `|` or `^` rather than after. + +#### `SPLIT_BEFORE_CLOSING_BRACKET` + +> Split before the closing bracket if a `list` or `dict` literal doesn't fit +> on a single line. + +#### `SPLIT_BEFORE_DICT_SET_GENERATOR` + +> Split before a dictionary or set generator (`comp_for`). For example, note +> the split before the `for`: + +```python + foo = { + variable: 'Hello world, have a nice day!' + for variable in bar if variable != 42 + } +``` + +#### `SPLIT_BEFORE_DOT` + +> Split before the `.` if we need to split a longer expression: + +```python + foo = ('This is a really long string: {}, {}, {}, {}'.format(a, b, c, d)) +``` + +> would reformat to something like: + +```python + foo = ('This is a really long string: {}, {}, {}, {}' + .format(a, b, c, d)) +``` + +#### `SPLIT_BEFORE_EXPRESSION_AFTER_OPENING_PAREN` + +> Split after the opening paren which surrounds an expression if it doesn't +> fit on a single line. + +#### `SPLIT_BEFORE_FIRST_ARGUMENT` + +> If an argument / parameter list is going to be split, then split before the +> first argument. + +#### `SPLIT_BEFORE_LOGICAL_OPERATOR` + +> Set to `True` to prefer splitting before `and` or `or` rather than after. 
+ +#### `SPLIT_BEFORE_NAMED_ASSIGNS` + +> Split named assignments onto individual lines. + +#### `SPLIT_COMPLEX_COMPREHENSION` + +> For list comprehensions and generator expressions with multiple clauses +> (e.g multiple `for` calls, `if` filter expressions) and which need to be +> reflowed, split each clause onto its own line. For example: + +```python + result = [ + a_var + b_var for a_var in xrange(1000) for b_var in xrange(1000) + if a_var % b_var] +``` + +> would reformat to something like: + +```python + result = [ + a_var + b_var + for a_var in xrange(1000) + for b_var in xrange(1000) + if a_var % b_var] +``` + +#### `SPLIT_PENALTY_AFTER_OPENING_BRACKET` + +> The penalty for splitting right after the opening bracket. + +#### `SPLIT_PENALTY_AFTER_UNARY_OPERATOR` + +> The penalty for splitting the line after a unary operator. + +#### `SPLIT_PENALTY_ARITHMETIC_OPERATOR` + +> The penalty of splitting the line around the `+`, `-`, `*`, `/`, `//`, `%`, +> and `@` operators. + +#### `SPLIT_PENALTY_BEFORE_IF_EXPR` + +> The penalty for splitting right before an `if` expression. + +#### `SPLIT_PENALTY_BITWISE_OPERATOR` + +> The penalty of splitting the line around the `&`, `|`, and `^` operators. + +#### `SPLIT_PENALTY_COMPREHENSION` + +> The penalty for splitting a list comprehension or generator expression. + +#### `SPLIT_PENALTY_EXCESS_CHARACTER` + +> The penalty for characters over the column limit. + +#### `SPLIT_PENALTY_FOR_ADDED_LINE_SPLIT` + +> The penalty incurred by adding a line split to the logical line. The more +> line splits added the higher the penalty. + +#### `SPLIT_PENALTY_IMPORT_NAMES` + +> The penalty of splitting a list of `import as` names. 
For example: + +```python + from a_very_long_or_indented_module_name_yada_yad import (long_argument_1, + long_argument_2, + long_argument_3) +``` + +> would reformat to something like: + +```python + from a_very_long_or_indented_module_name_yada_yad import ( + long_argument_1, long_argument_2, long_argument_3) +``` + +#### `SPLIT_PENALTY_LOGICAL_OPERATOR` + +> The penalty of splitting the line around the `and` and `or` operators. + +#### `USE_TABS` + +> Use the Tab character for indentation. + + +## (Potentially) Frequently Asked Questions + +### Why does YAPF destroy my awesome formatting? + +YAPF tries very hard to get the formatting correct. But for some code, it won't +be as good as hand-formatting. In particular, large data literals may become +horribly disfigured under YAPF. + +The reasons for this are manyfold. In short, YAPF is simply a tool to help +with development. It will format things to coincide with the style guide, but +that may not equate with readability. + +What can be done to alleviate this situation is to indicate regions YAPF should +ignore when reformatting something: + +```python +# yapf: disable +FOO = { + # ... some very large, complex data literal. +} + +BAR = [ + # ... another large data literal. +] +# yapf: enable +``` + +You can also disable formatting for a single literal like this: + +```python +BAZ = { + (1, 2, 3, 4), + (5, 6, 7, 8), + (9, 10, 11, 12), +} # yapf: disable +``` + +To preserve the nice dedented closing brackets, use the +`dedent_closing_brackets` in your style. Note that in this case all +brackets, including function definitions and calls, are going to use +that style. This provides consistency across the formatted codebase. + +### Why Not Improve Existing Tools? + +We wanted to use clang-format's reformatting algorithm. It's very powerful and +designed to come up with the best formatting possible. 
Existing tools were
+created with different goals in mind, and would require extensive modifications
+to convert to using clang-format's algorithm.
+
+### Can I Use YAPF In My Program?
+
+Please do! YAPF was designed to be used as a library as well as a command line
+tool. This means that a tool or IDE plugin is free to use YAPF.
+
+### I still get non-PEP8 compliant code! Why?
+
+YAPF tries very hard to be fully PEP 8 compliant. However, it is paramount
+to not risk altering the semantics of your code. Thus, YAPF tries to be as
+safe as possible and does not change the token stream
+(e.g., by adding parentheses).
+All these cases however, can be easily fixed manually. For instance,
+
+```python
+from my_package import my_function_1, my_function_2, my_function_3, my_function_4, my_function_5
+
+FOO = my_variable_1 + my_variable_2 + my_variable_3 + my_variable_4 + my_variable_5 + my_variable_6 + my_variable_7 + my_variable_8
+```
+
+won't be split, but you can easily get it right by just adding parentheses:
+
+```python
+from my_package import (my_function_1, my_function_2, my_function_3,
+ my_function_4, my_function_5)
+
+FOO = (my_variable_1 + my_variable_2 + my_variable_3 + my_variable_4 +
+ my_variable_5 + my_variable_6 + my_variable_7 + my_variable_8)
+```
+
+
+## Gory Details
+
+### Algorithm Design
+
+The main data structure in YAPF is the `LogicalLine` object. It holds a list
+of `FormatToken`\s, that we would want to place on a single line if there
+were no column limit. An exception being a comment in the middle of an
+expression statement will force the line to be formatted on more than one line.
+The formatter works on one `LogicalLine` object at a time.
+
+A `LogicalLine` typically won't affect the formatting of lines before or
+after it. There is a part of the algorithm that may join two or more
+`LogicalLine`\s into one line. 
For instance, an if-then statement with a +short body can be placed on a single line: + +```python +if a == 42: continue +``` + +YAPF's formatting algorithm creates a weighted tree that acts as the solution +space for the algorithm. Each node in the tree represents the result of a +formatting decision --- i.e., whether to split or not to split before a token. +Each formatting decision has a cost associated with it. Therefore, the cost is +realized on the edge between two nodes. (In reality, the weighted tree doesn't +have separate edge objects, so the cost resides on the nodes themselves.) + +For example, take the following Python code snippet. For the sake of this +example, assume that line (1) violates the column limit restriction and needs to +be reformatted. + +```python +def xxxxxxxxxxx(aaaaaaaaaaaa, bbbbbbbbb, cccccccc, dddddddd, eeeeee): # 1 + pass # 2 +``` + +For line (1), the algorithm will build a tree where each node (a +`FormattingDecisionState` object) is the state of the line at that token given +the decision to split before the token or not. Note: the `FormatDecisionState` +objects are copied by value so each node in the graph is unique and a change in +one doesn't affect other nodes. + +Heuristics are used to determine the costs of splitting or not splitting. +Because a node holds the state of the tree up to a token's insertion, it can +easily determine if a splitting decision will violate one of the style +requirements. For instance, the heuristic is able to apply an extra penalty to +the edge when not splitting between the previous token and the one being added. + +There are some instances where we will never want to split the line, because +doing so will always be detrimental (i.e., it will require a backslash-newline, +which is very rarely desirable). For line (1), we will never want to split the +first three tokens: `def`, `xxxxxxxxxxx`, and `(`. Nor will we want to +split between the `)` and the `:` at the end. 
These regions are said to be +"unbreakable." This is reflected in the tree by there not being a "split" +decision (left hand branch) within the unbreakable region. + +Now that we have the tree, we determine what the "best" formatting is by finding +the path through the tree with the lowest cost. + +And that's it! diff --git a/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/RECORD b/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..201e96c212c4ece176b7bdfc7b9e3a90ab9e4bee --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/RECORD @@ -0,0 +1,159 @@ +../../../bin/yapf,sha256=f-RhnC-hWgYbrjq-3kALq3g2eN-uKxm43_VOVHMKYyc,225 +../../../bin/yapf-diff,sha256=uJiDOu0wVofceV0J_aBIFt7Z6bBV5jSPOrIhB5Ah7e4,249 +yapf-0.43.0.dist-info/AUTHORS,sha256=GjWR8Xly-Dl9VrKuzTLOvRoCbMNy278DxK2Sud9LcCw,307 +yapf-0.43.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +yapf-0.43.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358 +yapf-0.43.0.dist-info/METADATA,sha256=taVx24OjDoGDQJh-Gt_GgCKHfBIRzLlXQPSNJNQA9eQ,46765 +yapf-0.43.0.dist-info/RECORD,, +yapf-0.43.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +yapf-0.43.0.dist-info/WHEEL,sha256=R06PA3UVYHThwHvxuRWMqaGcr-PuniXahwjmQRFMEkY,91 +yapf-0.43.0.dist-info/entry_points.txt,sha256=chaU6j8JZ8zlHOpcFgorw4P7Y7LnMUMnbOFgvx5pi1E,93 +yapf-0.43.0.dist-info/top_level.txt,sha256=1iB3lYw_nLFf6D_vd8fTd-TZTRfpRzoC4euULCCuu6c,32 +yapf/__init__.py,sha256=6XTKnU3vAeB9N4egUT1f_4Q_7RipRGpzoS2knKf3Evw,12967 +yapf/__main__.py,sha256=jJ5Fcwe6tbXxqSuUxRO3uel44tHP53vVScojlus0cBM,680 +yapf/__pycache__/__init__.cpython-310.pyc,, +yapf/__pycache__/__main__.cpython-310.pyc,, +yapf/__pycache__/_version.cpython-310.pyc,, +yapf/_version.py,sha256=uLdCe5IU74Eq2OM-KK7s23arKNEHXWY8B1o0eK5WeNc,23 
+yapf/pyparser/__init__.py,sha256=dGRRtEG7QB5hVEiGxl5tBWPY33NOrtPvofVjbcQdA7s,596 +yapf/pyparser/__pycache__/__init__.cpython-310.pyc,, +yapf/pyparser/__pycache__/pyparser.cpython-310.pyc,, +yapf/pyparser/__pycache__/pyparser_utils.cpython-310.pyc,, +yapf/pyparser/__pycache__/split_penalty_visitor.cpython-310.pyc,, +yapf/pyparser/pyparser.py,sha256=OEASzKRQfU3HzZUMTDGSEiWAxN3MDR0V_M4RyCinwG4,5247 +yapf/pyparser/pyparser_utils.py,sha256=WS4NHtq01H4I5Tnf9Ho2BNZ7f4IYJ6KlWBZfvOFa978,3076 +yapf/pyparser/split_penalty_visitor.py,sha256=EDD14Vj0u2Clwaij7FDzweV7kcu5B7MGJjuy6CWdg7Y,29136 +yapf/pytree/__init__.py,sha256=0BQHBsowG4Mkqe5qqLBWf9kH6-rDXpIgyytXhmUeXcw,596 +yapf/pytree/__pycache__/__init__.cpython-310.pyc,, +yapf/pytree/__pycache__/blank_line_calculator.cpython-310.pyc,, +yapf/pytree/__pycache__/comment_splicer.cpython-310.pyc,, +yapf/pytree/__pycache__/continuation_splicer.cpython-310.pyc,, +yapf/pytree/__pycache__/pytree_unwrapper.cpython-310.pyc,, +yapf/pytree/__pycache__/pytree_utils.cpython-310.pyc,, +yapf/pytree/__pycache__/pytree_visitor.cpython-310.pyc,, +yapf/pytree/__pycache__/split_penalty.cpython-310.pyc,, +yapf/pytree/__pycache__/subtype_assigner.cpython-310.pyc,, +yapf/pytree/blank_line_calculator.py,sha256=D2IiDgJPQYXYD8z9oivF7NWdUMhixp7r-45O25A00no,6289 +yapf/pytree/comment_splicer.py,sha256=kx54hgFFg4HRd1d_QEHpnswKiJMY5VQT8ZXet9aN0Rw,15310 +yapf/pytree/continuation_splicer.py,sha256=k0m5s7FWUVEvCYuRTSmOF3BIXLh2YIYG0zIUsN7GsjA,1795 +yapf/pytree/pytree_unwrapper.py,sha256=xdX5c_XwDctU3TSmPT40_v4VdigBqQ74WShC6InHoB8,15973 +yapf/pytree/pytree_utils.py,sha256=8LgJJVEjuLu93L5-fry5Zy-IZx6drjfl5nlLZZGP4jI,10465 +yapf/pytree/pytree_visitor.py,sha256=DP1jM86rF9rsDS6PW9Txqzm8N18gC1kuuiKDIeQWnEU,4547 +yapf/pytree/split_penalty.py,sha256=dhaIeZPZdUoQ6nUSxoz2fbtcKz3Kj_Natl5Sh_TQaME,24445 +yapf/pytree/subtype_assigner.py,sha256=N6hNhKviCd-B6vsbtIb1I0f3sq4jT9_MseLoJmFMMsw,19564 +yapf/yapflib/__init__.py,sha256=abBqhqqPP10jK6pNCEt5VDVZfYK0u0fQugTp2AC0K4k,596 
+yapf/yapflib/__pycache__/__init__.cpython-310.pyc,, +yapf/yapflib/__pycache__/errors.cpython-310.pyc,, +yapf/yapflib/__pycache__/file_resources.cpython-310.pyc,, +yapf/yapflib/__pycache__/format_decision_state.cpython-310.pyc,, +yapf/yapflib/__pycache__/format_token.cpython-310.pyc,, +yapf/yapflib/__pycache__/identify_container.cpython-310.pyc,, +yapf/yapflib/__pycache__/line_joiner.cpython-310.pyc,, +yapf/yapflib/__pycache__/logical_line.cpython-310.pyc,, +yapf/yapflib/__pycache__/object_state.cpython-310.pyc,, +yapf/yapflib/__pycache__/reformatter.cpython-310.pyc,, +yapf/yapflib/__pycache__/split_penalty.cpython-310.pyc,, +yapf/yapflib/__pycache__/style.cpython-310.pyc,, +yapf/yapflib/__pycache__/subtypes.cpython-310.pyc,, +yapf/yapflib/__pycache__/yapf_api.cpython-310.pyc,, +yapf/yapflib/errors.py,sha256=RloRbCws02c5stwhSPqZGCSDG1tpi5LKjbyX5obQ_XY,1501 +yapf/yapflib/file_resources.py,sha256=jbAD5dDV31xh503BzCUP7KYMsPKwcjWrsApsejbFU5M,9320 +yapf/yapflib/format_decision_state.py,sha256=BusSkFIVLA6mhR8V9DJUkjOUuipvo_RX8OR1WMNyaC4,48776 +yapf/yapflib/format_token.py,sha256=tyiWuGin2Zys8hYP22wPNL_lTZuOoIOjuQhfSNc-cUA,11203 +yapf/yapflib/identify_container.py,sha256=fqv-rHIupgGcXAPH2IkDhY7D0F2jWnSQYSwPsY8VUWQ,2334 +yapf/yapflib/line_joiner.py,sha256=HR_jVQjR3sX7k2PcJ_d8cSFf2chKOW1-gWf2ixeZYcY,3916 +yapf/yapflib/logical_line.py,sha256=Bj_rxgroNnr_KlpHSfwqUPzTGh1iQzlEgtlfGDcflgs,25201 +yapf/yapflib/object_state.py,sha256=-JOCWaB34BBi1zPq1rM-Ih9DlR9Qqgm5PL2PvXm12-E,7912 +yapf/yapflib/reformatter.py,sha256=qllrDOTv_XhFVZZu3u292lTRIDL06n7CPFNpj5vrTvc,28278 +yapf/yapflib/split_penalty.py,sha256=tbTQliU49OILbPuNGjH9xCJr8sr2YY9SmEnQUQufo1s,1252 +yapf/yapflib/style.py,sha256=U4AkLThCLZ8Mwra37CQvye6tFXOSseJ4pDczU-nWGH4,32616 +yapf/yapflib/subtypes.py,sha256=XOLQ2JaFysHUZiJA2npKtUGy5AM4waBrognz9RAV5T0,1157 +yapf/yapflib/yapf_api.py,sha256=G7VR1utXlRb43OAbSJvhfG6nE9dDR-GUjj7lRKiuQd4,11973 +yapf_third_party/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+yapf_third_party/__pycache__/__init__.cpython-310.pyc,, +yapf_third_party/_ylib2to3/Grammar.txt,sha256=uIxG7IrdemRi94OtTUaVoW2ktYyKyuXHluWFMMq8Af4,11278 +yapf_third_party/_ylib2to3/LICENSE,sha256=V4mIG4rrnJH1g19bt8q-hKD-zUuyvi9UyeaVenjseZ0,12762 +yapf_third_party/_ylib2to3/PatternGrammar.txt,sha256=7lul2ztnIqDi--JWDrwciD5yMo75w7TaHHxdHMZJvOM,793 +yapf_third_party/_ylib2to3/__init__.py,sha256=qjSpxnt8ZtKamxz0zBxlj0FICSIbfYhYEJ72T9-itak,73 +yapf_third_party/_ylib2to3/__pycache__/__init__.cpython-310.pyc,, +yapf_third_party/_ylib2to3/__pycache__/fixer_base.cpython-310.pyc,, +yapf_third_party/_ylib2to3/__pycache__/fixer_util.cpython-310.pyc,, +yapf_third_party/_ylib2to3/__pycache__/patcomp.cpython-310.pyc,, +yapf_third_party/_ylib2to3/__pycache__/pygram.cpython-310.pyc,, +yapf_third_party/_ylib2to3/__pycache__/pytree.cpython-310.pyc,, +yapf_third_party/_ylib2to3/fixer_base.py,sha256=VH1BkFUBWEZJHYmdNDM3TqdBOde6hx0TZrS8BWxO_1A,6081 +yapf_third_party/_ylib2to3/fixer_util.py,sha256=0Sklp3vR9ej_HIEuBHZvZx1AaXbInoMgUkJL92jJVy4,14005 +yapf_third_party/_ylib2to3/patcomp.py,sha256=yTdjpIAf9Mck02LDWbu8PIi7YDv76rDaTDb0Fdt_rSo,6374 +yapf_third_party/_ylib2to3/pgen2/__init__.py,sha256=7WIeTNKI48jLD2J7R8-eCiMKPaugyap-zIm3KU9v1ww,142 +yapf_third_party/_ylib2to3/pgen2/__pycache__/__init__.cpython-310.pyc,, +yapf_third_party/_ylib2to3/pgen2/__pycache__/conv.cpython-310.pyc,, +yapf_third_party/_ylib2to3/pgen2/__pycache__/driver.cpython-310.pyc,, +yapf_third_party/_ylib2to3/pgen2/__pycache__/grammar.cpython-310.pyc,, +yapf_third_party/_ylib2to3/pgen2/__pycache__/literals.cpython-310.pyc,, +yapf_third_party/_ylib2to3/pgen2/__pycache__/parse.cpython-310.pyc,, +yapf_third_party/_ylib2to3/pgen2/__pycache__/pgen.cpython-310.pyc,, +yapf_third_party/_ylib2to3/pgen2/__pycache__/token.cpython-310.pyc,, +yapf_third_party/_ylib2to3/pgen2/__pycache__/tokenize.cpython-310.pyc,, +yapf_third_party/_ylib2to3/pgen2/conv.py,sha256=HwFvs8OSeIUsuzxadn68UnSLSTW6NQ5wHOIHyHJeKec,8672 
+yapf_third_party/_ylib2to3/pgen2/driver.py,sha256=sLbYtSMHZTYnL9slkigf0IXcFnGjSXbP47xqxCJtVQY,8732 +yapf_third_party/_ylib2to3/pgen2/grammar.py,sha256=50lpUCkrS5jBdeYPTsWSdGjZv1iBYAuc7UIGKkviO1U,6730 +yapf_third_party/_ylib2to3/pgen2/literals.py,sha256=zPKvTENZ4v5UpXsf9KCrkw3Yab05QHnUWSjfIGpCudI,1407 +yapf_third_party/_ylib2to3/pgen2/parse.py,sha256=OhFjvGVNQPsgF6JkV_VHw__d55F-WjcSGp8XUUKE0RQ,13020 +yapf_third_party/_ylib2to3/pgen2/pgen.py,sha256=qXJiIjBTgOND-QqMobNk5nTVHHWX5wILfjLjwq41LLU,12372 +yapf_third_party/_ylib2to3/pgen2/token.py,sha256=Jx7s_tsSdAQzhxUOx50PJSz9bOehi4r_DfWdllFdesk,1288 +yapf_third_party/_ylib2to3/pgen2/tokenize.py,sha256=qGvScdXULtSA6cOUOuCNTQ7PJA37zWkmc6LM-orA-KI,19037 +yapf_third_party/_ylib2to3/pygram.py,sha256=mL1AwT9eSTSiolL_NswWSQ9MrTdOTiURQL6TPHgXqPU,1200 +yapf_third_party/_ylib2to3/pytree.py,sha256=ys0BCcdCzayH0E5FvDSSJlxqkU8yv1QNf79Ef--Tk_k,25583 +yapf_third_party/yapf_diff/LICENSE,sha256=nkXoVr7czun2clQILKEYUdlU3i_tdEjEvtGa2aq5mpE,12262 +yapf_third_party/yapf_diff/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +yapf_third_party/yapf_diff/__pycache__/__init__.cpython-310.pyc,, +yapf_third_party/yapf_diff/__pycache__/yapf_diff.cpython-310.pyc,, +yapf_third_party/yapf_diff/yapf_diff.py,sha256=5jvzFoJ1t-GPKVnfG_7sUxM6OhYyhzZhgdvZaa1p6TM,4657 +yapftests/__init__.py,sha256=abBqhqqPP10jK6pNCEt5VDVZfYK0u0fQugTp2AC0K4k,596 +yapftests/__pycache__/__init__.cpython-310.pyc,, +yapftests/__pycache__/blank_line_calculator_test.cpython-310.pyc,, +yapftests/__pycache__/comment_splicer_test.cpython-310.pyc,, +yapftests/__pycache__/file_resources_test.cpython-310.pyc,, +yapftests/__pycache__/format_decision_state_test.cpython-310.pyc,, +yapftests/__pycache__/format_token_test.cpython-310.pyc,, +yapftests/__pycache__/line_joiner_test.cpython-310.pyc,, +yapftests/__pycache__/logical_line_test.cpython-310.pyc,, +yapftests/__pycache__/main_test.cpython-310.pyc,, +yapftests/__pycache__/pytree_unwrapper_test.cpython-310.pyc,, 
+yapftests/__pycache__/pytree_utils_test.cpython-310.pyc,, +yapftests/__pycache__/pytree_visitor_test.cpython-310.pyc,, +yapftests/__pycache__/reformatter_basic_test.cpython-310.pyc,, +yapftests/__pycache__/reformatter_buganizer_test.cpython-310.pyc,, +yapftests/__pycache__/reformatter_facebook_test.cpython-310.pyc,, +yapftests/__pycache__/reformatter_pep8_test.cpython-310.pyc,, +yapftests/__pycache__/reformatter_python3_test.cpython-310.pyc,, +yapftests/__pycache__/reformatter_style_config_test.cpython-310.pyc,, +yapftests/__pycache__/split_penalty_test.cpython-310.pyc,, +yapftests/__pycache__/style_test.cpython-310.pyc,, +yapftests/__pycache__/subtype_assigner_test.cpython-310.pyc,, +yapftests/__pycache__/utils.cpython-310.pyc,, +yapftests/__pycache__/yapf_test.cpython-310.pyc,, +yapftests/__pycache__/yapf_test_helper.cpython-310.pyc,, +yapftests/blank_line_calculator_test.py,sha256=7vgwyXfbQhYxCzezhJsuyjHY_-v9teeWUGSVUoYiKlc,9045 +yapftests/comment_splicer_test.py,sha256=WLuHqyY8EYuFD-BWiKyGrZDQx1nRz2bbDd-mLSCLo8Q,11081 +yapftests/file_resources_test.py,sha256=8SXEZlgAAEb-ye8lIYYB5onRe8y-QkpPBjJI9u5C11o,18953 +yapftests/format_decision_state_test.py,sha256=XAB1v3m08fydVIzEvN23xNwVJBiog8mtgg7LGG2IuYk,4401 +yapftests/format_token_test.py,sha256=JO6-XnboqnufAPF0loWPbaYVMHPqqq4loqcu9mhtzMY,3123 +yapftests/line_joiner_test.py,sha256=GswiMcVRAuaoUNvEoBzORMZWHsvVXu4AIK4rAInCegQ,2590 +yapftests/logical_line_test.py,sha256=kXSigooG63K0FJPBs_Ifx4xNzlTibSTlU-E2YlsUZbk,3228 +yapftests/main_test.py,sha256=t4HMCLkb0y0VyMQlE1WzeZDT2iG02XadrEZekUnpEao,3935 +yapftests/pytree_unwrapper_test.py,sha256=HiouLUQBpc5lNll8MKZ4hiTTvEwWwSg4u-l3mrRpUp0,9351 +yapftests/pytree_utils_test.py,sha256=vhgXLaSycsd7zhRDTnuaIrSCn19R-2VACRsKajA-Mwo,7973 +yapftests/pytree_visitor_test.py,sha256=By6gpHcBZ1lw7dx_ZW08IvmtjRbMj504L7t0KtmG69E,4083 +yapftests/reformatter_basic_test.py,sha256=SaH7IMV1BzCoo1GsfNt_0zaFNsfcpIV2VNNrz2-dWeQ,116678 
+yapftests/reformatter_buganizer_test.py,sha256=JHDy6EDixvobthEma5G7i5wNMLBNMTg1bukuuECluEY,92309 +yapftests/reformatter_facebook_test.py,sha256=koxTPksYBVbhwxzWwvqz0_qvhg3qYju-tcfVaQSQ9DU,15193 +yapftests/reformatter_pep8_test.py,sha256=eynVZVg-RI3De16OwLgcBU6QCmsZhqxvz0i8r03MCyE,32060 +yapftests/reformatter_python3_test.py,sha256=3Dk9PjAiWPtlagbbIX3bLvhgfnrgmvcfSkW-BSF7aSk,18512 +yapftests/reformatter_style_config_test.py,sha256=dksg4n6OSt7_LXkN_jTyeG0i_KyORuS2LsaA-5PUlk8,7171 +yapftests/split_penalty_test.py,sha256=zN4AFeW0nEKF4pdOVyb5y7acwnNiWoJU2oM9XhCp06c,7551 +yapftests/style_test.py,sha256=HD36Z9isuDzZTBFlWkhHVsi3AUcEkzWTfKLu5OOuDJM,11404 +yapftests/subtype_assigner_test.py,sha256=cnGHcYxSmpsED4jM2HPXFRlcuI7n6Zxva09s-djGWZU,17149 +yapftests/utils.py,sha256=SsgEwqMsg1zT0bsnKPVPIdUuTzgdtWWZyoeNOklmdKg,2567 +yapftests/yapf_test.py,sha256=HJ_G7wagp5ZHmcxHsFQqMjb120KTzSrHD_G0Tmpux50,63313 +yapftests/yapf_test_helper.py,sha256=-eZeC7t4FYV7EmouRN-AQvvCn5_dIcjOYOLa-HdpFUY,2910 diff --git a/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/REQUESTED b/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/WHEEL b/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..8201b235f5c279046293bf3f1214a21da1c0f215 --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (75.5.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/entry_points.txt b/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..ba45c0c0f80b86bae2fe42cc088395385253001c --- 
/dev/null +++ b/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +yapf = yapf:run_main +yapf-diff = yapf_third_party.yapf_diff.yapf_diff:main diff --git a/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/top_level.txt b/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..98986fc108190c83d48c862ca27cf487e70c611c --- /dev/null +++ b/vlmpy310/lib/python3.10/site-packages/yapf-0.43.0.dist-info/top_level.txt @@ -0,0 +1,3 @@ +yapf +yapf_third_party +yapftests