diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/_neg_view_compositeexplicitautograd_dispatch.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/_neg_view_compositeexplicitautograd_dispatch.h new file mode 100644 index 0000000000000000000000000000000000000000..37530952213f8af0b831467f6a5638b54e03bdc3 --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/_neg_view_compositeexplicitautograd_dispatch.h @@ -0,0 +1,23 @@ +#pragma once +// @generated by torchgen/gen.py from DispatchKeyFunction.h + +// NB: The implementing C++ file is RegisterDispatchKey.cpp + +// The only #includes we need are for custom classes that have defaults in the C++ API +#include +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. +#include + +namespace at { + +namespace compositeexplicitautograd { + +TORCH_API at::Tensor _neg_view(const at::Tensor & self); + +} // namespace compositeexplicitautograd +} // namespace at diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/_softmax_backward_data_compositeexplicitautogradnonfunctional_dispatch.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/_softmax_backward_data_compositeexplicitautogradnonfunctional_dispatch.h new file mode 100644 index 0000000000000000000000000000000000000000..7e712e737214b47605d0bf81c8810e019652274a --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/_softmax_backward_data_compositeexplicitautogradnonfunctional_dispatch.h @@ -0,0 +1,23 @@ +#pragma once +// @generated by torchgen/gen.py from DispatchKeyFunction.h + +// NB: The implementing C++ file is RegisterDispatchKey.cpp + +// The only #includes we need are for custom classes that have defaults in the C++ API 
+#include +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. +#include + +namespace at { + +namespace compositeexplicitautogradnonfunctional { + +TORCH_API at::Tensor _softmax_backward_data(const at::Tensor & grad_output, const at::Tensor & output, int64_t dim, at::ScalarType input_dtype); + +} // namespace compositeexplicitautogradnonfunctional +} // namespace at diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/_spdiags_native.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/_spdiags_native.h new file mode 100644 index 0000000000000000000000000000000000000000..2b094ab1ddf8183e3f902c4d9bbd55acb9682de8 --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/_spdiags_native.h @@ -0,0 +1,22 @@ +#pragma once + +// @generated by torchgen/gen.py from NativeFunction.h + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +namespace at { +namespace native { +TORCH_API at::Tensor & _spdiags_out(const at::Tensor & diagonals, const at::Tensor & offsets, at::IntArrayRef shape, c10::optional layout, at::Tensor & out); +TORCH_API at::Tensor spdiags(const at::Tensor & diagonals, const at::Tensor & offsets, at::IntArrayRef shape, c10::optional layout=c10::nullopt); +} // namespace native +} // namespace at diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/_thnn_fused_gru_cell_backward_cuda_dispatch.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/_thnn_fused_gru_cell_backward_cuda_dispatch.h new file mode 100644 index 0000000000000000000000000000000000000000..cd1d4e8077a4084f8989d4b7c45e281f7b55170c --- /dev/null +++ 
b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/_thnn_fused_gru_cell_backward_cuda_dispatch.h @@ -0,0 +1,23 @@ +#pragma once +// @generated by torchgen/gen.py from DispatchKeyFunction.h + +// NB: The implementing C++ file is RegisterDispatchKey.cpp + +// The only #includes we need are for custom classes that have defaults in the C++ API +#include +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. +#include + +namespace at { + +namespace cuda { + +TORCH_API ::std::tuple _thnn_fused_gru_cell_backward(const at::Tensor & grad_hy, const at::Tensor & workspace, bool has_bias); + +} // namespace cuda +} // namespace at diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/acos_native.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/acos_native.h new file mode 100644 index 0000000000000000000000000000000000000000..1dbf39f8a4565bc4fa43caace5661a3631f34614 --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/acos_native.h @@ -0,0 +1,23 @@ +#pragma once + +// @generated by torchgen/gen.py from NativeFunction.h + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace at { +namespace native { +struct TORCH_API structured_acos_out : public at::meta::structured_acos { +void impl(const at::Tensor & self, const at::Tensor & out); +}; +} // namespace native +} // namespace at diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/addr_native.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/addr_native.h new file mode 100644 index 0000000000000000000000000000000000000000..7740fc800c3e5a9f3eacd32a4647ffee461cf5f1 --- /dev/null +++ 
b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/addr_native.h @@ -0,0 +1,25 @@ +#pragma once + +// @generated by torchgen/gen.py from NativeFunction.h + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +namespace at { +namespace native { +TORCH_API at::Tensor math_addr(const at::Tensor & self, const at::Tensor & vec1, const at::Tensor & vec2, const at::Scalar & beta=1, const at::Scalar & alpha=1); +TORCH_API at::Tensor & math_addr_out(const at::Tensor & self, const at::Tensor & vec1, const at::Tensor & vec2, const at::Scalar & beta, const at::Scalar & alpha, at::Tensor & out); +TORCH_API at::Tensor & addr_(at::Tensor & self, const at::Tensor & vec1, const at::Tensor & vec2, const at::Scalar & beta=1, const at::Scalar & alpha=1); +TORCH_API at::Tensor addr(const at::Tensor & self, const at::Tensor & vec1, const at::Tensor & vec2, const at::Scalar & beta=1, const at::Scalar & alpha=1); +TORCH_API at::Tensor & addr_out(const at::Tensor & self, const at::Tensor & vec1, const at::Tensor & vec2, const at::Scalar & beta, const at::Scalar & alpha, at::Tensor & out); +} // namespace native +} // namespace at diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/complex_cpu_dispatch.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/complex_cpu_dispatch.h new file mode 100644 index 0000000000000000000000000000000000000000..e2494a6d60f849511e89450f2b8f68905bf31349 --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/complex_cpu_dispatch.h @@ -0,0 +1,24 @@ +#pragma once +// @generated by torchgen/gen.py from DispatchKeyFunction.h + +// NB: The implementing C++ file is RegisterDispatchKey.cpp + +// The only #includes we need are for custom classes that have defaults in the C++ API +#include +#include +#include + +// Forward declarations of any types needed in the operator signatures. 
+// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. +#include + +namespace at { + +namespace cpu { + +TORCH_API at::Tensor & complex_out(at::Tensor & out, const at::Tensor & real, const at::Tensor & imag); +TORCH_API at::Tensor & complex_outf(const at::Tensor & real, const at::Tensor & imag, at::Tensor & out); + +} // namespace cpu +} // namespace at diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/copy_compositeexplicitautogradnonfunctional_dispatch.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/copy_compositeexplicitautogradnonfunctional_dispatch.h new file mode 100644 index 0000000000000000000000000000000000000000..942d1b8c262910c30e73d12ae1a7ba75f339fefa --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/copy_compositeexplicitautogradnonfunctional_dispatch.h @@ -0,0 +1,23 @@ +#pragma once +// @generated by torchgen/gen.py from DispatchKeyFunction.h + +// NB: The implementing C++ file is RegisterDispatchKey.cpp + +// The only #includes we need are for custom classes that have defaults in the C++ API +#include +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. 
+#include + +namespace at { + +namespace compositeexplicitautogradnonfunctional { + +TORCH_API at::Tensor copy(const at::Tensor & self, const at::Tensor & src, bool non_blocking=false); + +} // namespace compositeexplicitautogradnonfunctional +} // namespace at diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/count_nonzero.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/count_nonzero.h new file mode 100644 index 0000000000000000000000000000000000000000..a145693fcb08dc08ce09122f78eef07c3f06c910 --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/count_nonzero.h @@ -0,0 +1,53 @@ +#pragma once + +// @generated by torchgen/gen.py from Function.h + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + + +#include + +namespace at { + + +// aten::count_nonzero.dim_IntList(Tensor self, int[] dim) -> Tensor +inline at::Tensor count_nonzero(const at::Tensor & self, at::IntArrayRef dim) { + return at::_ops::count_nonzero_dim_IntList::call(self, dim); +} + +// aten::count_nonzero(Tensor self, int? dim=None) -> Tensor +inline at::Tensor count_nonzero(const at::Tensor & self, c10::optional dim=c10::nullopt) { + return at::_ops::count_nonzero::call(self, dim); +} + +// aten::count_nonzero.dim_IntList_out(Tensor self, int[] dim, *, Tensor(a!) out) -> Tensor(a!) +inline at::Tensor & count_nonzero_out(at::Tensor & out, const at::Tensor & self, at::IntArrayRef dim) { + return at::_ops::count_nonzero_dim_IntList_out::call(self, dim, out); +} +// aten::count_nonzero.dim_IntList_out(Tensor self, int[] dim, *, Tensor(a!) out) -> Tensor(a!) +inline at::Tensor & count_nonzero_outf(const at::Tensor & self, at::IntArrayRef dim, at::Tensor & out) { + return at::_ops::count_nonzero_dim_IntList_out::call(self, dim, out); +} + +// aten::count_nonzero.out(Tensor self, int? dim=None, *, Tensor(a!) out) -> Tensor(a!) 
+inline at::Tensor & count_nonzero_out(at::Tensor & out, const at::Tensor & self, c10::optional dim=c10::nullopt) { + return at::_ops::count_nonzero_out::call(self, dim, out); +} +// aten::count_nonzero.out(Tensor self, int? dim=None, *, Tensor(a!) out) -> Tensor(a!) +inline at::Tensor & count_nonzero_outf(const at::Tensor & self, c10::optional dim, at::Tensor & out) { + return at::_ops::count_nonzero_out::call(self, dim, out); +} + +} diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/cudnn_affine_grid_generator_backward_ops.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/cudnn_affine_grid_generator_backward_ops.h new file mode 100644 index 0000000000000000000000000000000000000000..2a6c58eb1bf531c19f565f909e9b382a46dc24eb --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/cudnn_affine_grid_generator_backward_ops.h @@ -0,0 +1,39 @@ +#pragma once + +// @generated by torchgen/gen.py from Operator.h + +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. 
+#include + +namespace at { +namespace _ops { + + +struct TORCH_API cudnn_affine_grid_generator_backward { + using schema = at::Tensor (const at::Tensor &, int64_t, int64_t, int64_t, int64_t); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::cudnn_affine_grid_generator_backward") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "cudnn_affine_grid_generator_backward(Tensor grad, int N, int C, int H, int W) -> Tensor grad_theta") + static at::Tensor call(const at::Tensor & grad, int64_t N, int64_t C, int64_t H, int64_t W); + static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & grad, int64_t N, int64_t C, int64_t H, int64_t W); +}; + +struct TORCH_API cudnn_affine_grid_generator_backward_out { + using schema = at::Tensor & (const at::Tensor &, int64_t, int64_t, int64_t, int64_t, at::Tensor &); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::cudnn_affine_grid_generator_backward") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "out") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "cudnn_affine_grid_generator_backward.out(Tensor grad, int N, int C, int H, int W, *, Tensor(a!) 
out) -> Tensor(a!)") + static at::Tensor & call(const at::Tensor & grad, int64_t N, int64_t C, int64_t H, int64_t W, at::Tensor & out); + static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & grad, int64_t N, int64_t C, int64_t H, int64_t W, at::Tensor & out); +}; + +}} // namespace at::_ops diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/eq_meta_dispatch.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/eq_meta_dispatch.h new file mode 100644 index 0000000000000000000000000000000000000000..ee920d04ed4f4443863897fb7b48bc4d8792a0cb --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/eq_meta_dispatch.h @@ -0,0 +1,30 @@ +#pragma once +// @generated by torchgen/gen.py from DispatchKeyFunction.h + +// NB: The implementing C++ file is RegisterDispatchKey.cpp + +// The only #includes we need are for custom classes that have defaults in the C++ API +#include +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. 
+#include + +namespace at { + +namespace meta { + +TORCH_API at::Tensor eq(const at::Tensor & self, const at::Scalar & other); +TORCH_API at::Tensor & eq_out(at::Tensor & out, const at::Tensor & self, const at::Scalar & other); +TORCH_API at::Tensor & eq_outf(const at::Tensor & self, const at::Scalar & other, at::Tensor & out); +TORCH_API at::Tensor & eq_(at::Tensor & self, const at::Scalar & other); +TORCH_API at::Tensor eq(const at::Tensor & self, const at::Tensor & other); +TORCH_API at::Tensor & eq_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & other); +TORCH_API at::Tensor & eq_outf(const at::Tensor & self, const at::Tensor & other, at::Tensor & out); +TORCH_API at::Tensor & eq_(at::Tensor & self, const at::Tensor & other); + +} // namespace meta +} // namespace at diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/flatten_compositeimplicitautograd_dispatch.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/flatten_compositeimplicitautograd_dispatch.h new file mode 100644 index 0000000000000000000000000000000000000000..484326525aabb5dfe48376df5032187fa90c6067 --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/flatten_compositeimplicitautograd_dispatch.h @@ -0,0 +1,26 @@ +#pragma once +// @generated by torchgen/gen.py from DispatchKeyFunction.h + +// NB: The implementing C++ file is RegisterDispatchKey.cpp + +// The only #includes we need are for custom classes that have defaults in the C++ API +#include +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. 
+#include + +namespace at { + +namespace compositeimplicitautograd { + +TORCH_API at::Tensor flatten(const at::Tensor & self, int64_t start_dim=0, int64_t end_dim=-1); +TORCH_API at::Tensor flatten(const at::Tensor & self, int64_t start_dim, int64_t end_dim, at::Dimname out_dim); +TORCH_API at::Tensor flatten(const at::Tensor & self, at::Dimname start_dim, at::Dimname end_dim, at::Dimname out_dim); +TORCH_API at::Tensor flatten(const at::Tensor & self, at::DimnameList dims, at::Dimname out_dim); + +} // namespace compositeimplicitautograd +} // namespace at diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/geometric_compositeexplicitautograd_dispatch.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/geometric_compositeexplicitautograd_dispatch.h new file mode 100644 index 0000000000000000000000000000000000000000..face188b3b733fc49d7ca79cc1d76b4624b82492 --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/geometric_compositeexplicitautograd_dispatch.h @@ -0,0 +1,25 @@ +#pragma once +// @generated by torchgen/gen.py from DispatchKeyFunction.h + +// NB: The implementing C++ file is RegisterDispatchKey.cpp + +// The only #includes we need are for custom classes that have defaults in the C++ API +#include +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. 
+#include + +namespace at { + +namespace compositeexplicitautograd { + +TORCH_API at::Tensor geometric(const at::Tensor & self, double p, c10::optional generator=c10::nullopt); +TORCH_API at::Tensor & geometric_out(at::Tensor & out, const at::Tensor & self, double p, c10::optional generator=c10::nullopt); +TORCH_API at::Tensor & geometric_outf(const at::Tensor & self, double p, c10::optional generator, at::Tensor & out); + +} // namespace compositeexplicitautograd +} // namespace at diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/geqrf.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/geqrf.h new file mode 100644 index 0000000000000000000000000000000000000000..e4d0402912153e347c69f2e750314b4d6c016152 --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/geqrf.h @@ -0,0 +1,39 @@ +#pragma once + +// @generated by torchgen/gen.py from Function.h + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + + +#include + +namespace at { + + +// aten::geqrf.a(Tensor self, *, Tensor(a!) a, Tensor(b!) tau) -> (Tensor(a!) a, Tensor(b!) tau) +inline ::std::tuple geqrf_out(at::Tensor & a, at::Tensor & tau, const at::Tensor & self) { + return at::_ops::geqrf_a::call(self, a, tau); +} +// aten::geqrf.a(Tensor self, *, Tensor(a!) a, Tensor(b!) tau) -> (Tensor(a!) a, Tensor(b!) 
tau) +inline ::std::tuple geqrf_outf(const at::Tensor & self, at::Tensor & a, at::Tensor & tau) { + return at::_ops::geqrf_a::call(self, a, tau); +} + +// aten::geqrf(Tensor self) -> (Tensor a, Tensor tau) +inline ::std::tuple geqrf(const at::Tensor & self) { + return at::_ops::geqrf::call(self); +} + +} diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/gru_cell.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/gru_cell.h new file mode 100644 index 0000000000000000000000000000000000000000..7e228858a48c1853ea8acccc0c05c5a283ebcc61 --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/gru_cell.h @@ -0,0 +1,30 @@ +#pragma once + +// @generated by torchgen/gen.py from Function.h + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + + +#include + +namespace at { + + +// aten::gru_cell(Tensor input, Tensor hx, Tensor w_ih, Tensor w_hh, Tensor? b_ih=None, Tensor? 
b_hh=None) -> Tensor +inline at::Tensor gru_cell(const at::Tensor & input, const at::Tensor & hx, const at::Tensor & w_ih, const at::Tensor & w_hh, const c10::optional & b_ih={}, const c10::optional & b_hh={}) { + return at::_ops::gru_cell::call(input, hx, w_ih, w_hh, b_ih, b_hh); +} + +} diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/index_select_cpu_dispatch.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/index_select_cpu_dispatch.h new file mode 100644 index 0000000000000000000000000000000000000000..771318844ccbb86af2de4dbbf4226a3450314e72 --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/index_select_cpu_dispatch.h @@ -0,0 +1,25 @@ +#pragma once +// @generated by torchgen/gen.py from DispatchKeyFunction.h + +// NB: The implementing C++ file is RegisterDispatchKey.cpp + +// The only #includes we need are for custom classes that have defaults in the C++ API +#include +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. 
+#include + +namespace at { + +namespace cpu { + +TORCH_API at::Tensor index_select(const at::Tensor & self, int64_t dim, const at::Tensor & index); +TORCH_API at::Tensor & index_select_out(at::Tensor & out, const at::Tensor & self, int64_t dim, const at::Tensor & index); +TORCH_API at::Tensor & index_select_outf(const at::Tensor & self, int64_t dim, const at::Tensor & index, at::Tensor & out); + +} // namespace cpu +} // namespace at diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/lcm_ops.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/lcm_ops.h new file mode 100644 index 0000000000000000000000000000000000000000..ad560922ed827d4eeac753bd790fefe2f8b7ce94 --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/lcm_ops.h @@ -0,0 +1,50 @@ +#pragma once + +// @generated by torchgen/gen.py from Operator.h + +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. +#include + +namespace at { +namespace _ops { + + +struct TORCH_API lcm_out { + using schema = at::Tensor & (const at::Tensor &, const at::Tensor &, at::Tensor &); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::lcm") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "out") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "lcm.out(Tensor self, Tensor other, *, Tensor(a!) 
out) -> Tensor(a!)") + static at::Tensor & call(const at::Tensor & self, const at::Tensor & other, at::Tensor & out); + static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Tensor & other, at::Tensor & out); +}; + +struct TORCH_API lcm { + using schema = at::Tensor (const at::Tensor &, const at::Tensor &); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::lcm") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "lcm(Tensor self, Tensor other) -> Tensor") + static at::Tensor call(const at::Tensor & self, const at::Tensor & other); + static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Tensor & other); +}; + +struct TORCH_API lcm_ { + using schema = at::Tensor & (at::Tensor &, const at::Tensor &); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::lcm_") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "lcm_(Tensor(a!) 
self, Tensor other) -> Tensor(a!)") + static at::Tensor & call(at::Tensor & self, const at::Tensor & other); + static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, at::Tensor & self, const at::Tensor & other); +}; + +}} // namespace at::_ops diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/max_pool3d_with_indices_cuda_dispatch.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/max_pool3d_with_indices_cuda_dispatch.h new file mode 100644 index 0000000000000000000000000000000000000000..597459df1e8d0099a79baaab8e3bb01040d3ef34 --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/max_pool3d_with_indices_cuda_dispatch.h @@ -0,0 +1,25 @@ +#pragma once +// @generated by torchgen/gen.py from DispatchKeyFunction.h + +// NB: The implementing C++ file is RegisterDispatchKey.cpp + +// The only #includes we need are for custom classes that have defaults in the C++ API +#include +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. 
+#include + +namespace at { + +namespace cuda { + +TORCH_API ::std::tuple max_pool3d_with_indices(const at::Tensor & self, at::IntArrayRef kernel_size, at::IntArrayRef stride={}, at::IntArrayRef padding=0, at::IntArrayRef dilation=1, bool ceil_mode=false); +TORCH_API ::std::tuple max_pool3d_with_indices_out(at::Tensor & out, at::Tensor & indices, const at::Tensor & self, at::IntArrayRef kernel_size, at::IntArrayRef stride={}, at::IntArrayRef padding=0, at::IntArrayRef dilation=1, bool ceil_mode=false); +TORCH_API ::std::tuple max_pool3d_with_indices_outf(const at::Tensor & self, at::IntArrayRef kernel_size, at::IntArrayRef stride, at::IntArrayRef padding, at::IntArrayRef dilation, bool ceil_mode, at::Tensor & out, at::Tensor & indices); + +} // namespace cuda +} // namespace at diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/nanmedian_compositeimplicitautograd_dispatch.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/nanmedian_compositeimplicitautograd_dispatch.h new file mode 100644 index 0000000000000000000000000000000000000000..0a30f6a14309a750af9e90ceb66ea1cc95c8e90b --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/nanmedian_compositeimplicitautograd_dispatch.h @@ -0,0 +1,25 @@ +#pragma once +// @generated by torchgen/gen.py from DispatchKeyFunction.h + +// NB: The implementing C++ file is RegisterDispatchKey.cpp + +// The only #includes we need are for custom classes that have defaults in the C++ API +#include +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. 
+#include + +namespace at { + +namespace compositeimplicitautograd { + +TORCH_API ::std::tuple nanmedian(const at::Tensor & self, at::Dimname dim, bool keepdim=false); +TORCH_API ::std::tuple nanmedian_out(at::Tensor & values, at::Tensor & indices, const at::Tensor & self, at::Dimname dim, bool keepdim=false); +TORCH_API ::std::tuple nanmedian_outf(const at::Tensor & self, at::Dimname dim, bool keepdim, at::Tensor & values, at::Tensor & indices); + +} // namespace compositeimplicitautograd +} // namespace at diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/quantize_per_tensor_dynamic_compositeexplicitautograd_dispatch.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/quantize_per_tensor_dynamic_compositeexplicitautograd_dispatch.h new file mode 100644 index 0000000000000000000000000000000000000000..501532a20bc60c1705e345fea48c04adc6236a97 --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/quantize_per_tensor_dynamic_compositeexplicitautograd_dispatch.h @@ -0,0 +1,24 @@ +#pragma once +// @generated by torchgen/gen.py from DispatchKeyFunction.h + +// NB: The implementing C++ file is RegisterDispatchKey.cpp + +// The only #includes we need are for custom classes that have defaults in the C++ API +#include +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. 
+#include + +namespace at { + +namespace compositeexplicitautograd { + +TORCH_API at::Tensor & quantize_per_tensor_dynamic_out(at::Tensor & out, const at::Tensor & self, at::ScalarType dtype, bool reduce_range); +TORCH_API at::Tensor & quantize_per_tensor_dynamic_outf(const at::Tensor & self, at::ScalarType dtype, bool reduce_range, at::Tensor & out); + +} // namespace compositeexplicitautograd +} // namespace at diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/special_hermite_polynomial_h_ops.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/special_hermite_polynomial_h_ops.h new file mode 100644 index 0000000000000000000000000000000000000000..9b1592b95c0d94f71545326de31c8508cba91a5c --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/special_hermite_polynomial_h_ops.h @@ -0,0 +1,83 @@ +#pragma once + +// @generated by torchgen/gen.py from Operator.h + +#include +#include + +// Forward declarations of any types needed in the operator signatures. +// We can't directly include these classes because it will cause circular include dependencies. +// This file is included by TensorBody.h, which defines the Tensor class. 
+#include + +namespace at { +namespace _ops { + + +struct TORCH_API special_hermite_polynomial_h { + using schema = at::Tensor (const at::Tensor &, const at::Tensor &); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::special_hermite_polynomial_h") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "special_hermite_polynomial_h(Tensor x, Tensor n) -> Tensor") + static at::Tensor call(const at::Tensor & x, const at::Tensor & n); + static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & x, const at::Tensor & n); +}; + +struct TORCH_API special_hermite_polynomial_h_x_scalar { + using schema = at::Tensor (const at::Scalar &, const at::Tensor &); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::special_hermite_polynomial_h") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "x_scalar") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "special_hermite_polynomial_h.x_scalar(Scalar x, Tensor n) -> Tensor") + static at::Tensor call(const at::Scalar & x, const at::Tensor & n); + static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Scalar & x, const at::Tensor & n); +}; + +struct TORCH_API special_hermite_polynomial_h_n_scalar { + using schema = at::Tensor (const at::Tensor &, const at::Scalar &); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::special_hermite_polynomial_h") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "n_scalar") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "special_hermite_polynomial_h.n_scalar(Tensor x, Scalar n) -> Tensor") + static at::Tensor call(const at::Tensor & x, const at::Scalar & n); + static 
at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & x, const at::Scalar & n); +}; + +struct TORCH_API special_hermite_polynomial_h_out { + using schema = at::Tensor & (const at::Tensor &, const at::Tensor &, at::Tensor &); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::special_hermite_polynomial_h") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "out") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "special_hermite_polynomial_h.out(Tensor x, Tensor n, *, Tensor(a!) out) -> Tensor(a!)") + static at::Tensor & call(const at::Tensor & x, const at::Tensor & n, at::Tensor & out); + static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & x, const at::Tensor & n, at::Tensor & out); +}; + +struct TORCH_API special_hermite_polynomial_h_x_scalar_out { + using schema = at::Tensor & (const at::Scalar &, const at::Tensor &, at::Tensor &); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::special_hermite_polynomial_h") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "x_scalar_out") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "special_hermite_polynomial_h.x_scalar_out(Scalar x, Tensor n, *, Tensor(a!) 
out) -> Tensor(a!)") + static at::Tensor & call(const at::Scalar & x, const at::Tensor & n, at::Tensor & out); + static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Scalar & x, const at::Tensor & n, at::Tensor & out); +}; + +struct TORCH_API special_hermite_polynomial_h_n_scalar_out { + using schema = at::Tensor & (const at::Tensor &, const at::Scalar &, at::Tensor &); + using ptr_schema = schema*; + // See Note [static constexpr char* members for windows NVCC] + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::special_hermite_polynomial_h") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "n_scalar_out") + STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "special_hermite_polynomial_h.n_scalar_out(Tensor x, Scalar n, *, Tensor(a!) out) -> Tensor(a!)") + static at::Tensor & call(const at::Tensor & x, const at::Scalar & n, at::Tensor & out); + static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & x, const at::Scalar & n, at::Tensor & out); +}; + +}} // namespace at::_ops diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/special_hermite_polynomial_he_native.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/special_hermite_polynomial_he_native.h new file mode 100644 index 0000000000000000000000000000000000000000..476663801cc3b3e1ffbf6c462394c7c385aba126 --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/special_hermite_polynomial_he_native.h @@ -0,0 +1,27 @@ +#pragma once + +// @generated by torchgen/gen.py from NativeFunction.h + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace at { +namespace native { +struct TORCH_API structured_special_hermite_polynomial_he_out : public at::meta::structured_special_hermite_polynomial_he { +void impl(const at::Tensor & x, const at::Tensor & n, const at::Tensor & out); +}; +TORCH_API at::Tensor 
special_hermite_polynomial_he(const at::Scalar & x, const at::Tensor & n); +TORCH_API at::Tensor & special_hermite_polynomial_he_out(const at::Scalar & x, const at::Tensor & n, at::Tensor & out); +TORCH_API at::Tensor special_hermite_polynomial_he(const at::Tensor & x, const at::Scalar & n); +TORCH_API at::Tensor & special_hermite_polynomial_he_out(const at::Tensor & x, const at::Scalar & n, at::Tensor & out); +} // namespace native +} // namespace at diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/special_psi.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/special_psi.h new file mode 100644 index 0000000000000000000000000000000000000000..35ecd7f1dc24882da09d9e2dff9577a7d93d91d3 --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/special_psi.h @@ -0,0 +1,39 @@ +#pragma once + +// @generated by torchgen/gen.py from Function.h + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + + +#include + +namespace at { + + +// aten::special_psi(Tensor self) -> Tensor +inline at::Tensor special_psi(const at::Tensor & self) { + return at::_ops::special_psi::call(self); +} + +// aten::special_psi.out(Tensor self, *, Tensor(a!) out) -> Tensor(a!) +inline at::Tensor & special_psi_out(at::Tensor & out, const at::Tensor & self) { + return at::_ops::special_psi_out::call(self, out); +} +// aten::special_psi.out(Tensor self, *, Tensor(a!) out) -> Tensor(a!) 
+inline at::Tensor & special_psi_outf(const at::Tensor & self, at::Tensor & out) { + return at::_ops::special_psi_out::call(self, out); +} + +} diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/to_native.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/to_native.h new file mode 100644 index 0000000000000000000000000000000000000000..e596bbec2f30becb2a0c5e73b9a733a7d3206231 --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/to_native.h @@ -0,0 +1,24 @@ +#pragma once + +// @generated by torchgen/gen.py from NativeFunction.h + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +namespace at { +namespace native { +TORCH_API at::Tensor to(const at::Tensor & self, c10::optional dtype={}, c10::optional layout={}, c10::optional device={}, c10::optional pin_memory={}, bool non_blocking=false, bool copy=false, c10::optional memory_format=c10::nullopt); +TORCH_API at::Tensor to(const at::Tensor & self, at::Device device, at::ScalarType dtype, bool non_blocking=false, bool copy=false, c10::optional memory_format=c10::nullopt); +TORCH_API at::Tensor to(const at::Tensor & self, at::ScalarType dtype, bool non_blocking=false, bool copy=false, c10::optional memory_format=c10::nullopt); +TORCH_API at::Tensor to(const at::Tensor & self, const at::Tensor & other, bool non_blocking=false, bool copy=false, c10::optional memory_format=c10::nullopt); +} // namespace native +} // namespace at diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/view_copy.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/view_copy.h new file mode 100644 index 0000000000000000000000000000000000000000..ecc673f8b0f560c43aa1a53a79f91aa9ad26b4fc --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/view_copy.h @@ -0,0 +1,105 @@ +#pragma once + +// @generated by torchgen/gen.py from Function.h + +#include 
+#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + + +#include + +namespace at { + + +// aten::view_copy(Tensor self, SymInt[] size) -> Tensor +inline at::Tensor view_copy(const at::Tensor & self, at::IntArrayRef size) { + return at::_ops::view_copy::call(self, c10::fromIntArrayRefSlow(size)); +} +namespace symint { + template ::value>> + at::Tensor view_copy(const at::Tensor & self, at::IntArrayRef size) { + return at::_ops::view_copy::call(self, c10::fromIntArrayRefSlow(size)); + } +} + +// aten::view_copy(Tensor self, SymInt[] size) -> Tensor +inline at::Tensor view_copy_symint(const at::Tensor & self, c10::SymIntArrayRef size) { + return at::_ops::view_copy::call(self, size); +} +namespace symint { + template ::value>> + at::Tensor view_copy(const at::Tensor & self, c10::SymIntArrayRef size) { + return at::_ops::view_copy::call(self, size); + } +} + +// aten::view_copy.dtype(Tensor self, ScalarType dtype) -> Tensor +inline at::Tensor view_copy(const at::Tensor & self, at::ScalarType dtype) { + return at::_ops::view_copy_dtype::call(self, dtype); +} + +// aten::view_copy.out(Tensor self, SymInt[] size, *, Tensor(a!) out) -> Tensor(a!) +inline at::Tensor & view_copy_out(at::Tensor & out, const at::Tensor & self, at::IntArrayRef size) { + return at::_ops::view_copy_out::call(self, c10::fromIntArrayRefSlow(size), out); +} +namespace symint { + template ::value>> + at::Tensor & view_copy_out(at::Tensor & out, const at::Tensor & self, at::IntArrayRef size) { + return at::_ops::view_copy_out::call(self, c10::fromIntArrayRefSlow(size), out); + } +} + +// aten::view_copy.out(Tensor self, SymInt[] size, *, Tensor(a!) out) -> Tensor(a!) 
+inline at::Tensor & view_copy_outf(const at::Tensor & self, at::IntArrayRef size, at::Tensor & out) { + return at::_ops::view_copy_out::call(self, c10::fromIntArrayRefSlow(size), out); +} +namespace symint { + template ::value>> + at::Tensor & view_copy_outf(const at::Tensor & self, at::IntArrayRef size, at::Tensor & out) { + return at::_ops::view_copy_out::call(self, c10::fromIntArrayRefSlow(size), out); + } +} + +// aten::view_copy.out(Tensor self, SymInt[] size, *, Tensor(a!) out) -> Tensor(a!) +inline at::Tensor & view_copy_symint_out(at::Tensor & out, const at::Tensor & self, c10::SymIntArrayRef size) { + return at::_ops::view_copy_out::call(self, size, out); +} +namespace symint { + template ::value>> + at::Tensor & view_copy_out(at::Tensor & out, const at::Tensor & self, c10::SymIntArrayRef size) { + return at::_ops::view_copy_out::call(self, size, out); + } +} + +// aten::view_copy.out(Tensor self, SymInt[] size, *, Tensor(a!) out) -> Tensor(a!) +inline at::Tensor & view_copy_symint_outf(const at::Tensor & self, c10::SymIntArrayRef size, at::Tensor & out) { + return at::_ops::view_copy_out::call(self, size, out); +} +namespace symint { + template ::value>> + at::Tensor & view_copy_outf(const at::Tensor & self, c10::SymIntArrayRef size, at::Tensor & out) { + return at::_ops::view_copy_out::call(self, size, out); + } +} + +// aten::view_copy.dtype_out(Tensor self, ScalarType dtype, *, Tensor(a!) out) -> Tensor(a!) +inline at::Tensor & view_copy_out(at::Tensor & out, const at::Tensor & self, at::ScalarType dtype) { + return at::_ops::view_copy_dtype_out::call(self, dtype, out); +} +// aten::view_copy.dtype_out(Tensor self, ScalarType dtype, *, Tensor(a!) out) -> Tensor(a!) 
+inline at::Tensor & view_copy_outf(const at::Tensor & self, at::ScalarType dtype, at::Tensor & out) { + return at::_ops::view_copy_dtype_out::call(self, dtype, out); +} + +} diff --git a/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/vsplit.h b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/vsplit.h new file mode 100644 index 0000000000000000000000000000000000000000..c195d542458414da5feaaf272a336023896b0d0a --- /dev/null +++ b/videollama2/lib/python3.10/site-packages/torch/include/ATen/ops/vsplit.h @@ -0,0 +1,35 @@ +#pragma once + +// @generated by torchgen/gen.py from Function.h + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + + +#include + +namespace at { + + +// aten::vsplit.int(Tensor(a -> *) self, int sections) -> Tensor(a)[] +inline ::std::vector vsplit(const at::Tensor & self, int64_t sections) { + return at::_ops::vsplit_int::call(self, sections); +} + +// aten::vsplit.array(Tensor(a -> *) self, int[] indices) -> Tensor(a)[] +inline ::std::vector vsplit(const at::Tensor & self, at::IntArrayRef indices) { + return at::_ops::vsplit_array::call(self, indices); +} + +} diff --git a/vllm/lib/python3.10/site-packages/email_validator-2.2.0.dist-info/RECORD b/vllm/lib/python3.10/site-packages/email_validator-2.2.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..5b4f0f1b9d40095b146e0d5f1b476b46a69bce4d --- /dev/null +++ b/vllm/lib/python3.10/site-packages/email_validator-2.2.0.dist-info/RECORD @@ -0,0 +1,26 @@ +../../../bin/email_validator,sha256=0jDxxNmaF4w0-jMJmks7JAJe-GEfXRQWg96Ev5QVQho,233 +email_validator-2.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +email_validator-2.2.0.dist-info/LICENSE,sha256=ZyF5dS4QkTSj-yvdB4Cyn9t6A5dPD1hqE66tUSlWLUw,1212 +email_validator-2.2.0.dist-info/METADATA,sha256=vELkkg-p-qMuqNFX6uzDmMaruT7Pe5PDAQexHLAB4XM,25741 
+email_validator-2.2.0.dist-info/RECORD,, +email_validator-2.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +email_validator-2.2.0.dist-info/WHEEL,sha256=cpQTJ5IWu9CdaPViMhC9YzF8gZuS5-vlfoFihTBC86A,91 +email_validator-2.2.0.dist-info/entry_points.txt,sha256=zRM_6bNIUSHTbNx5u6M3nK1MAguvryrc9hICC6HyrBg,66 +email_validator-2.2.0.dist-info/top_level.txt,sha256=fYDOSWFZke46ut7WqdOAJjjhlpPYAaOwOwIsh3s8oWI,16 +email_validator/__init__.py,sha256=g-TFM6vzpEt4dMG93giGlS343yXXXIy7EOLNFEn6DfA,4360 +email_validator/__main__.py,sha256=TIvjaG_OSFRciH0J2pnEJEdX3uJy3ZgocmasEqh9EEI,2243 +email_validator/__pycache__/__init__.cpython-310.pyc,, +email_validator/__pycache__/__main__.cpython-310.pyc,, +email_validator/__pycache__/deliverability.cpython-310.pyc,, +email_validator/__pycache__/exceptions_types.cpython-310.pyc,, +email_validator/__pycache__/rfc_constants.cpython-310.pyc,, +email_validator/__pycache__/syntax.cpython-310.pyc,, +email_validator/__pycache__/validate_email.cpython-310.pyc,, +email_validator/__pycache__/version.cpython-310.pyc,, +email_validator/deliverability.py,sha256=e6eODNSaLMiM29EZ3bWYDFkQDlMIdicBaykjYQJwYig,7222 +email_validator/exceptions_types.py,sha256=yLxXqwtl5dXa-938K7skLP1pMFgi0oovzCs74mX7TGs,6024 +email_validator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +email_validator/rfc_constants.py,sha256=KVUshwIu699cle3UzDU2_fFBSQOO7p91Z_hrlNANtGM,2767 +email_validator/syntax.py,sha256=Mo5KLgEsbQcvNzs8zO5QbhzUK4MAjL9yJFDpwsF12lY,36005 +email_validator/validate_email.py,sha256=YUXY5Sv_mQ7Vuu_AmGdISza8v-VaABnNMLrlWv8EIl4,8401 +email_validator/version.py,sha256=DKk-1b-rZsJFxFi1JoJ7TmEvIEQ0rf-C9HAZWwvjuM0,22 diff --git a/vllm/lib/python3.10/site-packages/email_validator-2.2.0.dist-info/WHEEL b/vllm/lib/python3.10/site-packages/email_validator-2.2.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..9086d2718e779a97169e6949d85028112e6a4f69 --- /dev/null +++ 
b/vllm/lib/python3.10/site-packages/email_validator-2.2.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (70.1.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/vllm/lib/python3.10/site-packages/jsonschema/__init__.py b/vllm/lib/python3.10/site-packages/jsonschema/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..79924cf7e51665d03295ecdb7d0930bdeb806d4f --- /dev/null +++ b/vllm/lib/python3.10/site-packages/jsonschema/__init__.py @@ -0,0 +1,120 @@ +""" +An implementation of JSON Schema for Python. + +The main functionality is provided by the validator classes for each of the +supported JSON Schema versions. + +Most commonly, `jsonschema.validators.validate` is the quickest way to simply +validate a given instance under a schema, and will create a validator +for you. +""" +import warnings + +from jsonschema._format import FormatChecker +from jsonschema._types import TypeChecker +from jsonschema.exceptions import SchemaError, ValidationError +from jsonschema.validators import ( + Draft3Validator, + Draft4Validator, + Draft6Validator, + Draft7Validator, + Draft201909Validator, + Draft202012Validator, + validate, +) + + +def __getattr__(name): + if name == "__version__": + warnings.warn( + "Accessing jsonschema.__version__ is deprecated and will be " + "removed in a future release. Use importlib.metadata directly " + "to query for jsonschema's version.", + DeprecationWarning, + stacklevel=2, + ) + + from importlib import metadata + return metadata.version("jsonschema") + elif name == "RefResolver": + from jsonschema.validators import _RefResolver + warnings.warn( + _RefResolver._DEPRECATION_MESSAGE, + DeprecationWarning, + stacklevel=2, + ) + return _RefResolver + elif name == "ErrorTree": + warnings.warn( + "Importing ErrorTree directly from the jsonschema package " + "is deprecated and will become an ImportError. 
Import it from " + "jsonschema.exceptions instead.", + DeprecationWarning, + stacklevel=2, + ) + from jsonschema.exceptions import ErrorTree + return ErrorTree + elif name == "FormatError": + warnings.warn( + "Importing FormatError directly from the jsonschema package " + "is deprecated and will become an ImportError. Import it from " + "jsonschema.exceptions instead.", + DeprecationWarning, + stacklevel=2, + ) + from jsonschema.exceptions import FormatError + return FormatError + elif name == "Validator": + warnings.warn( + "Importing Validator directly from the jsonschema package " + "is deprecated and will become an ImportError. Import it from " + "jsonschema.protocols instead.", + DeprecationWarning, + stacklevel=2, + ) + from jsonschema.protocols import Validator + return Validator + elif name == "RefResolutionError": + from jsonschema.exceptions import _RefResolutionError + warnings.warn( + _RefResolutionError._DEPRECATION_MESSAGE, + DeprecationWarning, + stacklevel=2, + ) + return _RefResolutionError + + format_checkers = { + "draft3_format_checker": Draft3Validator, + "draft4_format_checker": Draft4Validator, + "draft6_format_checker": Draft6Validator, + "draft7_format_checker": Draft7Validator, + "draft201909_format_checker": Draft201909Validator, + "draft202012_format_checker": Draft202012Validator, + } + ValidatorForFormat = format_checkers.get(name) + if ValidatorForFormat is not None: + warnings.warn( + f"Accessing jsonschema.{name} is deprecated and will be " + "removed in a future release. 
Instead, use the FORMAT_CHECKER " + "attribute on the corresponding Validator.", + DeprecationWarning, + stacklevel=2, + ) + return ValidatorForFormat.FORMAT_CHECKER + + raise AttributeError(f"module {__name__} has no attribute {name}") + + +__all__ = [ + "Draft201909Validator", + "Draft202012Validator", + "Draft3Validator", + "Draft4Validator", + "Draft6Validator", + "Draft7Validator", + "FormatChecker", + "SchemaError", + "TypeChecker", + "ValidationError", + "validate", +] diff --git a/vllm/lib/python3.10/site-packages/jsonschema/__main__.py b/vllm/lib/python3.10/site-packages/jsonschema/__main__.py new file mode 100644 index 0000000000000000000000000000000000000000..fb260ae145d7518f8e8fa16ed13cc2c6173e7404 --- /dev/null +++ b/vllm/lib/python3.10/site-packages/jsonschema/__main__.py @@ -0,0 +1,6 @@ +""" +The jsonschema CLI is now deprecated in favor of check-jsonschema. +""" +from jsonschema.cli import main + +main() diff --git a/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/_format.cpython-310.pyc b/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/_format.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..53d9bbd85d0ad7cf86245e862d8871b3caeda11f Binary files /dev/null and b/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/_format.cpython-310.pyc differ diff --git a/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/_keywords.cpython-310.pyc b/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/_keywords.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d74387206413e8bcad66e2a881039d1520f8256c Binary files /dev/null and b/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/_keywords.cpython-310.pyc differ diff --git a/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/_legacy_keywords.cpython-310.pyc b/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/_legacy_keywords.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..71e818cdf9c8049ac822ce61ae27a197f2417353 Binary files /dev/null and b/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/_legacy_keywords.cpython-310.pyc differ diff --git a/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/_types.cpython-310.pyc b/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/_types.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..56ef7744735230ae3ef91c6b357d3b747d5e9a14 Binary files /dev/null and b/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/_types.cpython-310.pyc differ diff --git a/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/_typing.cpython-310.pyc b/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/_typing.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5def4f06cbffc5cc6fc6f0aa9124eb52a1ed9fd0 Binary files /dev/null and b/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/_typing.cpython-310.pyc differ diff --git a/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/_utils.cpython-310.pyc b/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a8f31f429b81734a87ec27762831b5f9d72ed7e2 Binary files /dev/null and b/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/_utils.cpython-310.pyc differ diff --git a/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/cli.cpython-310.pyc b/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/cli.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c533aa6170d6cf9583e9b0772a85902f40d37914 Binary files /dev/null and b/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/cli.cpython-310.pyc differ diff --git a/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/exceptions.cpython-310.pyc b/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/exceptions.cpython-310.pyc new file 
mode 100644 index 0000000000000000000000000000000000000000..dbc505537ddb582fd6a234e9f0a0c646642d0b96 Binary files /dev/null and b/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/exceptions.cpython-310.pyc differ diff --git a/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/protocols.cpython-310.pyc b/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/protocols.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..231ed031237886df60729091a401fd773f41d279 Binary files /dev/null and b/vllm/lib/python3.10/site-packages/jsonschema/__pycache__/protocols.cpython-310.pyc differ diff --git a/vllm/lib/python3.10/site-packages/jsonschema/_format.py b/vllm/lib/python3.10/site-packages/jsonschema/_format.py new file mode 100644 index 0000000000000000000000000000000000000000..6e87620cc9d5fcf0102fa1022f3afcefca0cbfa3 --- /dev/null +++ b/vllm/lib/python3.10/site-packages/jsonschema/_format.py @@ -0,0 +1,519 @@ +from __future__ import annotations + +from contextlib import suppress +from datetime import date, datetime +from uuid import UUID +import ipaddress +import re +import typing +import warnings + +from jsonschema.exceptions import FormatError + +_FormatCheckCallable = typing.Callable[[object], bool] +#: A format checker callable. +_F = typing.TypeVar("_F", bound=_FormatCheckCallable) +_RaisesType = typing.Union[ + typing.Type[Exception], typing.Tuple[typing.Type[Exception], ...], +] + +_RE_DATE = re.compile(r"^\d{4}-\d{2}-\d{2}$", re.ASCII) + + +class FormatChecker: + """ + A ``format`` property checker. + + JSON Schema does not mandate that the ``format`` property actually do any + validation. If validation is desired however, instances of this class can + be hooked into validators to enable format validation. + + `FormatChecker` objects always return ``True`` when asked about + formats that they do not know how to validate. + + To add a check for a custom format use the `FormatChecker.checks` + decorator. 
+ + Arguments: + + formats: + + The known formats to validate. This argument can be used to + limit which formats will be used during validation. + + """ + + checkers: dict[ + str, + tuple[_FormatCheckCallable, _RaisesType], + ] = {} # noqa: RUF012 + + def __init__(self, formats: typing.Iterable[str] | None = None): + if formats is None: + formats = self.checkers.keys() + self.checkers = {k: self.checkers[k] for k in formats} + + def __repr__(self): + return f"" + + def checks( + self, format: str, raises: _RaisesType = (), + ) -> typing.Callable[[_F], _F]: + """ + Register a decorated function as validating a new format. + + Arguments: + + format: + + The format that the decorated function will check. + + raises: + + The exception(s) raised by the decorated function when an + invalid instance is found. + + The exception object will be accessible as the + `jsonschema.exceptions.ValidationError.cause` attribute of the + resulting validation error. + + """ + + def _checks(func: _F) -> _F: + self.checkers[format] = (func, raises) + return func + + return _checks + + @classmethod + def cls_checks( + cls, format: str, raises: _RaisesType = (), + ) -> typing.Callable[[_F], _F]: + warnings.warn( + ( + "FormatChecker.cls_checks is deprecated. Call " + "FormatChecker.checks on a specific FormatChecker instance " + "instead." + ), + DeprecationWarning, + stacklevel=2, + ) + return cls._cls_checks(format=format, raises=raises) + + @classmethod + def _cls_checks( + cls, format: str, raises: _RaisesType = (), + ) -> typing.Callable[[_F], _F]: + def _checks(func: _F) -> _F: + cls.checkers[format] = (func, raises) + return func + + return _checks + + def check(self, instance: object, format: str) -> None: + """ + Check whether the instance conforms to the given format. + + Arguments: + + instance (*any primitive type*, i.e. 
str, number, bool): + + The instance to check + + format: + + The format that instance should conform to + + Raises: + + FormatError: + + if the instance does not conform to ``format`` + + """ + if format not in self.checkers: + return + + func, raises = self.checkers[format] + result, cause = None, None + try: + result = func(instance) + except raises as e: + cause = e + if not result: + raise FormatError(f"{instance!r} is not a {format!r}", cause=cause) + + def conforms(self, instance: object, format: str) -> bool: + """ + Check whether the instance conforms to the given format. + + Arguments: + + instance (*any primitive type*, i.e. str, number, bool): + + The instance to check + + format: + + The format that instance should conform to + + Returns: + + bool: whether it conformed + + """ + try: + self.check(instance, format) + except FormatError: + return False + else: + return True + + +draft3_format_checker = FormatChecker() +draft4_format_checker = FormatChecker() +draft6_format_checker = FormatChecker() +draft7_format_checker = FormatChecker() +draft201909_format_checker = FormatChecker() +draft202012_format_checker = FormatChecker() + +_draft_checkers: dict[str, FormatChecker] = dict( + draft3=draft3_format_checker, + draft4=draft4_format_checker, + draft6=draft6_format_checker, + draft7=draft7_format_checker, + draft201909=draft201909_format_checker, + draft202012=draft202012_format_checker, +) + + +def _checks_drafts( + name=None, + draft3=None, + draft4=None, + draft6=None, + draft7=None, + draft201909=None, + draft202012=None, + raises=(), +) -> typing.Callable[[_F], _F]: + draft3 = draft3 or name + draft4 = draft4 or name + draft6 = draft6 or name + draft7 = draft7 or name + draft201909 = draft201909 or name + draft202012 = draft202012 or name + + def wrap(func: _F) -> _F: + if draft3: + func = _draft_checkers["draft3"].checks(draft3, raises)(func) + if draft4: + func = _draft_checkers["draft4"].checks(draft4, raises)(func) + if draft6: + func = 
_draft_checkers["draft6"].checks(draft6, raises)(func) + if draft7: + func = _draft_checkers["draft7"].checks(draft7, raises)(func) + if draft201909: + func = _draft_checkers["draft201909"].checks(draft201909, raises)( + func, + ) + if draft202012: + func = _draft_checkers["draft202012"].checks(draft202012, raises)( + func, + ) + + # Oy. This is bad global state, but relied upon for now, until + # deprecation. See #519 and test_format_checkers_come_with_defaults + FormatChecker._cls_checks( + draft202012 or draft201909 or draft7 or draft6 or draft4 or draft3, + raises, + )(func) + return func + + return wrap + + +@_checks_drafts(name="idn-email") +@_checks_drafts(name="email") +def is_email(instance: object) -> bool: + if not isinstance(instance, str): + return True + return "@" in instance + + +@_checks_drafts( + draft3="ip-address", + draft4="ipv4", + draft6="ipv4", + draft7="ipv4", + draft201909="ipv4", + draft202012="ipv4", + raises=ipaddress.AddressValueError, +) +def is_ipv4(instance: object) -> bool: + if not isinstance(instance, str): + return True + return bool(ipaddress.IPv4Address(instance)) + + +@_checks_drafts(name="ipv6", raises=ipaddress.AddressValueError) +def is_ipv6(instance: object) -> bool: + if not isinstance(instance, str): + return True + address = ipaddress.IPv6Address(instance) + return not getattr(address, "scope_id", "") + + +with suppress(ImportError): + from fqdn import FQDN + + @_checks_drafts( + draft3="host-name", + draft4="hostname", + draft6="hostname", + draft7="hostname", + draft201909="hostname", + draft202012="hostname", + ) + def is_host_name(instance: object) -> bool: + if not isinstance(instance, str): + return True + return FQDN(instance, min_labels=1).is_valid + + +with suppress(ImportError): + # The built-in `idna` codec only implements RFC 3890, so we go elsewhere. 
+ import idna + + @_checks_drafts( + draft7="idn-hostname", + draft201909="idn-hostname", + draft202012="idn-hostname", + raises=(idna.IDNAError, UnicodeError), + ) + def is_idn_host_name(instance: object) -> bool: + if not isinstance(instance, str): + return True + idna.encode(instance) + return True + + +try: + import rfc3987 +except ImportError: + with suppress(ImportError): + from rfc3986_validator import validate_rfc3986 + + @_checks_drafts(name="uri") + def is_uri(instance: object) -> bool: + if not isinstance(instance, str): + return True + return validate_rfc3986(instance, rule="URI") + + @_checks_drafts( + draft6="uri-reference", + draft7="uri-reference", + draft201909="uri-reference", + draft202012="uri-reference", + raises=ValueError, + ) + def is_uri_reference(instance: object) -> bool: + if not isinstance(instance, str): + return True + return validate_rfc3986(instance, rule="URI_reference") + +else: + + @_checks_drafts( + draft7="iri", + draft201909="iri", + draft202012="iri", + raises=ValueError, + ) + def is_iri(instance: object) -> bool: + if not isinstance(instance, str): + return True + return rfc3987.parse(instance, rule="IRI") + + @_checks_drafts( + draft7="iri-reference", + draft201909="iri-reference", + draft202012="iri-reference", + raises=ValueError, + ) + def is_iri_reference(instance: object) -> bool: + if not isinstance(instance, str): + return True + return rfc3987.parse(instance, rule="IRI_reference") + + @_checks_drafts(name="uri", raises=ValueError) + def is_uri(instance: object) -> bool: + if not isinstance(instance, str): + return True + return rfc3987.parse(instance, rule="URI") + + @_checks_drafts( + draft6="uri-reference", + draft7="uri-reference", + draft201909="uri-reference", + draft202012="uri-reference", + raises=ValueError, + ) + def is_uri_reference(instance: object) -> bool: + if not isinstance(instance, str): + return True + return rfc3987.parse(instance, rule="URI_reference") + + +with suppress(ImportError): + from 
rfc3339_validator import validate_rfc3339 + + @_checks_drafts(name="date-time") + def is_datetime(instance: object) -> bool: + if not isinstance(instance, str): + return True + return validate_rfc3339(instance.upper()) + + @_checks_drafts( + draft7="time", + draft201909="time", + draft202012="time", + ) + def is_time(instance: object) -> bool: + if not isinstance(instance, str): + return True + return is_datetime("1970-01-01T" + instance) + + +@_checks_drafts(name="regex", raises=re.error) +def is_regex(instance: object) -> bool: + if not isinstance(instance, str): + return True + return bool(re.compile(instance)) + + +@_checks_drafts( + draft3="date", + draft7="date", + draft201909="date", + draft202012="date", + raises=ValueError, +) +def is_date(instance: object) -> bool: + if not isinstance(instance, str): + return True + return bool(_RE_DATE.fullmatch(instance) and date.fromisoformat(instance)) + + +@_checks_drafts(draft3="time", raises=ValueError) +def is_draft3_time(instance: object) -> bool: + if not isinstance(instance, str): + return True + return bool(datetime.strptime(instance, "%H:%M:%S")) # noqa: DTZ007 + + +with suppress(ImportError): + import webcolors + + @_checks_drafts(draft3="color", raises=(ValueError, TypeError)) + def is_css21_color(instance: object) -> bool: + if isinstance(instance, str): + try: + webcolors.name_to_hex(instance) + except ValueError: + webcolors.normalize_hex(instance.lower()) + return True + + +with suppress(ImportError): + import jsonpointer + + @_checks_drafts( + draft6="json-pointer", + draft7="json-pointer", + draft201909="json-pointer", + draft202012="json-pointer", + raises=jsonpointer.JsonPointerException, + ) + def is_json_pointer(instance: object) -> bool: + if not isinstance(instance, str): + return True + return bool(jsonpointer.JsonPointer(instance)) + + # TODO: I don't want to maintain this, so it + # needs to go either into jsonpointer (pending + # https://github.com/stefankoegl/python-json-pointer/issues/34) 
or + # into a new external library. + @_checks_drafts( + draft7="relative-json-pointer", + draft201909="relative-json-pointer", + draft202012="relative-json-pointer", + raises=jsonpointer.JsonPointerException, + ) + def is_relative_json_pointer(instance: object) -> bool: + # Definition taken from: + # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3 + if not isinstance(instance, str): + return True + if not instance: + return False + + non_negative_integer, rest = [], "" + for i, character in enumerate(instance): + if character.isdigit(): + # digits with a leading "0" are not allowed + if i > 0 and int(instance[i - 1]) == 0: + return False + + non_negative_integer.append(character) + continue + + if not non_negative_integer: + return False + + rest = instance[i:] + break + return (rest == "#") or bool(jsonpointer.JsonPointer(rest)) + + +with suppress(ImportError): + import uri_template + + @_checks_drafts( + draft6="uri-template", + draft7="uri-template", + draft201909="uri-template", + draft202012="uri-template", + ) + def is_uri_template(instance: object) -> bool: + if not isinstance(instance, str): + return True + return uri_template.validate(instance) + + +with suppress(ImportError): + import isoduration + + @_checks_drafts( + draft201909="duration", + draft202012="duration", + raises=isoduration.DurationParsingException, + ) + def is_duration(instance: object) -> bool: + if not isinstance(instance, str): + return True + isoduration.parse_duration(instance) + # FIXME: See bolsote/isoduration#25 and bolsote/isoduration#21 + return instance.endswith(tuple("DMYWHMS")) + + +@_checks_drafts( + draft201909="uuid", + draft202012="uuid", + raises=ValueError, +) +def is_uuid(instance: object) -> bool: + if not isinstance(instance, str): + return True + UUID(instance) + return all(instance[position] == "-" for position in (8, 13, 18, 23)) diff --git a/vllm/lib/python3.10/site-packages/jsonschema/_keywords.py 
from fractions import Fraction
import re

from jsonschema._utils import (
    ensure_list,
    equal,
    extras_msg,
    find_additional_properties,
    find_evaluated_item_indexes_by_schema,
    find_evaluated_property_keys_by_schema,
    uniq,
)
from jsonschema.exceptions import FormatError, ValidationError


def patternProperties(validator, patternProperties, instance, schema):
    """
    Validate ``patternProperties``: each property whose name matches a
    pattern must satisfy that pattern's subschema.
    """
    if not validator.is_type(instance, "object"):
        return

    for pattern, subschema in patternProperties.items():
        for key, value in instance.items():
            if re.search(pattern, key):
                yield from validator.descend(
                    value, subschema, path=key, schema_path=pattern,
                )


def propertyNames(validator, propertyNames, instance, schema):
    """
    Validate ``propertyNames``: every property name itself must be valid
    under the given schema.
    """
    if not validator.is_type(instance, "object"):
        return

    for name in instance:
        yield from validator.descend(instance=name, schema=propertyNames)


def additionalProperties(validator, aP, instance, schema):
    """
    Validate ``additionalProperties`` against the properties that neither
    ``properties`` nor ``patternProperties`` accounted for.
    """
    if not validator.is_type(instance, "object"):
        return

    extras = set(find_additional_properties(instance, schema))

    if validator.is_type(aP, "object"):
        for extra in extras:
            yield from validator.descend(instance[extra], aP, path=extra)
        return

    # Only a falsy `aP` with leftover properties is an error.
    if aP or not extras:
        return

    if "patternProperties" in schema:
        verb = "does" if len(extras) == 1 else "do"
        joined = ", ".join(repr(each) for each in sorted(extras))
        patterns = ", ".join(
            repr(each) for each in sorted(schema["patternProperties"])
        )
        error = f"{joined} {verb} not match any of the regexes: {patterns}"
        yield ValidationError(error)
    else:
        error = "Additional properties are not allowed (%s %s unexpected)"
        yield ValidationError(error % extras_msg(sorted(extras, key=str)))
def items(validator, items, instance, schema):
    """
    Validate 2020-12 ``items`` against the elements that follow any
    ``prefixItems``.
    """
    if not validator.is_type(instance, "array"):
        return

    prefix_len = len(schema.get("prefixItems", []))
    count = len(instance)
    surplus = count - prefix_len
    if surplus <= 0:
        return

    if items is False:
        # Report the offending tail; a single extra element is shown bare.
        offending = (
            instance[prefix_len] if surplus == 1 else instance[prefix_len:]
        )
        noun = "item" if prefix_len == 1 else "items"
        yield ValidationError(
            f"Expected at most {prefix_len} {noun} but found {surplus} "
            f"extra: {offending!r}",
        )
        return

    for position in range(prefix_len, count):
        yield from validator.descend(
            instance=instance[position],
            schema=items,
            path=position,
        )


def const(validator, const, instance, schema):
    """Validate ``const``: the instance must equal the given value."""
    if equal(instance, const):
        return
    yield ValidationError(f"{const!r} was expected")


def contains(validator, contains, instance, schema):
    """
    Validate ``contains`` together with ``minContains``/``maxContains``.
    """
    if not validator.is_type(instance, "array"):
        return

    min_contains = schema.get("minContains", 1)
    max_contains = schema.get("maxContains", len(instance))
    subvalidator = validator.evolve(schema=contains)

    match_count = 0
    for element in instance:
        if not subvalidator.is_valid(element):
            continue
        match_count += 1
        if match_count > max_contains:
            yield ValidationError(
                "Too many items match the given schema "
                f"(expected at most {max_contains})",
                validator="maxContains",
                validator_value=max_contains,
            )
            return

    if match_count >= min_contains:
        return

    if not match_count:
        yield ValidationError(
            f"{instance!r} does not contain items "
            "matching the given schema",
        )
    else:
        yield ValidationError(
            "Too few items match the given schema (expected at least "
            f"{min_contains} but only {match_count} matched)",
            validator="minContains",
            validator_value=min_contains,
        )


def exclusiveMinimum(validator, minimum, instance, schema):
    """Validate ``exclusiveMinimum``: numbers must be strictly greater."""
    if not validator.is_type(instance, "number"):
        return

    if instance <= minimum:
        yield ValidationError(
            f"{instance!r} is less than or equal to "
            f"the minimum of {minimum!r}",
        )


def exclusiveMaximum(validator, maximum, instance, schema):
    """Validate ``exclusiveMaximum``: numbers must be strictly smaller."""
    if not validator.is_type(instance, "number"):
        return

    if instance >= maximum:
        yield ValidationError(
            f"{instance!r} is greater than or equal "
            f"to the maximum of {maximum!r}",
        )


def minimum(validator, minimum, instance, schema):
    """Validate ``minimum`` (inclusive lower bound) for numbers."""
    if not validator.is_type(instance, "number"):
        return

    if instance < minimum:
        yield ValidationError(
            f"{instance!r} is less than the minimum of {minimum!r}",
        )


def maximum(validator, maximum, instance, schema):
    """Validate ``maximum`` (inclusive upper bound) for numbers."""
    if not validator.is_type(instance, "number"):
        return

    if instance > maximum:
        yield ValidationError(
            f"{instance!r} is greater than the maximum of {maximum!r}",
        )


def multipleOf(validator, dB, instance, schema):
    """Validate ``multipleOf``: the instance must divide evenly by ``dB``."""
    if not validator.is_type(instance, "number"):
        return

    if isinstance(dB, float):
        quotient = instance / dB
        try:
            failed = int(quotient) != quotient
        except OverflowError:
            # `instance / dB` can overflow to infinity when `instance` is
            # enormous and `dB` < 1, and int(inf) raises.  Fractions are
            # exact and cannot overflow; this slower fallback only runs
            # for already-slow enormous integers or Decimals.
            failed = (Fraction(instance) / Fraction(dB)).denominator != 1
    else:
        failed = instance % dB

    if failed:
        yield ValidationError(f"{instance!r} is not a multiple of {dB}")
def minItems(validator, mI, instance, schema):
    """Validate ``minItems``: arrays must have at least ``mI`` elements."""
    if not validator.is_type(instance, "array"):
        return
    if len(instance) < mI:
        hint = "should be non-empty" if mI == 1 else "is too short"
        yield ValidationError(f"{instance!r} {hint}")


def maxItems(validator, mI, instance, schema):
    """Validate ``maxItems``: arrays must have at most ``mI`` elements."""
    if not validator.is_type(instance, "array"):
        return
    if len(instance) > mI:
        hint = "is expected to be empty" if mI == 0 else "is too long"
        yield ValidationError(f"{instance!r} {hint}")


def uniqueItems(validator, uI, instance, schema):
    """Validate ``uniqueItems``: array elements must be pairwise distinct."""
    if (
        uI
        and validator.is_type(instance, "array")
        and not uniq(instance)
    ):
        yield ValidationError(f"{instance!r} has non-unique elements")


def pattern(validator, patrn, instance, schema):
    """Validate ``pattern``: strings must match the regular expression."""
    if not validator.is_type(instance, "string"):
        return
    if not re.search(patrn, instance):
        yield ValidationError(f"{instance!r} does not match {patrn!r}")


def format(validator, format, instance, schema):
    """Validate ``format`` via the validator's configured format checker."""
    checker = validator.format_checker
    if checker is None:
        return
    try:
        checker.check(instance, format)
    except FormatError as error:
        yield ValidationError(error.message, cause=error.cause)


def minLength(validator, mL, instance, schema):
    """Validate ``minLength``: strings must have at least ``mL`` characters."""
    if not validator.is_type(instance, "string"):
        return
    if len(instance) < mL:
        hint = "should be non-empty" if mL == 1 else "is too short"
        yield ValidationError(f"{instance!r} {hint}")


def maxLength(validator, mL, instance, schema):
    """Validate ``maxLength``: strings must have at most ``mL`` characters."""
    if not validator.is_type(instance, "string"):
        return
    if len(instance) > mL:
        hint = "is expected to be empty" if mL == 0 else "is too long"
        yield ValidationError(f"{instance!r} {hint}")


def dependentRequired(validator, dependentRequired, instance, schema):
    """
    Validate ``dependentRequired``: when a trigger property is present,
    every property it names must be present too.
    """
    if not validator.is_type(instance, "object"):
        return

    for trigger, needed in dependentRequired.items():
        if trigger not in instance:
            continue
        for requirement in needed:
            if requirement not in instance:
                yield ValidationError(
                    f"{requirement!r} is a dependency of {trigger!r}",
                )


def dependentSchemas(validator, dependentSchemas, instance, schema):
    """
    Validate ``dependentSchemas``: when a trigger property is present,
    the whole instance must satisfy the associated subschema.
    """
    if not validator.is_type(instance, "object"):
        return

    for trigger, dependency in dependentSchemas.items():
        if trigger not in instance:
            continue
        yield from validator.descend(
            instance, dependency, schema_path=trigger,
        )


def enum(validator, enums, instance, schema):
    """Validate ``enum``: the instance must equal one of the listed values."""
    for candidate in enums:
        if equal(candidate, instance):
            return
    yield ValidationError(f"{instance!r} is not one of {enums!r}")


def ref(validator, ref, instance, schema):
    """Validate ``$ref`` by resolving and applying the referenced schema."""
    yield from validator._validate_reference(ref=ref, instance=instance)


def dynamicRef(validator, dynamicRef, instance, schema):
    """Validate ``$dynamicRef`` by resolving and applying the referenced schema."""
    yield from validator._validate_reference(ref=dynamicRef, instance=instance)


def type(validator, types, instance, schema):
    """Validate ``type``: the instance must match one of the named types."""
    types = ensure_list(types)

    if any(validator.is_type(instance, t) for t in types):
        return
    reprs = ", ".join(repr(t) for t in types)
    yield ValidationError(f"{instance!r} is not of type {reprs}")


def properties(validator, properties, instance, schema):
    """Validate ``properties``: present properties must satisfy their subschemas."""
    if not validator.is_type(instance, "object"):
        return

    for name, subschema in properties.items():
        if name not in instance:
            continue
        yield from validator.descend(
            instance[name],
            subschema,
            path=name,
            schema_path=name,
        )


def required(validator, required, instance, schema):
    """Validate ``required``: every listed property must be present."""
    if not validator.is_type(instance, "object"):
        return
    for name in required:
        if name not in instance:
            yield ValidationError(f"{name!r} is a required property")


def minProperties(validator, mP, instance, schema):
    """Validate ``minProperties``: objects need at least ``mP`` properties."""
    if not validator.is_type(instance, "object"):
        return
    if len(instance) < mP:
        hint = (
            "should be non-empty" if mP == 1
            else "does not have enough properties"
        )
        yield ValidationError(f"{instance!r} {hint}")
def maxProperties(validator, mP, instance, schema):
    """
    Validate ``maxProperties``: objects may have at most ``mP`` properties.

    The previous body re-tested ``validator.is_type(instance, "object")``
    inside the size comparison even though the guard above had already
    returned for non-objects; that redundant check is removed.
    """
    if not validator.is_type(instance, "object"):
        return
    if len(instance) > mP:
        message = (
            "is expected to be empty" if mP == 0
            else "has too many properties"
        )
        yield ValidationError(f"{instance!r} {message}")


def allOf(validator, allOf, instance, schema):
    """Validate ``allOf``: the instance must satisfy every subschema."""
    for index, subschema in enumerate(allOf):
        yield from validator.descend(instance, subschema, schema_path=index)


def anyOf(validator, anyOf, instance, schema):
    """Validate ``anyOf``: the instance must satisfy at least one subschema."""
    all_errors = []
    for index, subschema in enumerate(anyOf):
        errs = list(validator.descend(instance, subschema, schema_path=index))
        if not errs:
            break
        all_errors.extend(errs)
    else:
        # No subschema accepted the instance; surface every collected error.
        yield ValidationError(
            f"{instance!r} is not valid under any of the given schemas",
            context=all_errors,
        )


def oneOf(validator, oneOf, instance, schema):
    """Validate ``oneOf``: the instance must satisfy exactly one subschema."""
    subschemas = enumerate(oneOf)
    all_errors = []
    for index, subschema in subschemas:
        errs = list(validator.descend(instance, subschema, schema_path=index))
        if not errs:
            first_valid = subschema
            break
        all_errors.extend(errs)
    else:
        yield ValidationError(
            f"{instance!r} is not valid under any of the given schemas",
            context=all_errors,
        )

    # `subschemas` is a shared iterator: the comprehension below resumes
    # AFTER the first valid subschema, so any further match means the
    # instance is valid under more than one subschema.
    more_valid = [
        each for _, each in subschemas
        if validator.evolve(schema=each).is_valid(instance)
    ]
    if more_valid:
        more_valid.append(first_valid)
        reprs = ", ".join(repr(schema) for schema in more_valid)
        yield ValidationError(f"{instance!r} is valid under each of {reprs}")


def not_(validator, not_schema, instance, schema):
    """Validate ``not``: the instance must NOT satisfy the subschema."""
    if validator.evolve(schema=not_schema).is_valid(instance):
        message = f"{instance!r} should not be valid under {not_schema!r}"
        yield ValidationError(message)


def if_(validator, if_schema, instance, schema):
    """
    Validate ``if``/``then``/``else``: apply ``then`` when ``if`` matches,
    otherwise apply ``else`` when present.
    """
    if validator.evolve(schema=if_schema).is_valid(instance):
        if "then" in schema:
            then = schema["then"]
            yield from validator.descend(instance, then, schema_path="then")
    elif "else" in schema:
        else_ = schema["else"]
        yield from validator.descend(instance, else_, schema_path="else")


def unevaluatedItems(validator, unevaluatedItems, instance, schema):
    """
    Validate 2020-12 ``unevaluatedItems`` against array items that no
    other keyword in (or referenced from) this schema evaluated.
    """
    if not validator.is_type(instance, "array"):
        return
    evaluated_item_indexes = find_evaluated_item_indexes_by_schema(
        validator, instance, schema,
    )
    unevaluated_items = [
        item for index, item in enumerate(instance)
        if index not in evaluated_item_indexes
    ]
    if unevaluated_items:
        error = "Unevaluated items are not allowed (%s %s unexpected)"
        yield ValidationError(error % extras_msg(unevaluated_items))


def unevaluatedProperties(validator, unevaluatedProperties, instance, schema):
    """
    Validate 2020-12 ``unevaluatedProperties`` against properties that no
    other keyword in (or referenced from) this schema evaluated.
    """
    if not validator.is_type(instance, "object"):
        return
    evaluated_keys = find_evaluated_property_keys_by_schema(
        validator, instance, schema,
    )
    unevaluated_keys = []
    for property in instance:
        if property not in evaluated_keys:
            for _ in validator.descend(
                instance[property],
                unevaluatedProperties,
                path=property,
                schema_path=property,
            ):
                # FIXME: Include context for each unevaluated property
                # indicating why it's invalid under the subschema.
                unevaluated_keys.append(property)  # noqa: PERF401

    if unevaluated_keys:
        if unevaluatedProperties is False:
            error = "Unevaluated properties are not allowed (%s %s unexpected)"
            extras = sorted(unevaluated_keys, key=str)
            yield ValidationError(error % extras_msg(extras))
        else:
            error = (
                "Unevaluated properties are not valid under "
                "the given schema (%s %s unevaluated and invalid)"
            )
            yield ValidationError(error % extras_msg(unevaluated_keys))
+ unevaluated_keys.append(property) # noqa: PERF401 + + if unevaluated_keys: + if unevaluatedProperties is False: + error = "Unevaluated properties are not allowed (%s %s unexpected)" + extras = sorted(unevaluated_keys, key=str) + yield ValidationError(error % extras_msg(extras)) + else: + error = ( + "Unevaluated properties are not valid under " + "the given schema (%s %s unevaluated and invalid)" + ) + yield ValidationError(error % extras_msg(unevaluated_keys)) + + +def prefixItems(validator, prefixItems, instance, schema): + if not validator.is_type(instance, "array"): + return + + for (index, item), subschema in zip(enumerate(instance), prefixItems): + yield from validator.descend( + instance=item, + schema=subschema, + schema_path=index, + path=index, + ) diff --git a/vllm/lib/python3.10/site-packages/jsonschema/_legacy_keywords.py b/vllm/lib/python3.10/site-packages/jsonschema/_legacy_keywords.py new file mode 100644 index 0000000000000000000000000000000000000000..c691589f8f69fdbbc0152728121361665de08f00 --- /dev/null +++ b/vllm/lib/python3.10/site-packages/jsonschema/_legacy_keywords.py @@ -0,0 +1,449 @@ +import re + +from referencing.jsonschema import lookup_recursive_ref + +from jsonschema import _utils +from jsonschema.exceptions import ValidationError + + +def ignore_ref_siblings(schema): + """ + Ignore siblings of ``$ref`` if it is present. + + Otherwise, return all keywords. + + Suitable for use with `create`'s ``applicable_validators`` argument. 
+ """ + ref = schema.get("$ref") + if ref is not None: + return [("$ref", ref)] + else: + return schema.items() + + +def dependencies_draft3(validator, dependencies, instance, schema): + if not validator.is_type(instance, "object"): + return + + for property, dependency in dependencies.items(): + if property not in instance: + continue + + if validator.is_type(dependency, "object"): + yield from validator.descend( + instance, dependency, schema_path=property, + ) + elif validator.is_type(dependency, "string"): + if dependency not in instance: + message = f"{dependency!r} is a dependency of {property!r}" + yield ValidationError(message) + else: + for each in dependency: + if each not in instance: + message = f"{each!r} is a dependency of {property!r}" + yield ValidationError(message) + + +def dependencies_draft4_draft6_draft7( + validator, + dependencies, + instance, + schema, +): + """ + Support for the ``dependencies`` keyword from pre-draft 2019-09. + + In later drafts, the keyword was split into separate + ``dependentRequired`` and ``dependentSchemas`` validators. 
+ """ + if not validator.is_type(instance, "object"): + return + + for property, dependency in dependencies.items(): + if property not in instance: + continue + + if validator.is_type(dependency, "array"): + for each in dependency: + if each not in instance: + message = f"{each!r} is a dependency of {property!r}" + yield ValidationError(message) + else: + yield from validator.descend( + instance, dependency, schema_path=property, + ) + + +def disallow_draft3(validator, disallow, instance, schema): + for disallowed in _utils.ensure_list(disallow): + if validator.evolve(schema={"type": [disallowed]}).is_valid(instance): + message = f"{disallowed!r} is disallowed for {instance!r}" + yield ValidationError(message) + + +def extends_draft3(validator, extends, instance, schema): + if validator.is_type(extends, "object"): + yield from validator.descend(instance, extends) + return + for index, subschema in enumerate(extends): + yield from validator.descend(instance, subschema, schema_path=index) + + +def items_draft3_draft4(validator, items, instance, schema): + if not validator.is_type(instance, "array"): + return + + if validator.is_type(items, "object"): + for index, item in enumerate(instance): + yield from validator.descend(item, items, path=index) + else: + for (index, item), subschema in zip(enumerate(instance), items): + yield from validator.descend( + item, subschema, path=index, schema_path=index, + ) + + +def additionalItems(validator, aI, instance, schema): + if ( + not validator.is_type(instance, "array") + or validator.is_type(schema.get("items", {}), "object") + ): + return + + len_items = len(schema.get("items", [])) + if validator.is_type(aI, "object"): + for index, item in enumerate(instance[len_items:], start=len_items): + yield from validator.descend(item, aI, path=index) + elif not aI and len(instance) > len(schema.get("items", [])): + error = "Additional items are not allowed (%s %s unexpected)" + yield ValidationError( + error % 
def items_draft6_draft7_draft201909(validator, items, instance, schema):
    """Validate drafts 6/7/2019-09 ``items`` (a schema or a positional list)."""
    if not validator.is_type(instance, "array"):
        return

    if validator.is_type(items, "array"):
        for (index, item), subschema in zip(enumerate(instance), items):
            yield from validator.descend(
                item, subschema, path=index, schema_path=index,
            )
        return

    for index, item in enumerate(instance):
        yield from validator.descend(item, items, path=index)


def minimum_draft3_draft4(validator, minimum, instance, schema):
    """Validate drafts 3/4 ``minimum`` with its boolean ``exclusiveMinimum``."""
    if not validator.is_type(instance, "number"):
        return

    if schema.get("exclusiveMinimum", False):
        failed = instance <= minimum
        cmp = "less than or equal to"
    else:
        failed = instance < minimum
        cmp = "less than"

    if failed:
        yield ValidationError(
            f"{instance!r} is {cmp} the minimum of {minimum!r}",
        )


def maximum_draft3_draft4(validator, maximum, instance, schema):
    """Validate drafts 3/4 ``maximum`` with its boolean ``exclusiveMaximum``."""
    if not validator.is_type(instance, "number"):
        return

    if schema.get("exclusiveMaximum", False):
        failed = instance >= maximum
        cmp = "greater than or equal to"
    else:
        failed = instance > maximum
        cmp = "greater than"

    if failed:
        yield ValidationError(
            f"{instance!r} is {cmp} the maximum of {maximum!r}",
        )


def properties_draft3(validator, properties, instance, schema):
    """
    Validate draft 3 ``properties``, where each subschema may carry its
    own boolean ``required`` flag.
    """
    if not validator.is_type(instance, "object"):
        return

    for name, subschema in properties.items():
        if name in instance:
            yield from validator.descend(
                instance[name],
                subschema,
                path=name,
                schema_path=name,
            )
        elif subschema.get("required", False):
            error = ValidationError(f"{name!r} is a required property")
            error._set(
                validator="required",
                validator_value=subschema["required"],
                instance=instance,
                schema=schema,
            )
            error.path.appendleft(name)
            error.schema_path.extend([name, "required"])
            yield error
def type_draft3(validator, types, instance, schema):
    """
    Validate draft 3 ``type``, where each entry may be a type name or a
    whole subschema.
    """
    types = _utils.ensure_list(types)

    all_errors = []
    for index, type in enumerate(types):
        if validator.is_type(type, "object"):
            errors = list(validator.descend(instance, type, schema_path=index))
            if not errors:
                return
            all_errors.extend(errors)
        elif validator.is_type(instance, type):
            return

    reprs = []
    for type in types:
        try:
            reprs.append(repr(type["name"]))
        except Exception:  # noqa: BLE001
            reprs.append(repr(type))
    yield ValidationError(
        f"{instance!r} is not of type {', '.join(reprs)}",
        context=all_errors,
    )


def contains_draft6_draft7(validator, contains, instance, schema):
    """Validate drafts 6/7 ``contains``: at least one element must match."""
    if not validator.is_type(instance, "array"):
        return

    matched = any(
        validator.evolve(schema=contains).is_valid(element)
        for element in instance
    )
    if not matched:
        yield ValidationError(
            f"None of {instance!r} are valid under the given schema",
        )


def recursiveRef(validator, recursiveRef, instance, schema):
    """Validate draft 2019-09 ``$recursiveRef`` by resolving it dynamically."""
    resolved = lookup_recursive_ref(validator._resolver)
    yield from validator.descend(
        instance,
        resolved.contents,
        resolver=resolved.resolver,
    )
+ + Covers all keywords related to unevaluatedItems: items, prefixItems, if, + then, else, contains, unevaluatedItems, allOf, oneOf, anyOf + """ + if validator.is_type(schema, "boolean"): + return [] + evaluated_indexes = [] + + ref = schema.get("$ref") + if ref is not None: + resolved = validator._resolver.lookup(ref) + evaluated_indexes.extend( + find_evaluated_item_indexes_by_schema( + validator.evolve( + schema=resolved.contents, + _resolver=resolved.resolver, + ), + instance, + resolved.contents, + ), + ) + + if "$recursiveRef" in schema: + resolved = lookup_recursive_ref(validator._resolver) + evaluated_indexes.extend( + find_evaluated_item_indexes_by_schema( + validator.evolve( + schema=resolved.contents, + _resolver=resolved.resolver, + ), + instance, + resolved.contents, + ), + ) + + if "items" in schema: + if "additionalItems" in schema: + return list(range(len(instance))) + + if validator.is_type(schema["items"], "object"): + return list(range(len(instance))) + evaluated_indexes += list(range(len(schema["items"]))) + + if "if" in schema: + if validator.evolve(schema=schema["if"]).is_valid(instance): + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, schema["if"], + ) + if "then" in schema: + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, schema["then"], + ) + elif "else" in schema: + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, schema["else"], + ) + + for keyword in ["contains", "unevaluatedItems"]: + if keyword in schema: + for k, v in enumerate(instance): + if validator.evolve(schema=schema[keyword]).is_valid(v): + evaluated_indexes.append(k) + + for keyword in ["allOf", "oneOf", "anyOf"]: + if keyword in schema: + for subschema in schema[keyword]: + errs = next(validator.descend(instance, subschema), None) + if errs is None: + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, subschema, + ) + + return 
evaluated_indexes + + +def unevaluatedItems_draft2019(validator, unevaluatedItems, instance, schema): + if not validator.is_type(instance, "array"): + return + evaluated_item_indexes = find_evaluated_item_indexes_by_schema( + validator, instance, schema, + ) + unevaluated_items = [ + item for index, item in enumerate(instance) + if index not in evaluated_item_indexes + ] + if unevaluated_items: + error = "Unevaluated items are not allowed (%s %s unexpected)" + yield ValidationError(error % _utils.extras_msg(unevaluated_items)) + + +def find_evaluated_property_keys_by_schema(validator, instance, schema): + if validator.is_type(schema, "boolean"): + return [] + evaluated_keys = [] + + ref = schema.get("$ref") + if ref is not None: + resolved = validator._resolver.lookup(ref) + evaluated_keys.extend( + find_evaluated_property_keys_by_schema( + validator.evolve( + schema=resolved.contents, + _resolver=resolved.resolver, + ), + instance, + resolved.contents, + ), + ) + + if "$recursiveRef" in schema: + resolved = lookup_recursive_ref(validator._resolver) + evaluated_keys.extend( + find_evaluated_property_keys_by_schema( + validator.evolve( + schema=resolved.contents, + _resolver=resolved.resolver, + ), + instance, + resolved.contents, + ), + ) + + for keyword in [ + "properties", "additionalProperties", "unevaluatedProperties", + ]: + if keyword in schema: + schema_value = schema[keyword] + if validator.is_type(schema_value, "boolean") and schema_value: + evaluated_keys += instance.keys() + + elif validator.is_type(schema_value, "object"): + for property in schema_value: + if property in instance: + evaluated_keys.append(property) + + if "patternProperties" in schema: + for property in instance: + for pattern in schema["patternProperties"]: + if re.search(pattern, property): + evaluated_keys.append(property) + + if "dependentSchemas" in schema: + for property, subschema in schema["dependentSchemas"].items(): + if property not in instance: + continue + evaluated_keys += 
find_evaluated_property_keys_by_schema( + validator, instance, subschema, + ) + + for keyword in ["allOf", "oneOf", "anyOf"]: + if keyword in schema: + for subschema in schema[keyword]: + errs = next(validator.descend(instance, subschema), None) + if errs is None: + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, subschema, + ) + + if "if" in schema: + if validator.evolve(schema=schema["if"]).is_valid(instance): + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, schema["if"], + ) + if "then" in schema: + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, schema["then"], + ) + elif "else" in schema: + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, schema["else"], + ) + + return evaluated_keys + + +def unevaluatedProperties_draft2019(validator, uP, instance, schema): + if not validator.is_type(instance, "object"): + return + evaluated_keys = find_evaluated_property_keys_by_schema( + validator, instance, schema, + ) + unevaluated_keys = [] + for property in instance: + if property not in evaluated_keys: + for _ in validator.descend( + instance[property], + uP, + path=property, + schema_path=property, + ): + # FIXME: Include context for each unevaluated property + # indicating why it's invalid under the subschema. 
+ unevaluated_keys.append(property) # noqa: PERF401 + + if unevaluated_keys: + if uP is False: + error = "Unevaluated properties are not allowed (%s %s unexpected)" + extras = sorted(unevaluated_keys, key=str) + yield ValidationError(error % _utils.extras_msg(extras)) + else: + error = ( + "Unevaluated properties are not valid under " + "the given schema (%s %s unevaluated and invalid)" + ) + yield ValidationError(error % _utils.extras_msg(unevaluated_keys)) diff --git a/vllm/lib/python3.10/site-packages/jsonschema/_types.py b/vllm/lib/python3.10/site-packages/jsonschema/_types.py new file mode 100644 index 0000000000000000000000000000000000000000..bf25e7e6fea8f77fcc296e31384b404f1b8a7d3b --- /dev/null +++ b/vllm/lib/python3.10/site-packages/jsonschema/_types.py @@ -0,0 +1,200 @@ +from __future__ import annotations + +from typing import Any, Callable, Mapping +import numbers + +from attrs import evolve, field, frozen +from rpds import HashTrieMap + +from jsonschema.exceptions import UndefinedTypeCheck + + +# unfortunately, the type of HashTrieMap is generic, and if used as an attrs +# converter, the generic type is presented to mypy, which then fails to match +# the concrete type of a type checker mapping +# this "do nothing" wrapper presents the correct information to mypy +def _typed_map_converter( + init_val: Mapping[str, Callable[[TypeChecker, Any], bool]], +) -> HashTrieMap[str, Callable[[TypeChecker, Any], bool]]: + return HashTrieMap.convert(init_val) + + +def is_array(checker, instance): + return isinstance(instance, list) + + +def is_bool(checker, instance): + return isinstance(instance, bool) + + +def is_integer(checker, instance): + # bool inherits from int, so ensure bools aren't reported as ints + if isinstance(instance, bool): + return False + return isinstance(instance, int) + + +def is_null(checker, instance): + return instance is None + + +def is_number(checker, instance): + # bool inherits from int, so ensure bools aren't reported as ints + if 
isinstance(instance, bool): + return False + return isinstance(instance, numbers.Number) + + +def is_object(checker, instance): + return isinstance(instance, dict) + + +def is_string(checker, instance): + return isinstance(instance, str) + + +def is_any(checker, instance): + return True + + +@frozen(repr=False) +class TypeChecker: + """ + A :kw:`type` property checker. + + A `TypeChecker` performs type checking for a `Validator`, converting + between the defined JSON Schema types and some associated Python types or + objects. + + Modifying the behavior just mentioned by redefining which Python objects + are considered to be of which JSON Schema types can be done using + `TypeChecker.redefine` or `TypeChecker.redefine_many`, and types can be + removed via `TypeChecker.remove`. Each of these return a new `TypeChecker`. + + Arguments: + + type_checkers: + + The initial mapping of types to their checking functions. + + """ + + _type_checkers: HashTrieMap[ + str, Callable[[TypeChecker, Any], bool], + ] = field(default=HashTrieMap(), converter=_typed_map_converter) + + def __repr__(self): + types = ", ".join(repr(k) for k in sorted(self._type_checkers)) + return f"<{self.__class__.__name__} types={{{types}}}>" + + def is_type(self, instance, type: str) -> bool: + """ + Check if the instance is of the appropriate type. + + Arguments: + + instance: + + The instance to check + + type: + + The name of the type that is expected. + + Raises: + + `jsonschema.exceptions.UndefinedTypeCheck`: + + if ``type`` is unknown to this object. + + """ + try: + fn = self._type_checkers[type] + except KeyError: + raise UndefinedTypeCheck(type) from None + + return fn(self, instance) + + def redefine(self, type: str, fn) -> TypeChecker: + """ + Produce a new checker with the given type redefined. + + Arguments: + + type: + + The name of the type to check. 
+ + fn (collections.abc.Callable): + + A callable taking exactly two parameters - the type + checker calling the function and the instance to check. + The function should return true if instance is of this + type and false otherwise. + + """ + return self.redefine_many({type: fn}) + + def redefine_many(self, definitions=()) -> TypeChecker: + """ + Produce a new checker with the given types redefined. + + Arguments: + + definitions (dict): + + A dictionary mapping types to their checking functions. + + """ + type_checkers = self._type_checkers.update(definitions) + return evolve(self, type_checkers=type_checkers) + + def remove(self, *types) -> TypeChecker: + """ + Produce a new checker with the given types forgotten. + + Arguments: + + types: + + the names of the types to remove. + + Raises: + + `jsonschema.exceptions.UndefinedTypeCheck`: + + if any given type is unknown to this object + + """ + type_checkers = self._type_checkers + for each in types: + try: + type_checkers = type_checkers.remove(each) + except KeyError: + raise UndefinedTypeCheck(each) from None + return evolve(self, type_checkers=type_checkers) + + +draft3_type_checker = TypeChecker( + { + "any": is_any, + "array": is_array, + "boolean": is_bool, + "integer": is_integer, + "object": is_object, + "null": is_null, + "number": is_number, + "string": is_string, + }, +) +draft4_type_checker = draft3_type_checker.remove("any") +draft6_type_checker = draft4_type_checker.redefine( + "integer", + lambda checker, instance: ( + is_integer(checker, instance) + or isinstance(instance, float) and instance.is_integer() + ), +) +draft7_type_checker = draft6_type_checker +draft201909_type_checker = draft7_type_checker +draft202012_type_checker = draft201909_type_checker diff --git a/vllm/lib/python3.10/site-packages/jsonschema/_typing.py b/vllm/lib/python3.10/site-packages/jsonschema/_typing.py new file mode 100644 index 0000000000000000000000000000000000000000..d283dc48d10489baf7516182bfb1b111faf12ba0 --- 
/dev/null +++ b/vllm/lib/python3.10/site-packages/jsonschema/_typing.py @@ -0,0 +1,28 @@ +""" +Some (initially private) typing helpers for jsonschema's types. +""" +from typing import Any, Callable, Iterable, Protocol, Tuple, Union + +import referencing.jsonschema + +from jsonschema.protocols import Validator + + +class SchemaKeywordValidator(Protocol): + def __call__( + self, + validator: Validator, + value: Any, + instance: Any, + schema: referencing.jsonschema.Schema, + ) -> None: + ... + + +id_of = Callable[[referencing.jsonschema.Schema], Union[str, None]] + + +ApplicableValidators = Callable[ + [referencing.jsonschema.Schema], + Iterable[Tuple[str, Any]], +] diff --git a/vllm/lib/python3.10/site-packages/jsonschema/_utils.py b/vllm/lib/python3.10/site-packages/jsonschema/_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..54d28c041c11c22f0e2c6fc2fa35b9a40b0a10f5 --- /dev/null +++ b/vllm/lib/python3.10/site-packages/jsonschema/_utils.py @@ -0,0 +1,351 @@ +from collections.abc import Mapping, MutableMapping, Sequence +from urllib.parse import urlsplit +import itertools +import re + + +class URIDict(MutableMapping): + """ + Dictionary which uses normalized URIs as keys. + """ + + def normalize(self, uri): + return urlsplit(uri).geturl() + + def __init__(self, *args, **kwargs): + self.store = dict() + self.store.update(*args, **kwargs) + + def __getitem__(self, uri): + return self.store[self.normalize(uri)] + + def __setitem__(self, uri, value): + self.store[self.normalize(uri)] = value + + def __delitem__(self, uri): + del self.store[self.normalize(uri)] + + def __iter__(self): + return iter(self.store) + + def __len__(self): # pragma: no cover -- untested, but to be removed + return len(self.store) + + def __repr__(self): # pragma: no cover -- untested, but to be removed + return repr(self.store) + + +class Unset: + """ + An as-of-yet unset attribute or unprovided default parameter. 
+ """ + + def __repr__(self): # pragma: no cover + return "" + + +def format_as_index(container, indices): + """ + Construct a single string containing indexing operations for the indices. + + For example for a container ``bar``, [1, 2, "foo"] -> bar[1][2]["foo"] + + Arguments: + + container (str): + + A word to use for the thing being indexed + + indices (sequence): + + The indices to format. + + """ + if not indices: + return container + return f"{container}[{']['.join(repr(index) for index in indices)}]" + + +def find_additional_properties(instance, schema): + """ + Return the set of additional properties for the given ``instance``. + + Weeds out properties that should have been validated by ``properties`` and + / or ``patternProperties``. + + Assumes ``instance`` is dict-like already. + """ + properties = schema.get("properties", {}) + patterns = "|".join(schema.get("patternProperties", {})) + for property in instance: + if property not in properties: + if patterns and re.search(patterns, property): + continue + yield property + + +def extras_msg(extras): + """ + Create an error message for extra items or properties. + """ + verb = "was" if len(extras) == 1 else "were" + return ", ".join(repr(extra) for extra in extras), verb + + +def ensure_list(thing): + """ + Wrap ``thing`` in a list if it's a single str. + + Otherwise, return it unchanged. + """ + if isinstance(thing, str): + return [thing] + return thing + + +def _mapping_equal(one, two): + """ + Check if two mappings are equal using the semantics of `equal`. + """ + if len(one) != len(two): + return False + return all( + key in two and equal(value, two[key]) + for key, value in one.items() + ) + + +def _sequence_equal(one, two): + """ + Check if two sequences are equal using the semantics of `equal`. 
+ """ + if len(one) != len(two): + return False + return all(equal(i, j) for i, j in zip(one, two)) + + +def equal(one, two): + """ + Check if two things are equal evading some Python type hierarchy semantics. + + Specifically in JSON Schema, evade `bool` inheriting from `int`, + recursing into sequences to do the same. + """ + if one is two: + return True + if isinstance(one, str) or isinstance(two, str): + return one == two + if isinstance(one, Sequence) and isinstance(two, Sequence): + return _sequence_equal(one, two) + if isinstance(one, Mapping) and isinstance(two, Mapping): + return _mapping_equal(one, two) + return unbool(one) == unbool(two) + + +def unbool(element, true=object(), false=object()): + """ + A hack to make True and 1 and False and 0 unique for ``uniq``. + """ + if element is True: + return true + elif element is False: + return false + return element + + +def uniq(container): + """ + Check if all of a container's elements are unique. + + Tries to rely on the container being recursively sortable, or otherwise + falls back on (slow) brute force. + """ + try: + sort = sorted(unbool(i) for i in container) + sliced = itertools.islice(sort, 1, None) + + for i, j in zip(sort, sliced): + if equal(i, j): + return False + + except (NotImplementedError, TypeError): + seen = [] + for e in container: + e = unbool(e) + + for i in seen: + if equal(i, e): + return False + + seen.append(e) + return True + + +def find_evaluated_item_indexes_by_schema(validator, instance, schema): + """ + Get all indexes of items that get evaluated under the current schema. 
+ + Covers all keywords related to unevaluatedItems: items, prefixItems, if, + then, else, contains, unevaluatedItems, allOf, oneOf, anyOf + """ + if validator.is_type(schema, "boolean"): + return [] + evaluated_indexes = [] + + if "items" in schema: + return list(range(len(instance))) + + ref = schema.get("$ref") + if ref is not None: + resolved = validator._resolver.lookup(ref) + evaluated_indexes.extend( + find_evaluated_item_indexes_by_schema( + validator.evolve( + schema=resolved.contents, + _resolver=resolved.resolver, + ), + instance, + resolved.contents, + ), + ) + + dynamicRef = schema.get("$dynamicRef") + if dynamicRef is not None: + resolved = validator._resolver.lookup(dynamicRef) + evaluated_indexes.extend( + find_evaluated_item_indexes_by_schema( + validator.evolve( + schema=resolved.contents, + _resolver=resolved.resolver, + ), + instance, + resolved.contents, + ), + ) + + if "prefixItems" in schema: + evaluated_indexes += list(range(len(schema["prefixItems"]))) + + if "if" in schema: + if validator.evolve(schema=schema["if"]).is_valid(instance): + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, schema["if"], + ) + if "then" in schema: + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, schema["then"], + ) + elif "else" in schema: + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, schema["else"], + ) + + for keyword in ["contains", "unevaluatedItems"]: + if keyword in schema: + for k, v in enumerate(instance): + if validator.evolve(schema=schema[keyword]).is_valid(v): + evaluated_indexes.append(k) + + for keyword in ["allOf", "oneOf", "anyOf"]: + if keyword in schema: + for subschema in schema[keyword]: + errs = next(validator.descend(instance, subschema), None) + if errs is None: + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, subschema, + ) + + return evaluated_indexes + + +def 
find_evaluated_property_keys_by_schema(validator, instance, schema): + """ + Get all keys of items that get evaluated under the current schema. + + Covers all keywords related to unevaluatedProperties: properties, + additionalProperties, unevaluatedProperties, patternProperties, + dependentSchemas, allOf, oneOf, anyOf, if, then, else + """ + if validator.is_type(schema, "boolean"): + return [] + evaluated_keys = [] + + ref = schema.get("$ref") + if ref is not None: + resolved = validator._resolver.lookup(ref) + evaluated_keys.extend( + find_evaluated_property_keys_by_schema( + validator.evolve( + schema=resolved.contents, + _resolver=resolved.resolver, + ), + instance, + resolved.contents, + ), + ) + + dynamicRef = schema.get("$dynamicRef") + if dynamicRef is not None: + resolved = validator._resolver.lookup(dynamicRef) + evaluated_keys.extend( + find_evaluated_property_keys_by_schema( + validator.evolve( + schema=resolved.contents, + _resolver=resolved.resolver, + ), + instance, + resolved.contents, + ), + ) + + for keyword in [ + "properties", "additionalProperties", "unevaluatedProperties", + ]: + if keyword in schema: + schema_value = schema[keyword] + if validator.is_type(schema_value, "boolean") and schema_value: + evaluated_keys += instance.keys() + + elif validator.is_type(schema_value, "object"): + for property in schema_value: + if property in instance: + evaluated_keys.append(property) + + if "patternProperties" in schema: + for property in instance: + for pattern in schema["patternProperties"]: + if re.search(pattern, property): + evaluated_keys.append(property) + + if "dependentSchemas" in schema: + for property, subschema in schema["dependentSchemas"].items(): + if property not in instance: + continue + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, subschema, + ) + + for keyword in ["allOf", "oneOf", "anyOf"]: + if keyword in schema: + for subschema in schema[keyword]: + errs = next(validator.descend(instance, 
subschema), None) + if errs is None: + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, subschema, + ) + + if "if" in schema: + if validator.evolve(schema=schema["if"]).is_valid(instance): + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, schema["if"], + ) + if "then" in schema: + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, schema["then"], + ) + elif "else" in schema: + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, schema["else"], + ) + + return evaluated_keys diff --git a/vllm/lib/python3.10/site-packages/jsonschema/benchmarks/contains.py b/vllm/lib/python3.10/site-packages/jsonschema/benchmarks/contains.py new file mode 100644 index 0000000000000000000000000000000000000000..739cd044cceb807b4029dca9447e954214a24809 --- /dev/null +++ b/vllm/lib/python3.10/site-packages/jsonschema/benchmarks/contains.py @@ -0,0 +1,28 @@ +""" +A benchmark for validation of the `contains` keyword. 
+""" + +from pyperf import Runner + +from jsonschema import Draft202012Validator + +schema = { + "type": "array", + "contains": {"const": 37}, +} +validator = Draft202012Validator(schema) + +size = 1000 +beginning = [37] + [0] * (size - 1) +middle = [0] * (size // 2) + [37] + [0] * (size // 2) +end = [0] * (size - 1) + [37] +invalid = [0] * size + + +if __name__ == "__main__": + runner = Runner() + runner.bench_func("baseline", lambda: validator.is_valid([])) + runner.bench_func("beginning", lambda: validator.is_valid(beginning)) + runner.bench_func("middle", lambda: validator.is_valid(middle)) + runner.bench_func("end", lambda: validator.is_valid(end)) + runner.bench_func("invalid", lambda: validator.is_valid(invalid)) diff --git a/vllm/lib/python3.10/site-packages/jsonschema/cli.py b/vllm/lib/python3.10/site-packages/jsonschema/cli.py new file mode 100644 index 0000000000000000000000000000000000000000..cf6298eb0dbbe2015c0b7c7d04ec030962b2928a --- /dev/null +++ b/vllm/lib/python3.10/site-packages/jsonschema/cli.py @@ -0,0 +1,296 @@ +""" +The ``jsonschema`` command line. +""" + +from importlib import metadata +from json import JSONDecodeError +from textwrap import dedent +import argparse +import json +import sys +import traceback +import warnings + +try: + from pkgutil import resolve_name +except ImportError: + from pkgutil_resolve_name import resolve_name # type: ignore[no-redef] + +from attrs import define, field + +from jsonschema.exceptions import SchemaError +from jsonschema.validators import _RefResolver, validator_for + +warnings.warn( + ( + "The jsonschema CLI is deprecated and will be removed in a future " + "version. 
Please use check-jsonschema instead, which can be installed " + "from https://pypi.org/project/check-jsonschema/" + ), + DeprecationWarning, + stacklevel=2, +) + + +class _CannotLoadFile(Exception): + pass + + +@define +class _Outputter: + + _formatter = field() + _stdout = field() + _stderr = field() + + @classmethod + def from_arguments(cls, arguments, stdout, stderr): + if arguments["output"] == "plain": + formatter = _PlainFormatter(arguments["error_format"]) + elif arguments["output"] == "pretty": + formatter = _PrettyFormatter() + return cls(formatter=formatter, stdout=stdout, stderr=stderr) + + def load(self, path): + try: + file = open(path) # noqa: SIM115, PTH123 + except FileNotFoundError as error: + self.filenotfound_error(path=path, exc_info=sys.exc_info()) + raise _CannotLoadFile() from error + + with file: + try: + return json.load(file) + except JSONDecodeError as error: + self.parsing_error(path=path, exc_info=sys.exc_info()) + raise _CannotLoadFile() from error + + def filenotfound_error(self, **kwargs): + self._stderr.write(self._formatter.filenotfound_error(**kwargs)) + + def parsing_error(self, **kwargs): + self._stderr.write(self._formatter.parsing_error(**kwargs)) + + def validation_error(self, **kwargs): + self._stderr.write(self._formatter.validation_error(**kwargs)) + + def validation_success(self, **kwargs): + self._stdout.write(self._formatter.validation_success(**kwargs)) + + +@define +class _PrettyFormatter: + + _ERROR_MSG = dedent( + """\ + ===[{type}]===({path})=== + + {body} + ----------------------------- + """, + ) + _SUCCESS_MSG = "===[SUCCESS]===({path})===\n" + + def filenotfound_error(self, path, exc_info): + return self._ERROR_MSG.format( + path=path, + type="FileNotFoundError", + body=f"{path!r} does not exist.", + ) + + def parsing_error(self, path, exc_info): + exc_type, exc_value, exc_traceback = exc_info + exc_lines = "".join( + traceback.format_exception(exc_type, exc_value, exc_traceback), + ) + return 
self._ERROR_MSG.format( + path=path, + type=exc_type.__name__, + body=exc_lines, + ) + + def validation_error(self, instance_path, error): + return self._ERROR_MSG.format( + path=instance_path, + type=error.__class__.__name__, + body=error, + ) + + def validation_success(self, instance_path): + return self._SUCCESS_MSG.format(path=instance_path) + + +@define +class _PlainFormatter: + + _error_format = field() + + def filenotfound_error(self, path, exc_info): + return f"{path!r} does not exist.\n" + + def parsing_error(self, path, exc_info): + return "Failed to parse {}: {}\n".format( + "" if path == "" else repr(path), + exc_info[1], + ) + + def validation_error(self, instance_path, error): + return self._error_format.format(file_name=instance_path, error=error) + + def validation_success(self, instance_path): + return "" + + +def _resolve_name_with_default(name): + if "." not in name: + name = "jsonschema." + name + return resolve_name(name) + + +parser = argparse.ArgumentParser( + description="JSON Schema Validation CLI", +) +parser.add_argument( + "-i", "--instance", + action="append", + dest="instances", + help=""" + a path to a JSON instance (i.e. filename.json) to validate (may + be specified multiple times). If no instances are provided via this + option, one will be expected on standard input. + """, +) +parser.add_argument( + "-F", "--error-format", + help=""" + the format to use for each validation error message, specified + in a form suitable for str.format. This string will be passed + one formatted object named 'error' for each ValidationError. + Only provide this option when using --output=plain, which is the + default. If this argument is unprovided and --output=plain is + used, a simple default representation will be used. + """, +) +parser.add_argument( + "-o", "--output", + choices=["plain", "pretty"], + default="plain", + help=""" + an output format to use. 
'plain' (default) will produce minimal + text with one line for each error, while 'pretty' will produce + more detailed human-readable output on multiple lines. + """, +) +parser.add_argument( + "-V", "--validator", + type=_resolve_name_with_default, + help=""" + the fully qualified object name of a validator to use, or, for + validators that are registered with jsonschema, simply the name + of the class. + """, +) +parser.add_argument( + "--base-uri", + help=""" + a base URI to assign to the provided schema, even if it does not + declare one (via e.g. $id). This option can be used if you wish to + resolve relative references to a particular URI (or local path) + """, +) +parser.add_argument( + "--version", + action="version", + version=metadata.version("jsonschema"), +) +parser.add_argument( + "schema", + help="the path to a JSON Schema to validate with (i.e. schema.json)", +) + + +def parse_args(args): # noqa: D103 + arguments = vars(parser.parse_args(args=args or ["--help"])) + if arguments["output"] != "plain" and arguments["error_format"]: + raise parser.error( + "--error-format can only be used with --output plain", + ) + if arguments["output"] == "plain" and arguments["error_format"] is None: + arguments["error_format"] = "{error.instance}: {error.message}\n" + return arguments + + +def _validate_instance(instance_path, instance, validator, outputter): + invalid = False + for error in validator.iter_errors(instance): + invalid = True + outputter.validation_error(instance_path=instance_path, error=error) + + if not invalid: + outputter.validation_success(instance_path=instance_path) + return invalid + + +def main(args=sys.argv[1:]): # noqa: D103 + sys.exit(run(arguments=parse_args(args=args))) + + +def run(arguments, stdout=sys.stdout, stderr=sys.stderr, stdin=sys.stdin): # noqa: D103 + outputter = _Outputter.from_arguments( + arguments=arguments, + stdout=stdout, + stderr=stderr, + ) + + try: + schema = outputter.load(arguments["schema"]) + except 
_CannotLoadFile: + return 1 + + Validator = arguments["validator"] + if Validator is None: + Validator = validator_for(schema) + + try: + Validator.check_schema(schema) + except SchemaError as error: + outputter.validation_error( + instance_path=arguments["schema"], + error=error, + ) + return 1 + + if arguments["instances"]: + load, instances = outputter.load, arguments["instances"] + else: + def load(_): + try: + return json.load(stdin) + except JSONDecodeError as error: + outputter.parsing_error( + path="", exc_info=sys.exc_info(), + ) + raise _CannotLoadFile() from error + instances = [""] + + resolver = _RefResolver( + base_uri=arguments["base_uri"], + referrer=schema, + ) if arguments["base_uri"] is not None else None + + validator = Validator(schema, resolver=resolver) + exit_code = 0 + for each in instances: + try: + instance = load(each) + except _CannotLoadFile: + exit_code = 1 + else: + exit_code |= _validate_instance( + instance_path=each, + instance=instance, + validator=validator, + outputter=outputter, + ) + + return exit_code diff --git a/vllm/lib/python3.10/site-packages/jsonschema/exceptions.py b/vllm/lib/python3.10/site-packages/jsonschema/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..78da49fcdf69ec72cfd92b558936d010c10044fc --- /dev/null +++ b/vllm/lib/python3.10/site-packages/jsonschema/exceptions.py @@ -0,0 +1,487 @@ +""" +Validation errors, and some surrounding helpers. 
+""" +from __future__ import annotations + +from collections import defaultdict, deque +from pprint import pformat +from textwrap import dedent, indent +from typing import TYPE_CHECKING, Any, ClassVar +import heapq +import itertools +import warnings + +from attrs import define +from referencing.exceptions import Unresolvable as _Unresolvable + +from jsonschema import _utils + +if TYPE_CHECKING: + from collections.abc import Iterable, Mapping, MutableMapping, Sequence + + from jsonschema import _types + +WEAK_MATCHES: frozenset[str] = frozenset(["anyOf", "oneOf"]) +STRONG_MATCHES: frozenset[str] = frozenset() + +_unset = _utils.Unset() + + +def _pretty(thing: Any, prefix: str): + """ + Format something for an error message as prettily as we currently can. + """ + return indent(pformat(thing, width=72, sort_dicts=False), prefix).lstrip() + + +def __getattr__(name): + if name == "RefResolutionError": + warnings.warn( + _RefResolutionError._DEPRECATION_MESSAGE, + DeprecationWarning, + stacklevel=2, + ) + return _RefResolutionError + raise AttributeError(f"module {__name__} has no attribute {name}") + + +class _Error(Exception): + + _word_for_schema_in_error_message: ClassVar[str] + _word_for_instance_in_error_message: ClassVar[str] + + def __init__( + self, + message: str, + validator: str = _unset, # type: ignore[assignment] + path: Iterable[str | int] = (), + cause: Exception | None = None, + context=(), + validator_value: Any = _unset, + instance: Any = _unset, + schema: Mapping[str, Any] | bool = _unset, # type: ignore[assignment] + schema_path: Iterable[str | int] = (), + parent: _Error | None = None, + type_checker: _types.TypeChecker = _unset, # type: ignore[assignment] + ) -> None: + super().__init__( + message, + validator, + path, + cause, + context, + validator_value, + instance, + schema, + schema_path, + parent, + ) + self.message = message + self.path = self.relative_path = deque(path) + self.schema_path = self.relative_schema_path = deque(schema_path) + 
self.context = list(context) + self.cause = self.__cause__ = cause + self.validator = validator + self.validator_value = validator_value + self.instance = instance + self.schema = schema + self.parent = parent + self._type_checker = type_checker + + for error in context: + error.parent = self + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}: {self.message!r}>" + + def __str__(self) -> str: + essential_for_verbose = ( + self.validator, self.validator_value, self.instance, self.schema, + ) + if any(m is _unset for m in essential_for_verbose): + return self.message + + schema_path = _utils.format_as_index( + container=self._word_for_schema_in_error_message, + indices=list(self.relative_schema_path)[:-1], + ) + instance_path = _utils.format_as_index( + container=self._word_for_instance_in_error_message, + indices=self.relative_path, + ) + prefix = 16 * " " + + return dedent( + f"""\ + {self.message} + + Failed validating {self.validator!r} in {schema_path}: + {_pretty(self.schema, prefix=prefix)} + + On {instance_path}: + {_pretty(self.instance, prefix=prefix)} + """.rstrip(), + ) + + @classmethod + def create_from(cls, other: _Error): + return cls(**other._contents()) + + @property + def absolute_path(self) -> Sequence[str | int]: + parent = self.parent + if parent is None: + return self.relative_path + + path = deque(self.relative_path) + path.extendleft(reversed(parent.absolute_path)) + return path + + @property + def absolute_schema_path(self) -> Sequence[str | int]: + parent = self.parent + if parent is None: + return self.relative_schema_path + + path = deque(self.relative_schema_path) + path.extendleft(reversed(parent.absolute_schema_path)) + return path + + @property + def json_path(self) -> str: + path = "$" + for elem in self.absolute_path: + if isinstance(elem, int): + path += "[" + str(elem) + "]" + else: + path += "." 
+ elem + return path + + def _set( + self, + type_checker: _types.TypeChecker | None = None, + **kwargs: Any, + ) -> None: + if type_checker is not None and self._type_checker is _unset: + self._type_checker = type_checker + + for k, v in kwargs.items(): + if getattr(self, k) is _unset: + setattr(self, k, v) + + def _contents(self): + attrs = ( + "message", "cause", "context", "validator", "validator_value", + "path", "schema_path", "instance", "schema", "parent", + ) + return {attr: getattr(self, attr) for attr in attrs} + + def _matches_type(self) -> bool: + try: + # We ignore this as we want to simply crash if this happens + expected = self.schema["type"] # type: ignore[index] + except (KeyError, TypeError): + return False + + if isinstance(expected, str): + return self._type_checker.is_type(self.instance, expected) + + return any( + self._type_checker.is_type(self.instance, expected_type) + for expected_type in expected + ) + + +class ValidationError(_Error): + """ + An instance was invalid under a provided schema. + """ + + _word_for_schema_in_error_message = "schema" + _word_for_instance_in_error_message = "instance" + + +class SchemaError(_Error): + """ + A schema was invalid under its corresponding metaschema. + """ + + _word_for_schema_in_error_message = "metaschema" + _word_for_instance_in_error_message = "schema" + + +@define(slots=False) +class _RefResolutionError(Exception): + """ + A ref could not be resolved. + """ + + _DEPRECATION_MESSAGE = ( + "jsonschema.exceptions.RefResolutionError is deprecated as of version " + "4.18.0. If you wish to catch potential reference resolution errors, " + "directly catch referencing.exceptions.Unresolvable." 
+ ) + + _cause: Exception + + def __eq__(self, other): + if self.__class__ is not other.__class__: + return NotImplemented # pragma: no cover -- uncovered but deprecated # noqa: E501 + return self._cause == other._cause + + def __str__(self) -> str: + return str(self._cause) + + +class _WrappedReferencingError(_RefResolutionError, _Unresolvable): # pragma: no cover -- partially uncovered but to be removed # noqa: E501 + def __init__(self, cause: _Unresolvable): + object.__setattr__(self, "_wrapped", cause) + + def __eq__(self, other): + if other.__class__ is self.__class__: + return self._wrapped == other._wrapped + elif other.__class__ is self._wrapped.__class__: + return self._wrapped == other + return NotImplemented + + def __getattr__(self, attr): + return getattr(self._wrapped, attr) + + def __hash__(self): + return hash(self._wrapped) + + def __repr__(self): + return f"" + + def __str__(self): + return f"{self._wrapped.__class__.__name__}: {self._wrapped}" + + +class UndefinedTypeCheck(Exception): + """ + A type checker was asked to check a type it did not have registered. + """ + + def __init__(self, type: str) -> None: + self.type = type + + def __str__(self) -> str: + return f"Type {self.type!r} is unknown to this type checker" + + +class UnknownType(Exception): + """ + A validator was asked to validate an instance against an unknown type. + """ + + def __init__(self, type, instance, schema): + self.type = type + self.instance = instance + self.schema = schema + + def __str__(self): + prefix = 16 * " " + + return dedent( + f"""\ + Unknown type {self.type!r} for validator with schema: + {_pretty(self.schema, prefix=prefix)} + + While checking instance: + {_pretty(self.instance, prefix=prefix)} + """.rstrip(), + ) + + +class FormatError(Exception): + """ + Validating a format failed. 
+ """ + + def __init__(self, message, cause=None): + super().__init__(message, cause) + self.message = message + self.cause = self.__cause__ = cause + + def __str__(self): + return self.message + + +class ErrorTree: + """ + ErrorTrees make it easier to check which validations failed. + """ + + _instance = _unset + + def __init__(self, errors: Iterable[ValidationError] = ()): + self.errors: MutableMapping[str, ValidationError] = {} + self._contents: Mapping[str, ErrorTree] = defaultdict(self.__class__) + + for error in errors: + container = self + for element in error.path: + container = container[element] + container.errors[error.validator] = error + + container._instance = error.instance + + def __contains__(self, index: str | int): + """ + Check whether ``instance[index]`` has any errors. + """ + return index in self._contents + + def __getitem__(self, index): + """ + Retrieve the child tree one level down at the given ``index``. + + If the index is not in the instance that this tree corresponds + to and is not known by this tree, whatever error would be raised + by ``instance.__getitem__`` will be propagated (usually this is + some subclass of `LookupError`. + """ + if self._instance is not _unset and index not in self: + self._instance[index] + return self._contents[index] + + def __setitem__(self, index: str | int, value: ErrorTree): + """ + Add an error to the tree at the given ``index``. + + .. deprecated:: v4.20.0 + + Setting items on an `ErrorTree` is deprecated without replacement. + To populate a tree, provide all of its sub-errors when you + construct the tree. + """ + warnings.warn( + "ErrorTree.__setitem__ is deprecated without replacement.", + DeprecationWarning, + stacklevel=2, + ) + self._contents[index] = value # type: ignore[index] + + def __iter__(self): + """ + Iterate (non-recursively) over the indices in the instance with errors. + """ + return iter(self._contents) + + def __len__(self): + """ + Return the `total_errors`. 
+ """ + return self.total_errors + + def __repr__(self): + total = len(self) + errors = "error" if total == 1 else "errors" + return f"<{self.__class__.__name__} ({total} total {errors})>" + + @property + def total_errors(self): + """ + The total number of errors in the entire tree, including children. + """ + child_errors = sum(len(tree) for _, tree in self._contents.items()) + return len(self.errors) + child_errors + + +def by_relevance(weak=WEAK_MATCHES, strong=STRONG_MATCHES): + """ + Create a key function that can be used to sort errors by relevance. + + Arguments: + weak (set): + a collection of validation keywords to consider to be + "weak". If there are two errors at the same level of the + instance and one is in the set of weak validation keywords, + the other error will take priority. By default, :kw:`anyOf` + and :kw:`oneOf` are considered weak keywords and will be + superseded by other same-level validation errors. + + strong (set): + a collection of validation keywords to consider to be + "strong" + + """ + + def relevance(error): + validator = error.validator + return ( # prefer errors which are ... + -len(error.path), # 'deeper' and thereby more specific + error.path, # earlier (for sibling errors) + validator not in weak, # for a non-low-priority keyword + validator in strong, # for a high priority keyword + not error._matches_type(), # at least match the instance's type + ) # otherwise we'll treat them the same + + return relevance + + +relevance = by_relevance() +""" +A key function (e.g. to use with `sorted`) which sorts errors by relevance. + +Example: + +.. code:: python + + sorted(validator.iter_errors(12), key=jsonschema.exceptions.relevance) +""" + + +def best_match(errors, key=relevance): + """ + Try to find an error that appears to be the best match among given errors. + + In general, errors that are higher up in the instance (i.e. 
for which + `ValidationError.path` is shorter) are considered better matches, + since they indicate "more" is wrong with the instance. + + If the resulting match is either :kw:`oneOf` or :kw:`anyOf`, the + *opposite* assumption is made -- i.e. the deepest error is picked, + since these keywords only need to match once, and any other errors + may not be relevant. + + Arguments: + errors (collections.abc.Iterable): + + the errors to select from. Do not provide a mixture of + errors from different validation attempts (i.e. from + different instances or schemas), since it won't produce + sensical output. + + key (collections.abc.Callable): + + the key to use when sorting errors. See `relevance` and + transitively `by_relevance` for more details (the default is + to sort with the defaults of that function). Changing the + default is only useful if you want to change the function + that rates errors but still want the error context descent + done by this function. + + Returns: + the best matching error, or ``None`` if the iterable was empty + + .. note:: + + This function is a heuristic. Its return value may change for a given + set of inputs from version to version if better heuristics are added. + + """ + errors = iter(errors) + best = next(errors, None) + if best is None: + return + best = max(itertools.chain([best], errors), key=key) + + while best.context: + # Calculate the minimum via nsmallest, because we don't recurse if + # all nested errors have the same relevance (i.e. 
if min == max == all) + smallest = heapq.nsmallest(2, best.context, key=key) + if len(smallest) == 2 and key(smallest[0]) == key(smallest[1]): # noqa: PLR2004 + return best + best = smallest[0] + return best diff --git a/vllm/lib/python3.10/site-packages/jsonschema/protocols.py b/vllm/lib/python3.10/site-packages/jsonschema/protocols.py new file mode 100644 index 0000000000000000000000000000000000000000..39e56d0fac02529e4516f2fd792256cee1fc0af7 --- /dev/null +++ b/vllm/lib/python3.10/site-packages/jsonschema/protocols.py @@ -0,0 +1,236 @@ +""" +typing.Protocol classes for jsonschema interfaces. +""" + +# for reference material on Protocols, see +# https://www.python.org/dev/peps/pep-0544/ + +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Any, + ClassVar, + Iterable, + Protocol, + runtime_checkable, +) + +# in order for Sphinx to resolve references accurately from type annotations, +# it needs to see names like `jsonschema.TypeChecker` +# therefore, only import at type-checking time (to avoid circular references), +# but use `jsonschema` for any types which will otherwise not be resolvable +if TYPE_CHECKING: + from collections.abc import Mapping + + import referencing.jsonschema + + from jsonschema import _typing + from jsonschema.exceptions import ValidationError + import jsonschema + import jsonschema.validators + +# For code authors working on the validator protocol, these are the three +# use-cases which should be kept in mind: +# +# 1. As a protocol class, it can be used in type annotations to describe the +# available methods and attributes of a validator +# 2. It is the source of autodoc for the validator documentation +# 3. It is runtime_checkable, meaning that it can be used in isinstance() +# checks. +# +# Since protocols are not base classes, isinstance() checking is limited in +# its capabilities. 
@runtime_checkable
class Validator(Protocol):
    """
    The protocol which all validator classes implement.

    Arguments:

        schema:

            the schema this validator will validate with. The schema
            itself is assumed to be valid -- handing an invalid schema
            to a validator can lead to undefined behavior, so run
            `Validator.check_schema` first if the schema is untrusted.

        registry:

            a schema registry that will be used for looking up JSON
            references

        resolver:

            a resolver that will be used to resolve :kw:`$ref`
            properties (JSON references). One is created automatically
            if not provided.

            .. deprecated:: v4.18.0

                `RefResolver <_RefResolver>` has been deprecated in
                favor of `referencing`, and with it, this argument.

        format_checker:

            if provided, a checker which will be used to assert about
            :kw:`format` properties present in the schema. When
            omitted, *no* format validation happens and :kw:`format`
            is treated as purely informational. Certain formats require
            additional packages to be installed in order to assert
            against instances. Ensure you've installed `jsonschema`
            with its `extra (optional) dependencies ` when invoking
            ``pip``.

    .. deprecated:: v4.12.0

        Subclassing validator classes now explicitly warns this is not
        part of their public API.

    """

    #: An object representing the validator's meta schema (the schema that
    #: describes valid schemas in the given version).
    META_SCHEMA: ClassVar[Mapping]

    #: A mapping of validation keywords (`str`\s) to functions that
    #: validate the keyword with that name. For more information see
    #: `creating-validators`.
    VALIDATORS: ClassVar[Mapping]

    #: A `jsonschema.TypeChecker` that will be used when validating
    #: :kw:`type` keywords in JSON schemas.
    TYPE_CHECKER: ClassVar[jsonschema.TypeChecker]

    #: A `jsonschema.FormatChecker` that will be used when validating
    #: :kw:`format` keywords in JSON schemas.
    FORMAT_CHECKER: ClassVar[jsonschema.FormatChecker]

    #: A function which given a schema returns its ID.
    ID_OF: _typing.id_of

    #: The schema that will be used to validate instances
    schema: Mapping | bool

    def __init__(
        self,
        schema: Mapping | bool,
        registry: referencing.jsonschema.SchemaRegistry,
        format_checker: jsonschema.FormatChecker | None = None,
    ) -> None:
        ...

    @classmethod
    def check_schema(cls, schema: Mapping | bool) -> None:
        """
        Validate the given schema against this validator's `META_SCHEMA`.

        Raises:

            `jsonschema.exceptions.SchemaError`:

                if the schema is invalid

        """

    def is_type(self, instance: Any, type: str) -> bool:
        """
        Check whether the instance is of the given (JSON Schema) type.

        Arguments:

            instance:

                the value to check

            type:

                the name of a known (JSON Schema) type

        Returns:

            whether the instance is of the given type

        Raises:

            `jsonschema.exceptions.UnknownType`:

                if ``type`` is not a known type name

        """

    def is_valid(self, instance: Any) -> bool:
        """
        Check whether the instance is valid under the current `schema`.

        Returns:

            whether the instance is valid or not

        >>> schema = {"maxItems" : 2}
        >>> Draft202012Validator(schema).is_valid([2, 3, 4])
        False

        """

    def iter_errors(self, instance: Any) -> Iterable[ValidationError]:
        r"""
        Lazily yield each validation error found in the given instance.

        >>> schema = {
        ...     "type" : "array",
        ...     "items" : {"enum" : [1, 2, 3]},
        ...     "maxItems" : 2,
        ... }
        >>> v = Draft202012Validator(schema)
        >>> for error in sorted(v.iter_errors([2, 3, 4]), key=str):
        ...     print(error.message)
        4 is not one of [1, 2, 3]
        [2, 3, 4] is too long

        .. deprecated:: v4.0.0

            Calling this function with a second schema argument is deprecated.
            Use `Validator.evolve` instead.
        """

    def validate(self, instance: Any) -> None:
        """
        Check that the instance is valid under the current `schema`.

        Raises:

            `jsonschema.exceptions.ValidationError`:

                if the instance is invalid

        >>> schema = {"maxItems" : 2}
        >>> Draft202012Validator(schema).validate([2, 3, 4])
        Traceback (most recent call last):
            ...
        ValidationError: [2, 3, 4] is too long

        """

    def evolve(self, **kwargs) -> Validator:
        """
        Create a new validator like this one, but with the given changes.

        All other attributes are preserved, so this can be used e.g. to
        obtain a validator with a different schema but with the same
        :kw:`$ref` resolution behavior.

        >>> validator = Draft202012Validator({})
        >>> validator.evolve(schema={"type": "number"})
        Draft202012Validator(schema={'type': 'number'}, format_checker=None)

        The returned object satisfies the validator protocol, but may not
        be of the same concrete class! In particular this occurs
        when a :kw:`$ref` occurs to a schema with a different
        :kw:`$schema` than this one (i.e. for a different draft).

        >>> validator.evolve(
        ...     schema={"$schema": Draft7Validator.META_SCHEMA["$id"]}
        ... )
        Draft7Validator(schema=..., format_checker=None)
        """
representations of the JSON Schema Test Suite tests.
"""
from __future__ import annotations

from contextlib import suppress
from functools import partial
from pathlib import Path
from typing import TYPE_CHECKING, Any
import json
import os
import re
import subprocess
import sys
import unittest

from attrs import field, frozen
from referencing import Registry
import referencing.jsonschema

if TYPE_CHECKING:
    from collections.abc import Iterable, Mapping, Sequence

    import pyperf

from jsonschema.validators import _VALIDATORS
import jsonschema

# Characters collapsed to "_" when building generated test method names.
_DELIMITERS = re.compile(r"[\W\- ]+")


def _find_suite():
    # Locate a checkout of the official JSON-Schema-Test-Suite: either
    # where the JSON_SCHEMA_TEST_SUITE environment variable points, or in
    # a "json" directory alongside the installed jsonschema package.
    root = os.environ.get("JSON_SCHEMA_TEST_SUITE")
    if root is not None:
        return Path(root)

    root = Path(jsonschema.__file__).parent.parent / "json"
    if not root.is_dir():  # pragma: no cover
        raise ValueError(
            (
                "Can't find the JSON-Schema-Test-Suite directory. "
                "Set the 'JSON_SCHEMA_TEST_SUITE' environment "
                "variable or run the tests from alongside a checkout "
                "of the suite."
            ),
        )
    return root


@frozen
class Suite:
    # A checkout of the JSON Schema Test Suite together with a registry of
    # the remote schemas its tests reference.

    _root: Path = field(factory=_find_suite)
    _remotes: referencing.jsonschema.SchemaRegistry = field(init=False)

    def __attrs_post_init__(self):
        # Ask the suite's own helper script for the remote schemas it
        # serves, then register them into a referencing Registry. Two
        # location-independent-identifier documents need to be registered
        # under their specific drafts; everything else defaults to 2020-12.
        jsonschema_suite = self._root.joinpath("bin", "jsonschema_suite")
        argv = [sys.executable, str(jsonschema_suite), "remotes"]
        remotes = subprocess.check_output(argv).decode("utf-8")

        resources = json.loads(remotes)

        li = "http://localhost:1234/locationIndependentIdentifierPre2019.json"
        li4 = "http://localhost:1234/locationIndependentIdentifierDraft4.json"

        registry = Registry().with_resources(
            [
                (
                    li,
                    referencing.jsonschema.DRAFT7.create_resource(
                        contents=resources.pop(li),
                    ),
                ),
                (
                    li4,
                    referencing.jsonschema.DRAFT4.create_resource(
                        contents=resources.pop(li4),
                    ),
                ),
            ],
        ).with_contents(
            resources.items(),
            default_specification=referencing.jsonschema.DRAFT202012,
        )
        # The class is attrs-frozen, so bypass the frozen __setattr__.
        object.__setattr__(self, "_remotes", registry)

    def benchmark(self, runner: pyperf.Runner):  # pragma: no cover
        # Benchmark every registered validator version against the suite.
        for name, Validator in _VALIDATORS.items():
            self.version(name=name).benchmark(
                runner=runner,
                Validator=Validator,
            )

    def version(self, name) -> Version:
        # The tests for a single draft, e.g. name="draft7".
        return Version(
            name=name,
            path=self._root / "tests" / name,
            remotes=self._remotes,
        )


@frozen
class Version:
    # The tests of the suite for a single JSON Schema draft.

    _path: Path
    _remotes: referencing.jsonschema.SchemaRegistry

    name: str

    def benchmark(self, **kwargs):  # pragma: no cover
        for case in self.cases():
            case.benchmark(**kwargs)

    def cases(self) -> Iterable[_Case]:
        return self._cases_in(paths=self._path.glob("*.json"))

    def format_cases(self) -> Iterable[_Case]:
        return self._cases_in(paths=self._path.glob("optional/format/*.json"))

    def optional_cases_of(self, name: str) -> Iterable[_Case]:
        return self._cases_in(paths=[self._path / "optional" / f"{name}.json"])

    def to_unittest_testcase(self, *groups, **kwargs):
        # Dynamically build a unittest.TestCase subclass containing one
        # generated test method per suite test in the given case groups.
        name = kwargs.pop("name", "Test" + self.name.title().replace("-", ""))
        methods = {
            method.__name__: method
            for method in (
                test.to_unittest_method(**kwargs)
                for group in groups
                for case in group
                for test in case.tests
            )
        }
        cls = type(name, (unittest.TestCase,), methods)

        # We're doing crazy things, so if they go wrong, like a function
        # behaving differently on some other interpreter, just make them
        # not happen.
        with suppress(Exception):
            cls.__module__ = _someone_save_us_the_module_of_the_caller()

        return cls

    def _cases_in(self, paths: Iterable[Path]) -> Iterable[_Case]:
        # Each JSON file holds a list of case dictionaries.
        for path in paths:
            for case in json.loads(path.read_text(encoding="utf-8")):
                yield _Case.from_dict(
                    case,
                    version=self,
                    subject=path.stem,
                    remotes=self._remotes,
                )


@frozen
class _Case:
    # One suite case: a schema plus the collection of tests run against it.

    version: Version

    subject: str
    description: str
    schema: Mapping[str, Any] | bool
    tests: list[_Test]
    comment: str | None = None
    specification: Sequence[dict[str, str]] = ()

    @classmethod
    def from_dict(cls, data, remotes, **kwargs):
        # NOTE: mutates ``data`` (update + pop) -- callers pass a freshly
        # parsed dict.
        data.update(kwargs)
        tests = [
            _Test(
                version=data["version"],
                subject=data["subject"],
                case_description=data["description"],
                schema=data["schema"],
                remotes=remotes,
                **test,
            ) for test in data.pop("tests")
        ]
        return cls(tests=tests, **data)

    def benchmark(self, runner: pyperf.Runner, **kwargs):  # pragma: no cover
        for test in self.tests:
            runner.bench_func(
                test.fully_qualified_name,
                partial(test.validate_ignoring_errors, **kwargs),
            )


@frozen(repr=False)
class _Test:
    # A single suite test: data validated against a schema, expected to be
    # valid or not.

    version: Version

    subject: str
    case_description: str
    description: str

    data: Any
    schema: Mapping[str, Any] | bool

    valid: bool

    _remotes: referencing.jsonschema.SchemaRegistry

    comment: str | None = None

    def __repr__(self):  # pragma: no cover
        # NOTE(review): the repr text appears truncated in this copy (an
        # empty f-string); upstream formats the fully-qualified name --
        # confirm against the source distribution.
        return f""

    @property
    def fully_qualified_name(self):  # pragma: no cover
        return " > ".join(  # noqa: FLY002
            [
                self.version.name,
                self.subject,
                self.case_description,
                self.description,
            ],
        )

    def to_unittest_method(self, skip=lambda test: None, **kwargs):
        # Build a test method asserting validity (or invalidity) of this
        # test's data, optionally skipped via the ``skip`` callback.
        if self.valid:
            def fn(this):
                self.validate(**kwargs)
        else:
            def fn(this):
                with this.assertRaises(jsonschema.ValidationError):
                    self.validate(**kwargs)

        fn.__name__ = "_".join(
            [
                "test",
                _DELIMITERS.sub("_", self.subject),
                _DELIMITERS.sub("_", self.case_description),
                _DELIMITERS.sub("_", self.description),
            ],
        )
        reason = skip(self)
        if reason is None or os.environ.get("JSON_SCHEMA_DEBUG", "0") != "0":
            return fn
        elif os.environ.get("JSON_SCHEMA_EXPECTED_FAILURES", "0") != "0":  # pragma: no cover # noqa: E501
            return unittest.expectedFailure(fn)
        else:
            return unittest.skip(reason)(fn)

    def validate(self, Validator, **kwargs):
        Validator.check_schema(self.schema)
        validator = Validator(
            schema=self.schema,
            registry=self._remotes,
            **kwargs,
        )
        if os.environ.get("JSON_SCHEMA_DEBUG", "0") != "0":  # pragma: no cover
            breakpoint()  # noqa: T100
        validator.validate(instance=self.data)

    def validate_ignoring_errors(self, Validator):  # pragma: no cover
        with suppress(jsonschema.ValidationError):
            self.validate(Validator=Validator)


def _someone_save_us_the_module_of_the_caller():
    """
    The FQON of the module 2nd stack frames up from here.

    This is intended to allow us to dynamically return test case classes that
    are indistinguishable from being defined in the module that wants them.

    Otherwise, trial will mis-print the FQON, and copy pasting it won't re-run
    the class that really is running.

    Save us all, this is all so so so so so terrible.
    """

    return sys._getframe(2).f_globals["__name__"]
+ """ + + return sys._getframe(2).f_globals["__name__"] diff --git a/vllm/lib/python3.10/site-packages/jsonschema/tests/fuzz_validate.py b/vllm/lib/python3.10/site-packages/jsonschema/tests/fuzz_validate.py new file mode 100644 index 0000000000000000000000000000000000000000..c12e88bcfe9bfdc0e0ffaab502789a6b585d4be2 --- /dev/null +++ b/vllm/lib/python3.10/site-packages/jsonschema/tests/fuzz_validate.py @@ -0,0 +1,50 @@ +""" +Fuzzing setup for OSS-Fuzz. + +See https://github.com/google/oss-fuzz/tree/master/projects/jsonschema for the +other half of the setup here. +""" +import sys + +from hypothesis import given, strategies + +import jsonschema + +PRIM = strategies.one_of( + strategies.booleans(), + strategies.integers(), + strategies.floats(allow_nan=False, allow_infinity=False), + strategies.text(), +) +DICT = strategies.recursive( + base=strategies.one_of( + strategies.booleans(), + strategies.dictionaries(strategies.text(), PRIM), + ), + extend=lambda inner: strategies.dictionaries(strategies.text(), inner), +) + + +@given(obj1=DICT, obj2=DICT) +def test_schemas(obj1, obj2): + try: + jsonschema.validate(instance=obj1, schema=obj2) + except jsonschema.exceptions.ValidationError: + pass + except jsonschema.exceptions.SchemaError: + pass + + +def main(): + atheris.instrument_all() + atheris.Setup( + sys.argv, + test_schemas.hypothesis.fuzz_one_input, + enable_python_coverage=True, + ) + atheris.Fuzz() + + +if __name__ == "__main__": + import atheris + main() diff --git a/vllm/lib/python3.10/site-packages/jsonschema/tests/test_cli.py b/vllm/lib/python3.10/site-packages/jsonschema/tests/test_cli.py new file mode 100644 index 0000000000000000000000000000000000000000..79d2a1584b85a2020034963c93a3c430baab9542 --- /dev/null +++ b/vllm/lib/python3.10/site-packages/jsonschema/tests/test_cli.py @@ -0,0 +1,907 @@ +from contextlib import redirect_stderr, redirect_stdout +from importlib import metadata +from io import StringIO +from json import JSONDecodeError +from pathlib 
import Path +from textwrap import dedent +from unittest import TestCase +import json +import os +import subprocess +import sys +import tempfile +import warnings + +from jsonschema import Draft4Validator, Draft202012Validator +from jsonschema.exceptions import ( + SchemaError, + ValidationError, + _RefResolutionError, +) +from jsonschema.validators import _LATEST_VERSION, validate + +with warnings.catch_warnings(): + warnings.simplefilter("ignore") + from jsonschema import cli + + +def fake_validator(*errors): + errors = list(reversed(errors)) + + class FakeValidator: + def __init__(self, *args, **kwargs): + pass + + def iter_errors(self, instance): + if errors: + return errors.pop() + return [] # pragma: no cover + + @classmethod + def check_schema(self, schema): + pass + + return FakeValidator + + +def fake_open(all_contents): + def open(path): + contents = all_contents.get(path) + if contents is None: + raise FileNotFoundError(path) + return StringIO(contents) + return open + + +def _message_for(non_json): + try: + json.loads(non_json) + except JSONDecodeError as error: + return str(error) + else: # pragma: no cover + raise RuntimeError("Tried and failed to capture a JSON dump error.") + + +class TestCLI(TestCase): + def run_cli( + self, argv, files=None, stdin=StringIO(), exit_code=0, **override, + ): + arguments = cli.parse_args(argv) + arguments.update(override) + + self.assertFalse(hasattr(cli, "open")) + cli.open = fake_open(files or {}) + try: + stdout, stderr = StringIO(), StringIO() + actual_exit_code = cli.run( + arguments, + stdin=stdin, + stdout=stdout, + stderr=stderr, + ) + finally: + del cli.open + + self.assertEqual( + actual_exit_code, exit_code, msg=dedent( + f""" + Expected an exit code of {exit_code} != {actual_exit_code}. 
+ + stdout: {stdout.getvalue()} + + stderr: {stderr.getvalue()} + """, + ), + ) + return stdout.getvalue(), stderr.getvalue() + + def assertOutputs(self, stdout="", stderr="", **kwargs): + self.assertEqual( + self.run_cli(**kwargs), + (dedent(stdout), dedent(stderr)), + ) + + def test_invalid_instance(self): + error = ValidationError("I am an error!", instance=12) + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_instance=json.dumps(error.instance), + ), + validator=fake_validator([error]), + + argv=["-i", "some_instance", "some_schema"], + + exit_code=1, + stderr="12: I am an error!\n", + ) + + def test_invalid_instance_pretty_output(self): + error = ValidationError("I am an error!", instance=12) + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_instance=json.dumps(error.instance), + ), + validator=fake_validator([error]), + + argv=["-i", "some_instance", "--output", "pretty", "some_schema"], + + exit_code=1, + stderr="""\ + ===[ValidationError]===(some_instance)=== + + I am an error! 
+ ----------------------------- + """, + ) + + def test_invalid_instance_explicit_plain_output(self): + error = ValidationError("I am an error!", instance=12) + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_instance=json.dumps(error.instance), + ), + validator=fake_validator([error]), + + argv=["--output", "plain", "-i", "some_instance", "some_schema"], + + exit_code=1, + stderr="12: I am an error!\n", + ) + + def test_invalid_instance_multiple_errors(self): + instance = 12 + first = ValidationError("First error", instance=instance) + second = ValidationError("Second error", instance=instance) + + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_instance=json.dumps(instance), + ), + validator=fake_validator([first, second]), + + argv=["-i", "some_instance", "some_schema"], + + exit_code=1, + stderr="""\ + 12: First error + 12: Second error + """, + ) + + def test_invalid_instance_multiple_errors_pretty_output(self): + instance = 12 + first = ValidationError("First error", instance=instance) + second = ValidationError("Second error", instance=instance) + + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_instance=json.dumps(instance), + ), + validator=fake_validator([first, second]), + + argv=["-i", "some_instance", "--output", "pretty", "some_schema"], + + exit_code=1, + stderr="""\ + ===[ValidationError]===(some_instance)=== + + First error + ----------------------------- + ===[ValidationError]===(some_instance)=== + + Second error + ----------------------------- + """, + ) + + def test_multiple_invalid_instances(self): + first_instance = 12 + first_errors = [ + ValidationError("An error", instance=first_instance), + ValidationError("Another error", instance=first_instance), + ] + second_instance = "foo" + second_errors = [ValidationError("BOOM", instance=second_instance)] + + self.assertOutputs( + 
files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_first_instance=json.dumps(first_instance), + some_second_instance=json.dumps(second_instance), + ), + validator=fake_validator(first_errors, second_errors), + + argv=[ + "-i", "some_first_instance", + "-i", "some_second_instance", + "some_schema", + ], + + exit_code=1, + stderr="""\ + 12: An error + 12: Another error + foo: BOOM + """, + ) + + def test_multiple_invalid_instances_pretty_output(self): + first_instance = 12 + first_errors = [ + ValidationError("An error", instance=first_instance), + ValidationError("Another error", instance=first_instance), + ] + second_instance = "foo" + second_errors = [ValidationError("BOOM", instance=second_instance)] + + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_first_instance=json.dumps(first_instance), + some_second_instance=json.dumps(second_instance), + ), + validator=fake_validator(first_errors, second_errors), + + argv=[ + "--output", "pretty", + "-i", "some_first_instance", + "-i", "some_second_instance", + "some_schema", + ], + + exit_code=1, + stderr="""\ + ===[ValidationError]===(some_first_instance)=== + + An error + ----------------------------- + ===[ValidationError]===(some_first_instance)=== + + Another error + ----------------------------- + ===[ValidationError]===(some_second_instance)=== + + BOOM + ----------------------------- + """, + ) + + def test_custom_error_format(self): + first_instance = 12 + first_errors = [ + ValidationError("An error", instance=first_instance), + ValidationError("Another error", instance=first_instance), + ] + second_instance = "foo" + second_errors = [ValidationError("BOOM", instance=second_instance)] + + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_first_instance=json.dumps(first_instance), + some_second_instance=json.dumps(second_instance), + ), + validator=fake_validator(first_errors, 
second_errors), + + argv=[ + "--error-format", ":{error.message}._-_.{error.instance}:", + "-i", "some_first_instance", + "-i", "some_second_instance", + "some_schema", + ], + + exit_code=1, + stderr=":An error._-_.12::Another error._-_.12::BOOM._-_.foo:", + ) + + def test_invalid_schema(self): + self.assertOutputs( + files=dict(some_schema='{"type": 12}'), + argv=["some_schema"], + + exit_code=1, + stderr="""\ + 12: 12 is not valid under any of the given schemas + """, + ) + + def test_invalid_schema_pretty_output(self): + schema = {"type": 12} + + with self.assertRaises(SchemaError) as e: + validate(schema=schema, instance="") + error = str(e.exception) + + self.assertOutputs( + files=dict(some_schema=json.dumps(schema)), + argv=["--output", "pretty", "some_schema"], + + exit_code=1, + stderr=( + "===[SchemaError]===(some_schema)===\n\n" + + str(error) + + "\n-----------------------------\n" + ), + ) + + def test_invalid_schema_multiple_errors(self): + self.assertOutputs( + files=dict(some_schema='{"type": 12, "items": 57}'), + argv=["some_schema"], + + exit_code=1, + stderr="""\ + 57: 57 is not of type 'object', 'boolean' + """, + ) + + def test_invalid_schema_multiple_errors_pretty_output(self): + schema = {"type": 12, "items": 57} + + with self.assertRaises(SchemaError) as e: + validate(schema=schema, instance="") + error = str(e.exception) + + self.assertOutputs( + files=dict(some_schema=json.dumps(schema)), + argv=["--output", "pretty", "some_schema"], + + exit_code=1, + stderr=( + "===[SchemaError]===(some_schema)===\n\n" + + str(error) + + "\n-----------------------------\n" + ), + ) + + def test_invalid_schema_with_invalid_instance(self): + """ + "Validating" an instance that's invalid under an invalid schema + just shows the schema error. 
+ """ + self.assertOutputs( + files=dict( + some_schema='{"type": 12, "minimum": 30}', + some_instance="13", + ), + argv=["-i", "some_instance", "some_schema"], + + exit_code=1, + stderr="""\ + 12: 12 is not valid under any of the given schemas + """, + ) + + def test_invalid_schema_with_invalid_instance_pretty_output(self): + instance, schema = 13, {"type": 12, "minimum": 30} + + with self.assertRaises(SchemaError) as e: + validate(schema=schema, instance=instance) + error = str(e.exception) + + self.assertOutputs( + files=dict( + some_schema=json.dumps(schema), + some_instance=json.dumps(instance), + ), + argv=["--output", "pretty", "-i", "some_instance", "some_schema"], + + exit_code=1, + stderr=( + "===[SchemaError]===(some_schema)===\n\n" + + str(error) + + "\n-----------------------------\n" + ), + ) + + def test_invalid_instance_continues_with_the_rest(self): + self.assertOutputs( + files=dict( + some_schema='{"minimum": 30}', + first_instance="not valid JSON!", + second_instance="12", + ), + argv=[ + "-i", "first_instance", + "-i", "second_instance", + "some_schema", + ], + + exit_code=1, + stderr="""\ + Failed to parse 'first_instance': {} + 12: 12 is less than the minimum of 30 + """.format(_message_for("not valid JSON!")), + ) + + def test_custom_error_format_applies_to_schema_errors(self): + instance, schema = 13, {"type": 12, "minimum": 30} + + with self.assertRaises(SchemaError): + validate(schema=schema, instance=instance) + + self.assertOutputs( + files=dict(some_schema=json.dumps(schema)), + + argv=[ + "--error-format", ":{error.message}._-_.{error.instance}:", + "some_schema", + ], + + exit_code=1, + stderr=":12 is not valid under any of the given schemas._-_.12:", + ) + + def test_instance_is_invalid_JSON(self): + instance = "not valid JSON!" 
+ + self.assertOutputs( + files=dict(some_schema="{}", some_instance=instance), + argv=["-i", "some_instance", "some_schema"], + + exit_code=1, + stderr=f"""\ + Failed to parse 'some_instance': {_message_for(instance)} + """, + ) + + def test_instance_is_invalid_JSON_pretty_output(self): + stdout, stderr = self.run_cli( + files=dict( + some_schema="{}", + some_instance="not valid JSON!", + ), + + argv=["--output", "pretty", "-i", "some_instance", "some_schema"], + + exit_code=1, + ) + self.assertFalse(stdout) + self.assertIn( + "(some_instance)===\n\nTraceback (most recent call last):\n", + stderr, + ) + self.assertNotIn("some_schema", stderr) + + def test_instance_is_invalid_JSON_on_stdin(self): + instance = "not valid JSON!" + + self.assertOutputs( + files=dict(some_schema="{}"), + stdin=StringIO(instance), + + argv=["some_schema"], + + exit_code=1, + stderr=f"""\ + Failed to parse : {_message_for(instance)} + """, + ) + + def test_instance_is_invalid_JSON_on_stdin_pretty_output(self): + stdout, stderr = self.run_cli( + files=dict(some_schema="{}"), + stdin=StringIO("not valid JSON!"), + + argv=["--output", "pretty", "some_schema"], + + exit_code=1, + ) + self.assertFalse(stdout) + self.assertIn( + "()===\n\nTraceback (most recent call last):\n", + stderr, + ) + self.assertNotIn("some_schema", stderr) + + def test_schema_is_invalid_JSON(self): + schema = "not valid JSON!" 
+ + self.assertOutputs( + files=dict(some_schema=schema), + + argv=["some_schema"], + + exit_code=1, + stderr=f"""\ + Failed to parse 'some_schema': {_message_for(schema)} + """, + ) + + def test_schema_is_invalid_JSON_pretty_output(self): + stdout, stderr = self.run_cli( + files=dict(some_schema="not valid JSON!"), + + argv=["--output", "pretty", "some_schema"], + + exit_code=1, + ) + self.assertFalse(stdout) + self.assertIn( + "(some_schema)===\n\nTraceback (most recent call last):\n", + stderr, + ) + + def test_schema_and_instance_are_both_invalid_JSON(self): + """ + Only the schema error is reported, as we abort immediately. + """ + schema, instance = "not valid JSON!", "also not valid JSON!" + self.assertOutputs( + files=dict(some_schema=schema, some_instance=instance), + + argv=["some_schema"], + + exit_code=1, + stderr=f"""\ + Failed to parse 'some_schema': {_message_for(schema)} + """, + ) + + def test_schema_and_instance_are_both_invalid_JSON_pretty_output(self): + """ + Only the schema error is reported, as we abort immediately. + """ + stdout, stderr = self.run_cli( + files=dict( + some_schema="not valid JSON!", + some_instance="also not valid JSON!", + ), + + argv=["--output", "pretty", "-i", "some_instance", "some_schema"], + + exit_code=1, + ) + self.assertFalse(stdout) + self.assertIn( + "(some_schema)===\n\nTraceback (most recent call last):\n", + stderr, + ) + self.assertNotIn("some_instance", stderr) + + def test_instance_does_not_exist(self): + self.assertOutputs( + files=dict(some_schema="{}"), + argv=["-i", "nonexisting_instance", "some_schema"], + + exit_code=1, + stderr="""\ + 'nonexisting_instance' does not exist. 
+ """, + ) + + def test_instance_does_not_exist_pretty_output(self): + self.assertOutputs( + files=dict(some_schema="{}"), + argv=[ + "--output", "pretty", + "-i", "nonexisting_instance", + "some_schema", + ], + + exit_code=1, + stderr="""\ + ===[FileNotFoundError]===(nonexisting_instance)=== + + 'nonexisting_instance' does not exist. + ----------------------------- + """, + ) + + def test_schema_does_not_exist(self): + self.assertOutputs( + argv=["nonexisting_schema"], + + exit_code=1, + stderr="'nonexisting_schema' does not exist.\n", + ) + + def test_schema_does_not_exist_pretty_output(self): + self.assertOutputs( + argv=["--output", "pretty", "nonexisting_schema"], + + exit_code=1, + stderr="""\ + ===[FileNotFoundError]===(nonexisting_schema)=== + + 'nonexisting_schema' does not exist. + ----------------------------- + """, + ) + + def test_neither_instance_nor_schema_exist(self): + self.assertOutputs( + argv=["-i", "nonexisting_instance", "nonexisting_schema"], + + exit_code=1, + stderr="'nonexisting_schema' does not exist.\n", + ) + + def test_neither_instance_nor_schema_exist_pretty_output(self): + self.assertOutputs( + argv=[ + "--output", "pretty", + "-i", "nonexisting_instance", + "nonexisting_schema", + ], + + exit_code=1, + stderr="""\ + ===[FileNotFoundError]===(nonexisting_schema)=== + + 'nonexisting_schema' does not exist. 
+ ----------------------------- + """, + ) + + def test_successful_validation(self): + self.assertOutputs( + files=dict(some_schema="{}", some_instance="{}"), + argv=["-i", "some_instance", "some_schema"], + stdout="", + stderr="", + ) + + def test_successful_validation_pretty_output(self): + self.assertOutputs( + files=dict(some_schema="{}", some_instance="{}"), + argv=["--output", "pretty", "-i", "some_instance", "some_schema"], + stdout="===[SUCCESS]===(some_instance)===\n", + stderr="", + ) + + def test_successful_validation_of_stdin(self): + self.assertOutputs( + files=dict(some_schema="{}"), + stdin=StringIO("{}"), + argv=["some_schema"], + stdout="", + stderr="", + ) + + def test_successful_validation_of_stdin_pretty_output(self): + self.assertOutputs( + files=dict(some_schema="{}"), + stdin=StringIO("{}"), + argv=["--output", "pretty", "some_schema"], + stdout="===[SUCCESS]===()===\n", + stderr="", + ) + + def test_successful_validation_of_just_the_schema(self): + self.assertOutputs( + files=dict(some_schema="{}", some_instance="{}"), + argv=["-i", "some_instance", "some_schema"], + stdout="", + stderr="", + ) + + def test_successful_validation_of_just_the_schema_pretty_output(self): + self.assertOutputs( + files=dict(some_schema="{}", some_instance="{}"), + argv=["--output", "pretty", "-i", "some_instance", "some_schema"], + stdout="===[SUCCESS]===(some_instance)===\n", + stderr="", + ) + + def test_successful_validation_via_explicit_base_uri(self): + ref_schema_file = tempfile.NamedTemporaryFile(delete=False) + ref_schema_file.close() + self.addCleanup(os.remove, ref_schema_file.name) + + ref_path = Path(ref_schema_file.name) + ref_path.write_text('{"definitions": {"num": {"type": "integer"}}}') + + schema = f'{{"$ref": "{ref_path.name}#/definitions/num"}}' + + self.assertOutputs( + files=dict(some_schema=schema, some_instance="1"), + argv=[ + "-i", "some_instance", + "--base-uri", ref_path.parent.as_uri() + "/", + "some_schema", + ], + stdout="", + 
stderr="", + ) + + def test_unsuccessful_validation_via_explicit_base_uri(self): + ref_schema_file = tempfile.NamedTemporaryFile(delete=False) + ref_schema_file.close() + self.addCleanup(os.remove, ref_schema_file.name) + + ref_path = Path(ref_schema_file.name) + ref_path.write_text('{"definitions": {"num": {"type": "integer"}}}') + + schema = f'{{"$ref": "{ref_path.name}#/definitions/num"}}' + + self.assertOutputs( + files=dict(some_schema=schema, some_instance='"1"'), + argv=[ + "-i", "some_instance", + "--base-uri", ref_path.parent.as_uri() + "/", + "some_schema", + ], + exit_code=1, + stdout="", + stderr="1: '1' is not of type 'integer'\n", + ) + + def test_nonexistent_file_with_explicit_base_uri(self): + schema = '{"$ref": "someNonexistentFile.json#definitions/num"}' + instance = "1" + + with self.assertRaises(_RefResolutionError) as e: + self.assertOutputs( + files=dict( + some_schema=schema, + some_instance=instance, + ), + argv=[ + "-i", "some_instance", + "--base-uri", Path.cwd().as_uri(), + "some_schema", + ], + ) + error = str(e.exception) + self.assertIn(f"{os.sep}someNonexistentFile.json'", error) + + def test_invalid_explicit_base_uri(self): + schema = '{"$ref": "foo.json#definitions/num"}' + instance = "1" + + with self.assertRaises(_RefResolutionError) as e: + self.assertOutputs( + files=dict( + some_schema=schema, + some_instance=instance, + ), + argv=[ + "-i", "some_instance", + "--base-uri", "not@UR1", + "some_schema", + ], + ) + error = str(e.exception) + self.assertEqual( + error, "unknown url type: 'foo.json'", + ) + + def test_it_validates_using_the_latest_validator_when_unspecified(self): + # There isn't a better way now I can think of to ensure that the + # latest version was used, given that the call to validator_for + # is hidden inside the CLI, so guard that that's the case, and + # this test will have to be updated when versions change until + # we can think of a better way to ensure this behavior. 
+ self.assertIs(Draft202012Validator, _LATEST_VERSION) + + self.assertOutputs( + files=dict(some_schema='{"const": "check"}', some_instance='"a"'), + argv=["-i", "some_instance", "some_schema"], + exit_code=1, + stdout="", + stderr="a: 'check' was expected\n", + ) + + def test_it_validates_using_draft7_when_specified(self): + """ + Specifically, `const` validation applies for Draft 7. + """ + schema = """ + { + "$schema": "http://json-schema.org/draft-07/schema#", + "const": "check" + } + """ + instance = '"foo"' + self.assertOutputs( + files=dict(some_schema=schema, some_instance=instance), + argv=["-i", "some_instance", "some_schema"], + exit_code=1, + stdout="", + stderr="foo: 'check' was expected\n", + ) + + def test_it_validates_using_draft4_when_specified(self): + """ + Specifically, `const` validation *does not* apply for Draft 4. + """ + schema = """ + { + "$schema": "http://json-schema.org/draft-04/schema#", + "const": "check" + } + """ + instance = '"foo"' + self.assertOutputs( + files=dict(some_schema=schema, some_instance=instance), + argv=["-i", "some_instance", "some_schema"], + stdout="", + stderr="", + ) + + +class TestParser(TestCase): + + FakeValidator = fake_validator() + + def test_find_validator_by_fully_qualified_object_name(self): + arguments = cli.parse_args( + [ + "--validator", + "jsonschema.tests.test_cli.TestParser.FakeValidator", + "--instance", "mem://some/instance", + "mem://some/schema", + ], + ) + self.assertIs(arguments["validator"], self.FakeValidator) + + def test_find_validator_in_jsonschema(self): + arguments = cli.parse_args( + [ + "--validator", "Draft4Validator", + "--instance", "mem://some/instance", + "mem://some/schema", + ], + ) + self.assertIs(arguments["validator"], Draft4Validator) + + def cli_output_for(self, *argv): + stdout, stderr = StringIO(), StringIO() + with redirect_stdout(stdout), redirect_stderr(stderr): # noqa: SIM117 + with self.assertRaises(SystemExit): + cli.parse_args(argv) + return stdout.getvalue(), 
stderr.getvalue() + + def test_unknown_output(self): + stdout, stderr = self.cli_output_for( + "--output", "foo", + "mem://some/schema", + ) + self.assertIn("invalid choice: 'foo'", stderr) + self.assertFalse(stdout) + + def test_useless_error_format(self): + stdout, stderr = self.cli_output_for( + "--output", "pretty", + "--error-format", "foo", + "mem://some/schema", + ) + self.assertIn( + "--error-format can only be used with --output plain", + stderr, + ) + self.assertFalse(stdout) + + +class TestCLIIntegration(TestCase): + def test_license(self): + output = subprocess.check_output( + [sys.executable, "-m", "pip", "show", "jsonschema"], + stderr=subprocess.STDOUT, + ) + self.assertIn(b"License: MIT", output) + + def test_version(self): + version = subprocess.check_output( + [sys.executable, "-W", "ignore", "-m", "jsonschema", "--version"], + stderr=subprocess.STDOUT, + ) + version = version.decode("utf-8").strip() + self.assertEqual(version, metadata.version("jsonschema")) + + def test_no_arguments_shows_usage_notes(self): + output = subprocess.check_output( + [sys.executable, "-m", "jsonschema"], + stderr=subprocess.STDOUT, + ) + output_for_help = subprocess.check_output( + [sys.executable, "-m", "jsonschema", "--help"], + stderr=subprocess.STDOUT, + ) + self.assertEqual(output, output_for_help) diff --git a/vllm/lib/python3.10/site-packages/jsonschema/tests/test_deprecations.py b/vllm/lib/python3.10/site-packages/jsonschema/tests/test_deprecations.py new file mode 100644 index 0000000000000000000000000000000000000000..aea922d238944f29b4be67ffbef7ed0b74d250db --- /dev/null +++ b/vllm/lib/python3.10/site-packages/jsonschema/tests/test_deprecations.py @@ -0,0 +1,432 @@ +from contextlib import contextmanager +from io import BytesIO +from unittest import TestCase, mock +import importlib.metadata +import json +import subprocess +import sys +import urllib.request + +import referencing.exceptions + +from jsonschema import FormatChecker, exceptions, protocols, 
validators + + +class TestDeprecations(TestCase): + def test_version(self): + """ + As of v4.0.0, __version__ is deprecated in favor of importlib.metadata. + """ + + message = "Accessing jsonschema.__version__ is deprecated" + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import __version__ + + self.assertEqual(__version__, importlib.metadata.version("jsonschema")) + self.assertEqual(w.filename, __file__) + + def test_validators_ErrorTree(self): + """ + As of v4.0.0, importing ErrorTree from jsonschema.validators is + deprecated in favor of doing so from jsonschema.exceptions. + """ + + message = "Importing ErrorTree from jsonschema.validators is " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema.validators import ErrorTree + + self.assertEqual(ErrorTree, exceptions.ErrorTree) + self.assertEqual(w.filename, __file__) + + def test_import_ErrorTree(self): + """ + As of v4.18.0, importing ErrorTree from the package root is + deprecated in favor of doing so from jsonschema.exceptions. + """ + + message = "Importing ErrorTree directly from the jsonschema package " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import ErrorTree + + self.assertEqual(ErrorTree, exceptions.ErrorTree) + self.assertEqual(w.filename, __file__) + + def test_ErrorTree_setitem(self): + """ + As of v4.20.0, setting items on an ErrorTree is deprecated. + """ + + e = exceptions.ValidationError("some error", path=["foo"]) + tree = exceptions.ErrorTree() + subtree = exceptions.ErrorTree(errors=[e]) + + message = "ErrorTree.__setitem__ is " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + tree["foo"] = subtree + + self.assertEqual(tree["foo"], subtree) + self.assertEqual(w.filename, __file__) + + def test_import_FormatError(self): + """ + As of v4.18.0, importing FormatError from the package root is + deprecated in favor of doing so from jsonschema.exceptions. 
+ """ + + message = "Importing FormatError directly from the jsonschema package " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import FormatError + + self.assertEqual(FormatError, exceptions.FormatError) + self.assertEqual(w.filename, __file__) + + def test_import_Validator(self): + """ + As of v4.19.0, importing Validator from the package root is + deprecated in favor of doing so from jsonschema.protocols. + """ + + message = "Importing Validator directly from the jsonschema package " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import Validator + + self.assertEqual(Validator, protocols.Validator) + self.assertEqual(w.filename, __file__) + + def test_validators_validators(self): + """ + As of v4.0.0, accessing jsonschema.validators.validators is + deprecated. + """ + + message = "Accessing jsonschema.validators.validators is deprecated" + with self.assertWarnsRegex(DeprecationWarning, message) as w: + value = validators.validators + + self.assertEqual(value, validators._VALIDATORS) + self.assertEqual(w.filename, __file__) + + def test_validators_meta_schemas(self): + """ + As of v4.0.0, accessing jsonschema.validators.meta_schemas is + deprecated. + """ + + message = "Accessing jsonschema.validators.meta_schemas is deprecated" + with self.assertWarnsRegex(DeprecationWarning, message) as w: + value = validators.meta_schemas + + self.assertEqual(value, validators._META_SCHEMAS) + self.assertEqual(w.filename, __file__) + + def test_RefResolver_in_scope(self): + """ + As of v4.0.0, RefResolver.in_scope is deprecated. 
+ """ + + resolver = validators._RefResolver.from_schema({}) + message = "jsonschema.RefResolver.in_scope is deprecated " + with self.assertWarnsRegex(DeprecationWarning, message) as w: # noqa: SIM117 + with resolver.in_scope("foo"): + pass + + self.assertEqual(w.filename, __file__) + + def test_Validator_is_valid_two_arguments(self): + """ + As of v4.0.0, calling is_valid with two arguments (to provide a + different schema) is deprecated. + """ + + validator = validators.Draft7Validator({}) + message = "Passing a schema to Validator.is_valid is deprecated " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + result = validator.is_valid("foo", {"type": "number"}) + + self.assertFalse(result) + self.assertEqual(w.filename, __file__) + + def test_Validator_iter_errors_two_arguments(self): + """ + As of v4.0.0, calling iter_errors with two arguments (to provide a + different schema) is deprecated. + """ + + validator = validators.Draft7Validator({}) + message = "Passing a schema to Validator.iter_errors is deprecated " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + error, = validator.iter_errors("foo", {"type": "number"}) + + self.assertEqual(error.validator, "type") + self.assertEqual(w.filename, __file__) + + def test_Validator_resolver(self): + """ + As of v4.18.0, accessing Validator.resolver is deprecated. + """ + + validator = validators.Draft7Validator({}) + message = "Accessing Draft7Validator.resolver is " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + self.assertIsInstance(validator.resolver, validators._RefResolver) + + self.assertEqual(w.filename, __file__) + + def test_RefResolver(self): + """ + As of v4.18.0, RefResolver is fully deprecated. 
+ """ + + message = "jsonschema.RefResolver is deprecated" + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import RefResolver + self.assertEqual(w.filename, __file__) + + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema.validators import RefResolver # noqa: F401, F811 + self.assertEqual(w.filename, __file__) + + def test_RefResolutionError(self): + """ + As of v4.18.0, RefResolutionError is deprecated in favor of directly + catching errors from the referencing library. + """ + + message = "jsonschema.exceptions.RefResolutionError is deprecated" + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import RefResolutionError + + self.assertEqual(RefResolutionError, exceptions._RefResolutionError) + self.assertEqual(w.filename, __file__) + + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema.exceptions import RefResolutionError + + self.assertEqual(RefResolutionError, exceptions._RefResolutionError) + self.assertEqual(w.filename, __file__) + + def test_catching_Unresolvable_directly(self): + """ + This behavior is the intended behavior (i.e. it's not deprecated), but + given we do "tricksy" things in the iterim to wrap exceptions in a + multiple inheritance subclass, we need to be extra sure it works and + stays working. + """ + validator = validators.Draft202012Validator({"$ref": "urn:nothing"}) + + with self.assertRaises(referencing.exceptions.Unresolvable) as e: + validator.validate(12) + + expected = referencing.exceptions.Unresolvable(ref="urn:nothing") + self.assertEqual( + (e.exception, str(e.exception)), + (expected, "Unresolvable: urn:nothing"), + ) + + def test_catching_Unresolvable_via_RefResolutionError(self): + """ + Until RefResolutionError is removed, it is still possible to catch + exceptions from reference resolution using it, even though they may + have been raised by referencing. 
+ """ + with self.assertWarns(DeprecationWarning): + from jsonschema import RefResolutionError + + validator = validators.Draft202012Validator({"$ref": "urn:nothing"}) + + with self.assertRaises(referencing.exceptions.Unresolvable) as u: + validator.validate(12) + + with self.assertRaises(RefResolutionError) as e: + validator.validate(12) + + self.assertEqual( + (e.exception, str(e.exception)), + (u.exception, "Unresolvable: urn:nothing"), + ) + + def test_WrappedReferencingError_hashability(self): + """ + Ensure the wrapped referencing errors are hashable when possible. + """ + with self.assertWarns(DeprecationWarning): + from jsonschema import RefResolutionError + + validator = validators.Draft202012Validator({"$ref": "urn:nothing"}) + + with self.assertRaises(referencing.exceptions.Unresolvable) as u: + validator.validate(12) + + with self.assertRaises(RefResolutionError) as e: + validator.validate(12) + + self.assertIn(e.exception, {u.exception}) + self.assertIn(u.exception, {e.exception}) + + def test_Validator_subclassing(self): + """ + As of v4.12.0, subclassing a validator class produces an explicit + deprecation warning. + + This was never intended to be public API (and some comments over the + years in issues said so, but obviously that's not a great way to make + sure it's followed). + + A future version will explicitly raise an error. + """ + + message = "Subclassing validator classes is " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + class Subclass(validators.Draft202012Validator): + pass + + self.assertEqual(w.filename, __file__) + + with self.assertWarnsRegex(DeprecationWarning, message) as w: + class AnotherSubclass(validators.create(meta_schema={})): + pass + + def test_FormatChecker_cls_checks(self): + """ + As of v4.14.0, FormatChecker.cls_checks is deprecated without + replacement. 
+ """ + + self.addCleanup(FormatChecker.checkers.pop, "boom", None) + + message = "FormatChecker.cls_checks " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + FormatChecker.cls_checks("boom") + + self.assertEqual(w.filename, __file__) + + def test_draftN_format_checker(self): + """ + As of v4.16.0, accessing jsonschema.draftn_format_checker is deprecated + in favor of Validator.FORMAT_CHECKER. + """ + + message = "Accessing jsonschema.draft202012_format_checker is " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import draft202012_format_checker + + self.assertIs( + draft202012_format_checker, + validators.Draft202012Validator.FORMAT_CHECKER, + ) + self.assertEqual(w.filename, __file__) + + message = "Accessing jsonschema.draft201909_format_checker is " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import draft201909_format_checker + + self.assertIs( + draft201909_format_checker, + validators.Draft201909Validator.FORMAT_CHECKER, + ) + self.assertEqual(w.filename, __file__) + + message = "Accessing jsonschema.draft7_format_checker is " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import draft7_format_checker + + self.assertIs( + draft7_format_checker, + validators.Draft7Validator.FORMAT_CHECKER, + ) + self.assertEqual(w.filename, __file__) + + message = "Accessing jsonschema.draft6_format_checker is " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import draft6_format_checker + + self.assertIs( + draft6_format_checker, + validators.Draft6Validator.FORMAT_CHECKER, + ) + self.assertEqual(w.filename, __file__) + + message = "Accessing jsonschema.draft4_format_checker is " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import draft4_format_checker + + self.assertIs( + draft4_format_checker, + validators.Draft4Validator.FORMAT_CHECKER, + ) + self.assertEqual(w.filename, __file__) 
+ + message = "Accessing jsonschema.draft3_format_checker is " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import draft3_format_checker + + self.assertIs( + draft3_format_checker, + validators.Draft3Validator.FORMAT_CHECKER, + ) + self.assertEqual(w.filename, __file__) + + with self.assertRaises(ImportError): + from jsonschema import draft1234_format_checker # noqa: F401 + + def test_import_cli(self): + """ + As of v4.17.0, importing jsonschema.cli is deprecated. + """ + + message = "The jsonschema CLI is deprecated and will be removed " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + import jsonschema.cli + importlib.reload(jsonschema.cli) + + self.assertEqual(w.filename, importlib.__file__) + + def test_cli(self): + """ + As of v4.17.0, the jsonschema CLI is deprecated. + """ + + process = subprocess.run( + [sys.executable, "-m", "jsonschema"], + capture_output=True, + check=True, + ) + self.assertIn(b"The jsonschema CLI is deprecated ", process.stderr) + + def test_automatic_remote_retrieval(self): + """ + Automatic retrieval of remote references is deprecated as of v4.18.0. + """ + ref = "http://bar#/$defs/baz" + schema = {"$defs": {"baz": {"type": "integer"}}} + + if "requests" in sys.modules: # pragma: no cover + self.addCleanup( + sys.modules.__setitem__, "requests", sys.modules["requests"], + ) + sys.modules["requests"] = None + + @contextmanager + def fake_urlopen(request): + self.assertIsInstance(request, urllib.request.Request) + self.assertEqual(request.full_url, "http://bar") + + # Ha ha urllib.request.Request "normalizes" header names and + # Request.get_header does not also normalize them... 
+ (header, value), = request.header_items() + self.assertEqual(header.lower(), "user-agent") + self.assertEqual( + value, "python-jsonschema (deprecated $ref resolution)", + ) + yield BytesIO(json.dumps(schema).encode("utf8")) + + validator = validators.Draft202012Validator({"$ref": ref}) + + message = "Automatically retrieving remote references " + patch = mock.patch.object(urllib.request, "urlopen", new=fake_urlopen) + + with patch, self.assertWarnsRegex(DeprecationWarning, message): + self.assertEqual( + (validator.is_valid({}), validator.is_valid(37)), + (False, True), + ) diff --git a/vllm/lib/python3.10/site-packages/jsonschema/tests/test_exceptions.py b/vllm/lib/python3.10/site-packages/jsonschema/tests/test_exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..69114e182afbd2c221281d14824966034acd2a7f --- /dev/null +++ b/vllm/lib/python3.10/site-packages/jsonschema/tests/test_exceptions.py @@ -0,0 +1,702 @@ +from unittest import TestCase +import textwrap + +from jsonschema import exceptions +from jsonschema.validators import _LATEST_VERSION + + +class TestBestMatch(TestCase): + def best_match_of(self, instance, schema): + errors = list(_LATEST_VERSION(schema).iter_errors(instance)) + msg = f"No errors found for {instance} under {schema!r}!" 
+ self.assertTrue(errors, msg=msg) + + best = exceptions.best_match(iter(errors)) + reversed_best = exceptions.best_match(reversed(errors)) + + self.assertEqual( + best._contents(), + reversed_best._contents(), + f"No consistent best match!\nGot: {best}\n\nThen: {reversed_best}", + ) + return best + + def test_shallower_errors_are_better_matches(self): + schema = { + "properties": { + "foo": { + "minProperties": 2, + "properties": {"bar": {"type": "object"}}, + }, + }, + } + best = self.best_match_of(instance={"foo": {"bar": []}}, schema=schema) + self.assertEqual(best.validator, "minProperties") + + def test_oneOf_and_anyOf_are_weak_matches(self): + """ + A property you *must* match is probably better than one you have to + match a part of. + """ + + schema = { + "minProperties": 2, + "anyOf": [{"type": "string"}, {"type": "number"}], + "oneOf": [{"type": "string"}, {"type": "number"}], + } + best = self.best_match_of(instance={}, schema=schema) + self.assertEqual(best.validator, "minProperties") + + def test_if_the_most_relevant_error_is_anyOf_it_is_traversed(self): + """ + If the most relevant error is an anyOf, then we traverse its context + and select the otherwise *least* relevant error, since in this case + that means the most specific, deep, error inside the instance. + + I.e. since only one of the schemas must match, we look for the most + relevant one. + """ + + schema = { + "properties": { + "foo": { + "anyOf": [ + {"type": "string"}, + {"properties": {"bar": {"type": "array"}}}, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": {"bar": 12}}, schema=schema) + self.assertEqual(best.validator_value, "array") + + def test_no_anyOf_traversal_for_equally_relevant_errors(self): + """ + We don't traverse into an anyOf (as above) if all of its context errors + seem to be equally "wrong" against the instance. 
+ """ + + schema = { + "anyOf": [ + {"type": "string"}, + {"type": "integer"}, + {"type": "object"}, + ], + } + best = self.best_match_of(instance=[], schema=schema) + self.assertEqual(best.validator, "anyOf") + + def test_anyOf_traversal_for_single_equally_relevant_error(self): + """ + We *do* traverse anyOf with a single nested error, even though it is + vacuously equally relevant to itself. + """ + + schema = { + "anyOf": [ + {"type": "string"}, + ], + } + best = self.best_match_of(instance=[], schema=schema) + self.assertEqual(best.validator, "type") + + def test_anyOf_traversal_for_single_sibling_errors(self): + """ + We *do* traverse anyOf with a single subschema that fails multiple + times (e.g. on multiple items). + """ + + schema = { + "anyOf": [ + {"items": {"const": 37}}, + ], + } + best = self.best_match_of(instance=[12, 12], schema=schema) + self.assertEqual(best.validator, "const") + + def test_anyOf_traversal_for_non_type_matching_sibling_errors(self): + """ + We *do* traverse anyOf with multiple subschemas when one does not type + match. + """ + + schema = { + "anyOf": [ + {"type": "object"}, + {"items": {"const": 37}}, + ], + } + best = self.best_match_of(instance=[12, 12], schema=schema) + self.assertEqual(best.validator, "const") + + def test_if_the_most_relevant_error_is_oneOf_it_is_traversed(self): + """ + If the most relevant error is an oneOf, then we traverse its context + and select the otherwise *least* relevant error, since in this case + that means the most specific, deep, error inside the instance. + + I.e. since only one of the schemas must match, we look for the most + relevant one. 
+ """ + + schema = { + "properties": { + "foo": { + "oneOf": [ + {"type": "string"}, + {"properties": {"bar": {"type": "array"}}}, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": {"bar": 12}}, schema=schema) + self.assertEqual(best.validator_value, "array") + + def test_no_oneOf_traversal_for_equally_relevant_errors(self): + """ + We don't traverse into an oneOf (as above) if all of its context errors + seem to be equally "wrong" against the instance. + """ + + schema = { + "oneOf": [ + {"type": "string"}, + {"type": "integer"}, + {"type": "object"}, + ], + } + best = self.best_match_of(instance=[], schema=schema) + self.assertEqual(best.validator, "oneOf") + + def test_oneOf_traversal_for_single_equally_relevant_error(self): + """ + We *do* traverse oneOf with a single nested error, even though it is + vacuously equally relevant to itself. + """ + + schema = { + "oneOf": [ + {"type": "string"}, + ], + } + best = self.best_match_of(instance=[], schema=schema) + self.assertEqual(best.validator, "type") + + def test_oneOf_traversal_for_single_sibling_errors(self): + """ + We *do* traverse oneOf with a single subschema that fails multiple + times (e.g. on multiple items). + """ + + schema = { + "oneOf": [ + {"items": {"const": 37}}, + ], + } + best = self.best_match_of(instance=[12, 12], schema=schema) + self.assertEqual(best.validator, "const") + + def test_oneOf_traversal_for_non_type_matching_sibling_errors(self): + """ + We *do* traverse oneOf with multiple subschemas when one does not type + match. + """ + + schema = { + "oneOf": [ + {"type": "object"}, + {"items": {"const": 37}}, + ], + } + best = self.best_match_of(instance=[12, 12], schema=schema) + self.assertEqual(best.validator, "const") + + def test_if_the_most_relevant_error_is_allOf_it_is_traversed(self): + """ + Now, if the error is allOf, we traverse but select the *most* relevant + error from the context, because all schemas here must match anyways. 
+ """ + + schema = { + "properties": { + "foo": { + "allOf": [ + {"type": "string"}, + {"properties": {"bar": {"type": "array"}}}, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": {"bar": 12}}, schema=schema) + self.assertEqual(best.validator_value, "string") + + def test_nested_context_for_oneOf(self): + """ + We traverse into nested contexts (a oneOf containing an error in a + nested oneOf here). + """ + + schema = { + "properties": { + "foo": { + "oneOf": [ + {"type": "string"}, + { + "oneOf": [ + {"type": "string"}, + { + "properties": { + "bar": {"type": "array"}, + }, + }, + ], + }, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": {"bar": 12}}, schema=schema) + self.assertEqual(best.validator_value, "array") + + def test_it_prioritizes_matching_types(self): + schema = { + "properties": { + "foo": { + "anyOf": [ + {"type": "array", "minItems": 2}, + {"type": "string", "minLength": 10}, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": "bar"}, schema=schema) + self.assertEqual(best.validator, "minLength") + + reordered = { + "properties": { + "foo": { + "anyOf": [ + {"type": "string", "minLength": 10}, + {"type": "array", "minItems": 2}, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": "bar"}, schema=reordered) + self.assertEqual(best.validator, "minLength") + + def test_it_prioritizes_matching_union_types(self): + schema = { + "properties": { + "foo": { + "anyOf": [ + {"type": ["array", "object"], "minItems": 2}, + {"type": ["integer", "string"], "minLength": 10}, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": "bar"}, schema=schema) + self.assertEqual(best.validator, "minLength") + + reordered = { + "properties": { + "foo": { + "anyOf": [ + {"type": "string", "minLength": 10}, + {"type": "array", "minItems": 2}, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": "bar"}, schema=reordered) + self.assertEqual(best.validator, "minLength") + + def test_boolean_schemas(self): + 
schema = {"properties": {"foo": False}} + best = self.best_match_of(instance={"foo": "bar"}, schema=schema) + self.assertIsNone(best.validator) + + def test_one_error(self): + validator = _LATEST_VERSION({"minProperties": 2}) + error, = validator.iter_errors({}) + self.assertEqual( + exceptions.best_match(validator.iter_errors({})).validator, + "minProperties", + ) + + def test_no_errors(self): + validator = _LATEST_VERSION({}) + self.assertIsNone(exceptions.best_match(validator.iter_errors({}))) + + +class TestByRelevance(TestCase): + def test_short_paths_are_better_matches(self): + shallow = exceptions.ValidationError("Oh no!", path=["baz"]) + deep = exceptions.ValidationError("Oh yes!", path=["foo", "bar"]) + match = max([shallow, deep], key=exceptions.relevance) + self.assertIs(match, shallow) + + match = max([deep, shallow], key=exceptions.relevance) + self.assertIs(match, shallow) + + def test_global_errors_are_even_better_matches(self): + shallow = exceptions.ValidationError("Oh no!", path=[]) + deep = exceptions.ValidationError("Oh yes!", path=["foo"]) + + errors = sorted([shallow, deep], key=exceptions.relevance) + self.assertEqual( + [list(error.path) for error in errors], + [["foo"], []], + ) + + errors = sorted([deep, shallow], key=exceptions.relevance) + self.assertEqual( + [list(error.path) for error in errors], + [["foo"], []], + ) + + def test_weak_keywords_are_lower_priority(self): + weak = exceptions.ValidationError("Oh no!", path=[], validator="a") + normal = exceptions.ValidationError("Oh yes!", path=[], validator="b") + + best_match = exceptions.by_relevance(weak="a") + + match = max([weak, normal], key=best_match) + self.assertIs(match, normal) + + match = max([normal, weak], key=best_match) + self.assertIs(match, normal) + + def test_strong_keywords_are_higher_priority(self): + weak = exceptions.ValidationError("Oh no!", path=[], validator="a") + normal = exceptions.ValidationError("Oh yes!", path=[], validator="b") + strong = 
exceptions.ValidationError("Oh fine!", path=[], validator="c") + + best_match = exceptions.by_relevance(weak="a", strong="c") + + match = max([weak, normal, strong], key=best_match) + self.assertIs(match, strong) + + match = max([strong, normal, weak], key=best_match) + self.assertIs(match, strong) + + +class TestErrorTree(TestCase): + def test_it_knows_how_many_total_errors_it_contains(self): + # FIXME: #442 + errors = [ + exceptions.ValidationError("Something", validator=i) + for i in range(8) + ] + tree = exceptions.ErrorTree(errors) + self.assertEqual(tree.total_errors, 8) + + def test_it_contains_an_item_if_the_item_had_an_error(self): + errors = [exceptions.ValidationError("a message", path=["bar"])] + tree = exceptions.ErrorTree(errors) + self.assertIn("bar", tree) + + def test_it_does_not_contain_an_item_if_the_item_had_no_error(self): + errors = [exceptions.ValidationError("a message", path=["bar"])] + tree = exceptions.ErrorTree(errors) + self.assertNotIn("foo", tree) + + def test_keywords_that_failed_appear_in_errors_dict(self): + error = exceptions.ValidationError("a message", validator="foo") + tree = exceptions.ErrorTree([error]) + self.assertEqual(tree.errors, {"foo": error}) + + def test_it_creates_a_child_tree_for_each_nested_path(self): + errors = [ + exceptions.ValidationError("a bar message", path=["bar"]), + exceptions.ValidationError("a bar -> 0 message", path=["bar", 0]), + ] + tree = exceptions.ErrorTree(errors) + self.assertIn(0, tree["bar"]) + self.assertNotIn(1, tree["bar"]) + + def test_children_have_their_errors_dicts_built(self): + e1, e2 = ( + exceptions.ValidationError("1", validator="foo", path=["bar", 0]), + exceptions.ValidationError("2", validator="quux", path=["bar", 0]), + ) + tree = exceptions.ErrorTree([e1, e2]) + self.assertEqual(tree["bar"][0].errors, {"foo": e1, "quux": e2}) + + def test_multiple_errors_with_instance(self): + e1, e2 = ( + exceptions.ValidationError( + "1", + validator="foo", + path=["bar", "bar2"], + 
instance="i1"), + exceptions.ValidationError( + "2", + validator="quux", + path=["foobar", 2], + instance="i2"), + ) + exceptions.ErrorTree([e1, e2]) + + def test_it_does_not_contain_subtrees_that_are_not_in_the_instance(self): + error = exceptions.ValidationError("123", validator="foo", instance=[]) + tree = exceptions.ErrorTree([error]) + + with self.assertRaises(IndexError): + tree[0] + + def test_if_its_in_the_tree_anyhow_it_does_not_raise_an_error(self): + """ + If a keyword refers to a path that isn't in the instance, the + tree still properly returns a subtree for that path. + """ + + error = exceptions.ValidationError( + "a message", validator="foo", instance={}, path=["foo"], + ) + tree = exceptions.ErrorTree([error]) + self.assertIsInstance(tree["foo"], exceptions.ErrorTree) + + def test_iter(self): + e1, e2 = ( + exceptions.ValidationError( + "1", + validator="foo", + path=["bar", "bar2"], + instance="i1"), + exceptions.ValidationError( + "2", + validator="quux", + path=["foobar", 2], + instance="i2"), + ) + tree = exceptions.ErrorTree([e1, e2]) + self.assertEqual(set(tree), {"bar", "foobar"}) + + def test_repr_single(self): + error = exceptions.ValidationError( + "1", + validator="foo", + path=["bar", "bar2"], + instance="i1", + ) + tree = exceptions.ErrorTree([error]) + self.assertEqual(repr(tree), "") + + def test_repr_multiple(self): + e1, e2 = ( + exceptions.ValidationError( + "1", + validator="foo", + path=["bar", "bar2"], + instance="i1"), + exceptions.ValidationError( + "2", + validator="quux", + path=["foobar", 2], + instance="i2"), + ) + tree = exceptions.ErrorTree([e1, e2]) + self.assertEqual(repr(tree), "") + + def test_repr_empty(self): + tree = exceptions.ErrorTree([]) + self.assertEqual(repr(tree), "") + + +class TestErrorInitReprStr(TestCase): + def make_error(self, **kwargs): + defaults = dict( + message="hello", + validator="type", + validator_value="string", + instance=5, + schema={"type": "string"}, + ) + defaults.update(kwargs) + 
return exceptions.ValidationError(**defaults) + + def assertShows(self, expected, **kwargs): + expected = textwrap.dedent(expected).rstrip("\n") + + error = self.make_error(**kwargs) + message_line, _, rest = str(error).partition("\n") + self.assertEqual(message_line, error.message) + self.assertEqual(rest, expected) + + def test_it_calls_super_and_sets_args(self): + error = self.make_error() + self.assertGreater(len(error.args), 1) + + def test_repr(self): + self.assertEqual( + repr(exceptions.ValidationError(message="Hello!")), + "", + ) + + def test_unset_error(self): + error = exceptions.ValidationError("message") + self.assertEqual(str(error), "message") + + kwargs = { + "validator": "type", + "validator_value": "string", + "instance": 5, + "schema": {"type": "string"}, + } + # Just the message should show if any of the attributes are unset + for attr in kwargs: + k = dict(kwargs) + del k[attr] + error = exceptions.ValidationError("message", **k) + self.assertEqual(str(error), "message") + + def test_empty_paths(self): + self.assertShows( + """ + Failed validating 'type' in schema: + {'type': 'string'} + + On instance: + 5 + """, + path=[], + schema_path=[], + ) + + def test_one_item_paths(self): + self.assertShows( + """ + Failed validating 'type' in schema: + {'type': 'string'} + + On instance[0]: + 5 + """, + path=[0], + schema_path=["items"], + ) + + def test_multiple_item_paths(self): + self.assertShows( + """ + Failed validating 'type' in schema['items'][0]: + {'type': 'string'} + + On instance[0]['a']: + 5 + """, + path=[0, "a"], + schema_path=["items", 0, 1], + ) + + def test_uses_pprint(self): + self.assertShows( + """ + Failed validating 'maxLength' in schema: + {0: 0, + 1: 1, + 2: 2, + 3: 3, + 4: 4, + 5: 5, + 6: 6, + 7: 7, + 8: 8, + 9: 9, + 10: 10, + 11: 11, + 12: 12, + 13: 13, + 14: 14, + 15: 15, + 16: 16, + 17: 17, + 18: 18, + 19: 19} + + On instance: + [0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 
17, + 18, + 19, + 20, + 21, + 22, + 23, + 24] + """, + instance=list(range(25)), + schema=dict(zip(range(20), range(20))), + validator="maxLength", + ) + + def test_does_not_reorder_dicts(self): + self.assertShows( + """ + Failed validating 'type' in schema: + {'do': 3, 'not': 7, 'sort': 37, 'me': 73} + + On instance: + {'here': 73, 'too': 37, 'no': 7, 'sorting': 3} + """, + schema={ + "do": 3, + "not": 7, + "sort": 37, + "me": 73, + }, + instance={ + "here": 73, + "too": 37, + "no": 7, + "sorting": 3, + }, + ) + + def test_str_works_with_instances_having_overriden_eq_operator(self): + """ + Check for #164 which rendered exceptions unusable when a + `ValidationError` involved instances with an `__eq__` method + that returned truthy values. + """ + + class DontEQMeBro: + def __eq__(this, other): # pragma: no cover + self.fail("Don't!") + + def __ne__(this, other): # pragma: no cover + self.fail("Don't!") + + instance = DontEQMeBro() + error = exceptions.ValidationError( + "a message", + validator="foo", + instance=instance, + validator_value="some", + schema="schema", + ) + self.assertIn(repr(instance), str(error)) + + +class TestHashable(TestCase): + def test_hashable(self): + {exceptions.ValidationError("")} + {exceptions.SchemaError("")} diff --git a/vllm/lib/python3.10/site-packages/jsonschema/tests/test_format.py b/vllm/lib/python3.10/site-packages/jsonschema/tests/test_format.py new file mode 100644 index 0000000000000000000000000000000000000000..d829f9848f51f882d5a3f9413c80e0dbdcdaf292 --- /dev/null +++ b/vllm/lib/python3.10/site-packages/jsonschema/tests/test_format.py @@ -0,0 +1,91 @@ +""" +Tests for the parts of jsonschema related to the :kw:`format` keyword. 
+""" + +from unittest import TestCase + +from jsonschema import FormatChecker, ValidationError +from jsonschema.exceptions import FormatError +from jsonschema.validators import Draft4Validator + +BOOM = ValueError("Boom!") +BANG = ZeroDivisionError("Bang!") + + +def boom(thing): + if thing == "bang": + raise BANG + raise BOOM + + +class TestFormatChecker(TestCase): + def test_it_can_validate_no_formats(self): + checker = FormatChecker(formats=()) + self.assertFalse(checker.checkers) + + def test_it_raises_a_key_error_for_unknown_formats(self): + with self.assertRaises(KeyError): + FormatChecker(formats=["o noes"]) + + def test_it_can_register_cls_checkers(self): + original = dict(FormatChecker.checkers) + self.addCleanup(FormatChecker.checkers.pop, "boom") + with self.assertWarns(DeprecationWarning): + FormatChecker.cls_checks("boom")(boom) + self.assertEqual( + FormatChecker.checkers, + dict(original, boom=(boom, ())), + ) + + def test_it_can_register_checkers(self): + checker = FormatChecker() + checker.checks("boom")(boom) + self.assertEqual( + checker.checkers, + dict(FormatChecker.checkers, boom=(boom, ())), + ) + + def test_it_catches_registered_errors(self): + checker = FormatChecker() + checker.checks("boom", raises=type(BOOM))(boom) + + with self.assertRaises(FormatError) as cm: + checker.check(instance=12, format="boom") + + self.assertIs(cm.exception.cause, BOOM) + self.assertIs(cm.exception.__cause__, BOOM) + self.assertEqual(str(cm.exception), "12 is not a 'boom'") + + # Unregistered errors should not be caught + with self.assertRaises(type(BANG)): + checker.check(instance="bang", format="boom") + + def test_format_error_causes_become_validation_error_causes(self): + checker = FormatChecker() + checker.checks("boom", raises=ValueError)(boom) + validator = Draft4Validator({"format": "boom"}, format_checker=checker) + + with self.assertRaises(ValidationError) as cm: + validator.validate("BOOM") + + self.assertIs(cm.exception.cause, BOOM) + 
self.assertIs(cm.exception.__cause__, BOOM) + + def test_format_checkers_come_with_defaults(self): + # This is bad :/ but relied upon. + # The docs for quite awhile recommended people do things like + # validate(..., format_checker=FormatChecker()) + # We should change that, but we can't without deprecation... + checker = FormatChecker() + with self.assertRaises(FormatError): + checker.check(instance="not-an-ipv4", format="ipv4") + + def test_repr(self): + checker = FormatChecker(formats=()) + checker.checks("foo")(lambda thing: True) # pragma: no cover + checker.checks("bar")(lambda thing: True) # pragma: no cover + checker.checks("baz")(lambda thing: True) # pragma: no cover + self.assertEqual( + repr(checker), + "", + ) diff --git a/vllm/lib/python3.10/site-packages/jsonschema/tests/test_jsonschema_test_suite.py b/vllm/lib/python3.10/site-packages/jsonschema/tests/test_jsonschema_test_suite.py new file mode 100644 index 0000000000000000000000000000000000000000..282c1369cdd2a3b877ac55a735ad3ad0837ada27 --- /dev/null +++ b/vllm/lib/python3.10/site-packages/jsonschema/tests/test_jsonschema_test_suite.py @@ -0,0 +1,269 @@ +""" +Test runner for the JSON Schema official test suite + +Tests comprehensive correctness of each draft's validator. + +See https://github.com/json-schema-org/JSON-Schema-Test-Suite for details. 
+""" + +import sys + +from jsonschema.tests._suite import Suite +import jsonschema + +SUITE = Suite() +DRAFT3 = SUITE.version(name="draft3") +DRAFT4 = SUITE.version(name="draft4") +DRAFT6 = SUITE.version(name="draft6") +DRAFT7 = SUITE.version(name="draft7") +DRAFT201909 = SUITE.version(name="draft2019-09") +DRAFT202012 = SUITE.version(name="draft2020-12") + + +def skip(message, **kwargs): + def skipper(test): + if all(value == getattr(test, attr) for attr, value in kwargs.items()): + return message + return skipper + + +def missing_format(Validator): + def missing_format(test): # pragma: no cover + schema = test.schema + if ( + schema is True + or schema is False + or "format" not in schema + or schema["format"] in Validator.FORMAT_CHECKER.checkers + or test.valid + ): + return + + return f"Format checker {schema['format']!r} not found." + return missing_format + + +def complex_email_validation(test): + if test.subject != "email": + return + + message = "Complex email validation is (intentionally) unsupported." + return skip( + message=message, + description="an invalid domain", + )(test) or skip( + message=message, + description="an invalid IPv4-address-literal", + )(test) or skip( + message=message, + description="dot after local part is not valid", + )(test) or skip( + message=message, + description="dot before local part is not valid", + )(test) or skip( + message=message, + description="two subsequent dots inside local part are not valid", + )(test) + + +if sys.version_info < (3, 9): # pragma: no cover + message = "Rejecting leading zeros is 3.9+" + allowed_leading_zeros = skip( + message=message, + subject="ipv4", + description="invalid leading zeroes, as they are treated as octals", + ) +else: + def allowed_leading_zeros(test): # pragma: no cover + return + + +def leap_second(test): + message = "Leap seconds are unsupported." 
+ return skip( + message=message, + subject="time", + description="a valid time string with leap second", + )(test) or skip( + message=message, + subject="time", + description="a valid time string with leap second, Zulu", + )(test) or skip( + message=message, + subject="time", + description="a valid time string with leap second with offset", + )(test) or skip( + message=message, + subject="time", + description="valid leap second, positive time-offset", + )(test) or skip( + message=message, + subject="time", + description="valid leap second, negative time-offset", + )(test) or skip( + message=message, + subject="time", + description="valid leap second, large positive time-offset", + )(test) or skip( + message=message, + subject="time", + description="valid leap second, large negative time-offset", + )(test) or skip( + message=message, + subject="time", + description="valid leap second, zero time-offset", + )(test) or skip( + message=message, + subject="date-time", + description="a valid date-time with a leap second, UTC", + )(test) or skip( + message=message, + subject="date-time", + description="a valid date-time with a leap second, with minus offset", + )(test) + + +TestDraft3 = DRAFT3.to_unittest_testcase( + DRAFT3.cases(), + DRAFT3.format_cases(), + DRAFT3.optional_cases_of(name="bignum"), + DRAFT3.optional_cases_of(name="non-bmp-regex"), + DRAFT3.optional_cases_of(name="zeroTerminatedFloats"), + Validator=jsonschema.Draft3Validator, + format_checker=jsonschema.Draft3Validator.FORMAT_CHECKER, + skip=lambda test: ( + missing_format(jsonschema.Draft3Validator)(test) + or complex_email_validation(test) + ), +) + + +TestDraft4 = DRAFT4.to_unittest_testcase( + DRAFT4.cases(), + DRAFT4.format_cases(), + DRAFT4.optional_cases_of(name="bignum"), + DRAFT4.optional_cases_of(name="float-overflow"), + DRAFT4.optional_cases_of(name="id"), + DRAFT4.optional_cases_of(name="non-bmp-regex"), + DRAFT4.optional_cases_of(name="zeroTerminatedFloats"), + 
Validator=jsonschema.Draft4Validator, + format_checker=jsonschema.Draft4Validator.FORMAT_CHECKER, + skip=lambda test: ( + allowed_leading_zeros(test) + or leap_second(test) + or missing_format(jsonschema.Draft4Validator)(test) + or complex_email_validation(test) + ), +) + + +TestDraft6 = DRAFT6.to_unittest_testcase( + DRAFT6.cases(), + DRAFT6.format_cases(), + DRAFT6.optional_cases_of(name="bignum"), + DRAFT6.optional_cases_of(name="float-overflow"), + DRAFT6.optional_cases_of(name="id"), + DRAFT6.optional_cases_of(name="non-bmp-regex"), + Validator=jsonschema.Draft6Validator, + format_checker=jsonschema.Draft6Validator.FORMAT_CHECKER, + skip=lambda test: ( + allowed_leading_zeros(test) + or leap_second(test) + or missing_format(jsonschema.Draft6Validator)(test) + or complex_email_validation(test) + ), +) + + +TestDraft7 = DRAFT7.to_unittest_testcase( + DRAFT7.cases(), + DRAFT7.format_cases(), + DRAFT7.optional_cases_of(name="bignum"), + DRAFT7.optional_cases_of(name="cross-draft"), + DRAFT7.optional_cases_of(name="float-overflow"), + DRAFT6.optional_cases_of(name="id"), + DRAFT7.optional_cases_of(name="non-bmp-regex"), + DRAFT7.optional_cases_of(name="unknownKeyword"), + Validator=jsonschema.Draft7Validator, + format_checker=jsonschema.Draft7Validator.FORMAT_CHECKER, + skip=lambda test: ( + allowed_leading_zeros(test) + or leap_second(test) + or missing_format(jsonschema.Draft7Validator)(test) + or complex_email_validation(test) + ), +) + + +TestDraft201909 = DRAFT201909.to_unittest_testcase( + DRAFT201909.cases(), + DRAFT201909.optional_cases_of(name="anchor"), + DRAFT201909.optional_cases_of(name="bignum"), + DRAFT201909.optional_cases_of(name="cross-draft"), + DRAFT201909.optional_cases_of(name="float-overflow"), + DRAFT201909.optional_cases_of(name="id"), + DRAFT201909.optional_cases_of(name="no-schema"), + DRAFT201909.optional_cases_of(name="non-bmp-regex"), + DRAFT201909.optional_cases_of(name="refOfUnknownKeyword"), + 
DRAFT201909.optional_cases_of(name="unknownKeyword"), + Validator=jsonschema.Draft201909Validator, + skip=skip( + message="Vocabulary support is still in-progress.", + subject="vocabulary", + description=( + "no validation: invalid number, but it still validates" + ), + ), +) + + +TestDraft201909Format = DRAFT201909.to_unittest_testcase( + DRAFT201909.format_cases(), + name="TestDraft201909Format", + Validator=jsonschema.Draft201909Validator, + format_checker=jsonschema.Draft201909Validator.FORMAT_CHECKER, + skip=lambda test: ( + complex_email_validation(test) + or allowed_leading_zeros(test) + or leap_second(test) + or missing_format(jsonschema.Draft201909Validator)(test) + or complex_email_validation(test) + ), +) + + +TestDraft202012 = DRAFT202012.to_unittest_testcase( + DRAFT202012.cases(), + DRAFT201909.optional_cases_of(name="anchor"), + DRAFT202012.optional_cases_of(name="bignum"), + DRAFT202012.optional_cases_of(name="cross-draft"), + DRAFT202012.optional_cases_of(name="float-overflow"), + DRAFT202012.optional_cases_of(name="id"), + DRAFT202012.optional_cases_of(name="no-schema"), + DRAFT202012.optional_cases_of(name="non-bmp-regex"), + DRAFT202012.optional_cases_of(name="refOfUnknownKeyword"), + DRAFT202012.optional_cases_of(name="unknownKeyword"), + Validator=jsonschema.Draft202012Validator, + skip=skip( + message="Vocabulary support is still in-progress.", + subject="vocabulary", + description=( + "no validation: invalid number, but it still validates" + ), + ), +) + + +TestDraft202012Format = DRAFT202012.to_unittest_testcase( + DRAFT202012.format_cases(), + name="TestDraft202012Format", + Validator=jsonschema.Draft202012Validator, + format_checker=jsonschema.Draft202012Validator.FORMAT_CHECKER, + skip=lambda test: ( + complex_email_validation(test) + or allowed_leading_zeros(test) + or leap_second(test) + or missing_format(jsonschema.Draft202012Validator)(test) + or complex_email_validation(test) + ), +) diff --git 
a/vllm/lib/python3.10/site-packages/jsonschema/tests/test_types.py b/vllm/lib/python3.10/site-packages/jsonschema/tests/test_types.py new file mode 100644 index 0000000000000000000000000000000000000000..bd97b180029ae05d7f3b22f1048f9adcb769f36f --- /dev/null +++ b/vllm/lib/python3.10/site-packages/jsonschema/tests/test_types.py @@ -0,0 +1,221 @@ +""" +Tests for the `TypeChecker`-based type interface. + +The actual correctness of the type checking is handled in +`test_jsonschema_test_suite`; these tests check that TypeChecker +functions correctly at a more granular level. +""" +from collections import namedtuple +from unittest import TestCase + +from jsonschema import ValidationError, _keywords +from jsonschema._types import TypeChecker +from jsonschema.exceptions import UndefinedTypeCheck, UnknownType +from jsonschema.validators import Draft202012Validator, extend + + +def equals_2(checker, instance): + return instance == 2 + + +def is_namedtuple(instance): + return isinstance(instance, tuple) and getattr(instance, "_fields", None) + + +def is_object_or_named_tuple(checker, instance): + if Draft202012Validator.TYPE_CHECKER.is_type(instance, "object"): + return True + return is_namedtuple(instance) + + +class TestTypeChecker(TestCase): + def test_is_type(self): + checker = TypeChecker({"two": equals_2}) + self.assertEqual( + ( + checker.is_type(instance=2, type="two"), + checker.is_type(instance="bar", type="two"), + ), + (True, False), + ) + + def test_is_unknown_type(self): + with self.assertRaises(UndefinedTypeCheck) as e: + TypeChecker().is_type(4, "foobar") + self.assertIn( + "'foobar' is unknown to this type checker", + str(e.exception), + ) + self.assertTrue( + e.exception.__suppress_context__, + msg="Expected the internal KeyError to be hidden.", + ) + + def test_checks_can_be_added_at_init(self): + checker = TypeChecker({"two": equals_2}) + self.assertEqual(checker, TypeChecker().redefine("two", equals_2)) + + def test_redefine_existing_type(self): + 
self.assertEqual( + TypeChecker().redefine("two", object()).redefine("two", equals_2), + TypeChecker().redefine("two", equals_2), + ) + + def test_remove(self): + self.assertEqual( + TypeChecker({"two": equals_2}).remove("two"), + TypeChecker(), + ) + + def test_remove_unknown_type(self): + with self.assertRaises(UndefinedTypeCheck) as context: + TypeChecker().remove("foobar") + self.assertIn("foobar", str(context.exception)) + + def test_redefine_many(self): + self.assertEqual( + TypeChecker().redefine_many({"foo": int, "bar": str}), + TypeChecker().redefine("foo", int).redefine("bar", str), + ) + + def test_remove_multiple(self): + self.assertEqual( + TypeChecker({"foo": int, "bar": str}).remove("foo", "bar"), + TypeChecker(), + ) + + def test_type_check_can_raise_key_error(self): + """ + Make sure no one writes: + + try: + self._type_checkers[type](...) + except KeyError: + + ignoring the fact that the function itself can raise that. + """ + + error = KeyError("Stuff") + + def raises_keyerror(checker, instance): + raise error + + with self.assertRaises(KeyError) as context: + TypeChecker({"foo": raises_keyerror}).is_type(4, "foo") + + self.assertIs(context.exception, error) + + def test_repr(self): + checker = TypeChecker({"foo": is_namedtuple, "bar": is_namedtuple}) + self.assertEqual(repr(checker), "") + + +class TestCustomTypes(TestCase): + def test_simple_type_can_be_extended(self): + def int_or_str_int(checker, instance): + if not isinstance(instance, (int, str)): + return False + try: + int(instance) + except ValueError: + return False + return True + + CustomValidator = extend( + Draft202012Validator, + type_checker=Draft202012Validator.TYPE_CHECKER.redefine( + "integer", int_or_str_int, + ), + ) + validator = CustomValidator({"type": "integer"}) + + validator.validate(4) + validator.validate("4") + + with self.assertRaises(ValidationError): + validator.validate(4.4) + + with self.assertRaises(ValidationError): + validator.validate("foo") + + def 
test_object_can_be_extended(self): + schema = {"type": "object"} + + Point = namedtuple("Point", ["x", "y"]) + + type_checker = Draft202012Validator.TYPE_CHECKER.redefine( + "object", is_object_or_named_tuple, + ) + + CustomValidator = extend( + Draft202012Validator, + type_checker=type_checker, + ) + validator = CustomValidator(schema) + + validator.validate(Point(x=4, y=5)) + + def test_object_extensions_require_custom_validators(self): + schema = {"type": "object", "required": ["x"]} + + type_checker = Draft202012Validator.TYPE_CHECKER.redefine( + "object", is_object_or_named_tuple, + ) + + CustomValidator = extend( + Draft202012Validator, + type_checker=type_checker, + ) + validator = CustomValidator(schema) + + Point = namedtuple("Point", ["x", "y"]) + # Cannot handle required + with self.assertRaises(ValidationError): + validator.validate(Point(x=4, y=5)) + + def test_object_extensions_can_handle_custom_validators(self): + schema = { + "type": "object", + "required": ["x"], + "properties": {"x": {"type": "integer"}}, + } + + type_checker = Draft202012Validator.TYPE_CHECKER.redefine( + "object", is_object_or_named_tuple, + ) + + def coerce_named_tuple(fn): + def coerced(validator, value, instance, schema): + if is_namedtuple(instance): + instance = instance._asdict() + return fn(validator, value, instance, schema) + return coerced + + required = coerce_named_tuple(_keywords.required) + properties = coerce_named_tuple(_keywords.properties) + + CustomValidator = extend( + Draft202012Validator, + type_checker=type_checker, + validators={"required": required, "properties": properties}, + ) + + validator = CustomValidator(schema) + + Point = namedtuple("Point", ["x", "y"]) + # Can now process required and properties + validator.validate(Point(x=4, y=5)) + + with self.assertRaises(ValidationError): + validator.validate(Point(x="not an integer", y=5)) + + # As well as still handle objects. 
+ validator.validate({"x": 4, "y": 5}) + + with self.assertRaises(ValidationError): + validator.validate({"x": "not an integer", "y": 5}) + + def test_unknown_type(self): + with self.assertRaises(UnknownType) as e: + Draft202012Validator({}).is_type(12, "some unknown type") + self.assertIn("'some unknown type'", str(e.exception)) diff --git a/vllm/lib/python3.10/site-packages/jsonschema/tests/test_utils.py b/vllm/lib/python3.10/site-packages/jsonschema/tests/test_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..d9764b0f9e92edb38c19e1fc43b248a20186ef6b --- /dev/null +++ b/vllm/lib/python3.10/site-packages/jsonschema/tests/test_utils.py @@ -0,0 +1,138 @@ +from math import nan +from unittest import TestCase + +from jsonschema._utils import equal + + +class TestEqual(TestCase): + def test_none(self): + self.assertTrue(equal(None, None)) + + def test_nan(self): + self.assertTrue(equal(nan, nan)) + + +class TestDictEqual(TestCase): + def test_equal_dictionaries(self): + dict_1 = {"a": "b", "c": "d"} + dict_2 = {"c": "d", "a": "b"} + self.assertTrue(equal(dict_1, dict_2)) + + def test_equal_dictionaries_with_nan(self): + dict_1 = {"a": nan, "c": "d"} + dict_2 = {"c": "d", "a": nan} + self.assertTrue(equal(dict_1, dict_2)) + + def test_missing_key(self): + dict_1 = {"a": "b", "c": "d"} + dict_2 = {"c": "d", "x": "b"} + self.assertFalse(equal(dict_1, dict_2)) + + def test_additional_key(self): + dict_1 = {"a": "b", "c": "d"} + dict_2 = {"c": "d", "a": "b", "x": "x"} + self.assertFalse(equal(dict_1, dict_2)) + + def test_missing_value(self): + dict_1 = {"a": "b", "c": "d"} + dict_2 = {"c": "d", "a": "x"} + self.assertFalse(equal(dict_1, dict_2)) + + def test_empty_dictionaries(self): + dict_1 = {} + dict_2 = {} + self.assertTrue(equal(dict_1, dict_2)) + + def test_one_none(self): + dict_1 = None + dict_2 = {"a": "b", "c": "d"} + self.assertFalse(equal(dict_1, dict_2)) + + def test_same_item(self): + dict_1 = {"a": "b", "c": "d"} + 
self.assertTrue(equal(dict_1, dict_1)) + + def test_nested_equal(self): + dict_1 = {"a": {"a": "b", "c": "d"}, "c": "d"} + dict_2 = {"c": "d", "a": {"a": "b", "c": "d"}} + self.assertTrue(equal(dict_1, dict_2)) + + def test_nested_dict_unequal(self): + dict_1 = {"a": {"a": "b", "c": "d"}, "c": "d"} + dict_2 = {"c": "d", "a": {"a": "b", "c": "x"}} + self.assertFalse(equal(dict_1, dict_2)) + + def test_mixed_nested_equal(self): + dict_1 = {"a": ["a", "b", "c", "d"], "c": "d"} + dict_2 = {"c": "d", "a": ["a", "b", "c", "d"]} + self.assertTrue(equal(dict_1, dict_2)) + + def test_nested_list_unequal(self): + dict_1 = {"a": ["a", "b", "c", "d"], "c": "d"} + dict_2 = {"c": "d", "a": ["b", "c", "d", "a"]} + self.assertFalse(equal(dict_1, dict_2)) + + +class TestListEqual(TestCase): + def test_equal_lists(self): + list_1 = ["a", "b", "c"] + list_2 = ["a", "b", "c"] + self.assertTrue(equal(list_1, list_2)) + + def test_equal_lists_with_nan(self): + list_1 = ["a", nan, "c"] + list_2 = ["a", nan, "c"] + self.assertTrue(equal(list_1, list_2)) + + def test_unsorted_lists(self): + list_1 = ["a", "b", "c"] + list_2 = ["b", "b", "a"] + self.assertFalse(equal(list_1, list_2)) + + def test_first_list_larger(self): + list_1 = ["a", "b", "c"] + list_2 = ["a", "b"] + self.assertFalse(equal(list_1, list_2)) + + def test_second_list_larger(self): + list_1 = ["a", "b"] + list_2 = ["a", "b", "c"] + self.assertFalse(equal(list_1, list_2)) + + def test_list_with_none_unequal(self): + list_1 = ["a", "b", None] + list_2 = ["a", "b", "c"] + self.assertFalse(equal(list_1, list_2)) + + list_1 = ["a", "b", None] + list_2 = [None, "b", "c"] + self.assertFalse(equal(list_1, list_2)) + + def test_list_with_none_equal(self): + list_1 = ["a", None, "c"] + list_2 = ["a", None, "c"] + self.assertTrue(equal(list_1, list_2)) + + def test_empty_list(self): + list_1 = [] + list_2 = [] + self.assertTrue(equal(list_1, list_2)) + + def test_one_none(self): + list_1 = None + list_2 = [] + 
self.assertFalse(equal(list_1, list_2)) + + def test_same_list(self): + list_1 = ["a", "b", "c"] + self.assertTrue(equal(list_1, list_1)) + + def test_equal_nested_lists(self): + list_1 = ["a", ["b", "c"], "d"] + list_2 = ["a", ["b", "c"], "d"] + self.assertTrue(equal(list_1, list_2)) + + def test_unequal_nested_lists(self): + list_1 = ["a", ["b", "c"], "d"] + list_2 = ["a", [], "c"] + self.assertFalse(equal(list_1, list_2)) diff --git a/vllm/lib/python3.10/site-packages/jsonschema/validators.py b/vllm/lib/python3.10/site-packages/jsonschema/validators.py new file mode 100644 index 0000000000000000000000000000000000000000..85c39160dc64efd13ed985b08369350cb022d888 --- /dev/null +++ b/vllm/lib/python3.10/site-packages/jsonschema/validators.py @@ -0,0 +1,1410 @@ +""" +Creation and extension of validators, with implementations for existing drafts. +""" +from __future__ import annotations + +from collections import deque +from collections.abc import Iterable, Mapping, Sequence +from functools import lru_cache +from operator import methodcaller +from typing import TYPE_CHECKING +from urllib.parse import unquote, urldefrag, urljoin, urlsplit +from urllib.request import urlopen +from warnings import warn +import contextlib +import json +import reprlib +import warnings + +from attrs import define, field, fields +from jsonschema_specifications import REGISTRY as SPECIFICATIONS +from rpds import HashTrieMap +import referencing.exceptions +import referencing.jsonschema + +from jsonschema import ( + _format, + _keywords, + _legacy_keywords, + _types, + _typing, + _utils, + exceptions, +) + +if TYPE_CHECKING: + from jsonschema.protocols import Validator + +_UNSET = _utils.Unset() + +_VALIDATORS: dict[str, Validator] = {} +_META_SCHEMAS = _utils.URIDict() + + +def __getattr__(name): + if name == "ErrorTree": + warnings.warn( + "Importing ErrorTree from jsonschema.validators is deprecated. 
" + "Instead import it from jsonschema.exceptions.", + DeprecationWarning, + stacklevel=2, + ) + from jsonschema.exceptions import ErrorTree + return ErrorTree + elif name == "validators": + warnings.warn( + "Accessing jsonschema.validators.validators is deprecated. " + "Use jsonschema.validators.validator_for with a given schema.", + DeprecationWarning, + stacklevel=2, + ) + return _VALIDATORS + elif name == "meta_schemas": + warnings.warn( + "Accessing jsonschema.validators.meta_schemas is deprecated. " + "Use jsonschema.validators.validator_for with a given schema.", + DeprecationWarning, + stacklevel=2, + ) + return _META_SCHEMAS + elif name == "RefResolver": + warnings.warn( + _RefResolver._DEPRECATION_MESSAGE, + DeprecationWarning, + stacklevel=2, + ) + return _RefResolver + raise AttributeError(f"module {__name__} has no attribute {name}") + + +def validates(version): + """ + Register the decorated validator for a ``version`` of the specification. + + Registered validators and their meta schemas will be considered when + parsing :kw:`$schema` keywords' URIs. + + Arguments: + + version (str): + + An identifier to use as the version's name + + Returns: + + collections.abc.Callable: + + a class decorator to decorate the validator with the version + + """ + + def _validates(cls): + _VALIDATORS[version] = cls + meta_schema_id = cls.ID_OF(cls.META_SCHEMA) + _META_SCHEMAS[meta_schema_id] = cls + return cls + return _validates + + +def _warn_for_remote_retrieve(uri: str): + from urllib.request import Request, urlopen + headers = {"User-Agent": "python-jsonschema (deprecated $ref resolution)"} + request = Request(uri, headers=headers) # noqa: S310 + with urlopen(request) as response: # noqa: S310 + warnings.warn( + "Automatically retrieving remote references can be a security " + "vulnerability and is discouraged by the JSON Schema " + "specifications. Relying on this behavior is deprecated " + "and will shortly become an error. 
If you are sure you want to " + "remotely retrieve your reference and that it is safe to do so, " + "you can find instructions for doing so via referencing.Registry " + "in the referencing documentation " + "(https://referencing.readthedocs.org).", + DeprecationWarning, + stacklevel=9, # Ha ha ha ha magic numbers :/ + ) + return referencing.Resource.from_contents( + json.load(response), + default_specification=referencing.jsonschema.DRAFT202012, + ) + + +_REMOTE_WARNING_REGISTRY = SPECIFICATIONS.combine( + referencing.Registry(retrieve=_warn_for_remote_retrieve), # type: ignore[call-arg] +) + + +def create( + meta_schema: referencing.jsonschema.ObjectSchema, + validators: ( + Mapping[str, _typing.SchemaKeywordValidator] + | Iterable[tuple[str, _typing.SchemaKeywordValidator]] + ) = (), + version: str | None = None, + type_checker: _types.TypeChecker = _types.draft202012_type_checker, + format_checker: _format.FormatChecker = _format.draft202012_format_checker, + id_of: _typing.id_of = referencing.jsonschema.DRAFT202012.id_of, + applicable_validators: _typing.ApplicableValidators = methodcaller( + "items", + ), +): + """ + Create a new validator class. + + Arguments: + + meta_schema: + + the meta schema for the new validator class + + validators: + + a mapping from names to callables, where each callable will + validate the schema property with the given name. + + Each callable should take 4 arguments: + + 1. a validator instance, + 2. the value of the property being validated within the + instance + 3. the instance + 4. the schema + + version: + + an identifier for the version that this validator class will + validate. If provided, the returned validator class will + have its ``__name__`` set to include the version, and also + will have `jsonschema.validators.validates` automatically + called for the given version. + + type_checker: + + a type checker, used when applying the :kw:`type` keyword. 
+ + If unprovided, a `jsonschema.TypeChecker` will be created + with a set of default types typical of JSON Schema drafts. + + format_checker: + + a format checker, used when applying the :kw:`format` keyword. + + If unprovided, a `jsonschema.FormatChecker` will be created + with a set of default formats typical of JSON Schema drafts. + + id_of: + + A function that given a schema, returns its ID. + + applicable_validators: + + A function that, given a schema, returns the list of + applicable schema keywords and associated values + which will be used to validate the instance. + This is mostly used to support pre-draft 7 versions of JSON Schema + which specified behavior around ignoring keywords if they were + siblings of a ``$ref`` keyword. If you're not attempting to + implement similar behavior, you can typically ignore this argument + and leave it at its default. + + Returns: + + a new `jsonschema.protocols.Validator` class + + """ + # preemptively don't shadow the `Validator.format_checker` local + format_checker_arg = format_checker + + specification = referencing.jsonschema.specification_with( + dialect_id=id_of(meta_schema) or "urn:unknown-dialect", + default=referencing.Specification.OPAQUE, + ) + + @define + class Validator: + + VALIDATORS = dict(validators) # noqa: RUF012 + META_SCHEMA = dict(meta_schema) # noqa: RUF012 + TYPE_CHECKER = type_checker + FORMAT_CHECKER = format_checker_arg + ID_OF = staticmethod(id_of) + + _APPLICABLE_VALIDATORS = applicable_validators + _validators = field(init=False, repr=False, eq=False) + + schema: referencing.jsonschema.Schema = field(repr=reprlib.repr) + _ref_resolver = field(default=None, repr=False, alias="resolver") + format_checker: _format.FormatChecker | None = field(default=None) + # TODO: include new meta-schemas added at runtime + _registry: referencing.jsonschema.SchemaRegistry = field( + default=_REMOTE_WARNING_REGISTRY, + kw_only=True, + repr=False, + ) + _resolver = field( + alias="_resolver", + 
default=None, + kw_only=True, + repr=False, + ) + + def __init_subclass__(cls): + warnings.warn( + ( + "Subclassing validator classes is not intended to " + "be part of their public API. A future version " + "will make doing so an error, as the behavior of " + "subclasses isn't guaranteed to stay the same " + "between releases of jsonschema. Instead, prefer " + "composition of validators, wrapping them in an object " + "owned entirely by the downstream library." + ), + DeprecationWarning, + stacklevel=2, + ) + + def evolve(self, **changes): + cls = self.__class__ + schema = changes.setdefault("schema", self.schema) + NewValidator = validator_for(schema, default=cls) + + for field in fields(cls): # noqa: F402 + if not field.init: + continue + attr_name = field.name + init_name = field.alias + if init_name not in changes: + changes[init_name] = getattr(self, attr_name) + + return NewValidator(**changes) + + cls.evolve = evolve + + def __attrs_post_init__(self): + if self._resolver is None: + registry = self._registry + if registry is not _REMOTE_WARNING_REGISTRY: + registry = SPECIFICATIONS.combine(registry) + resource = specification.create_resource(self.schema) + self._resolver = registry.resolver_with_root(resource) + + if self.schema is True or self.schema is False: + self._validators = [] + else: + self._validators = [ + (self.VALIDATORS[k], k, v) + for k, v in applicable_validators(self.schema) + if k in self.VALIDATORS + ] + + # REMOVEME: Legacy ref resolution state management. 
+ push_scope = getattr(self._ref_resolver, "push_scope", None) + if push_scope is not None: + id = id_of(self.schema) + if id is not None: + push_scope(id) + + @classmethod + def check_schema(cls, schema, format_checker=_UNSET): + Validator = validator_for(cls.META_SCHEMA, default=cls) + if format_checker is _UNSET: + format_checker = Validator.FORMAT_CHECKER + validator = Validator( + schema=cls.META_SCHEMA, + format_checker=format_checker, + ) + for error in validator.iter_errors(schema): + raise exceptions.SchemaError.create_from(error) + + @property + def resolver(self): + warnings.warn( + ( + f"Accessing {self.__class__.__name__}.resolver is " + "deprecated as of v4.18.0, in favor of the " + "https://github.com/python-jsonschema/referencing " + "library, which provides more compliant referencing " + "behavior as well as more flexible APIs for " + "customization." + ), + DeprecationWarning, + stacklevel=2, + ) + if self._ref_resolver is None: + self._ref_resolver = _RefResolver.from_schema( + self.schema, + id_of=id_of, + ) + return self._ref_resolver + + def evolve(self, **changes): + schema = changes.setdefault("schema", self.schema) + NewValidator = validator_for(schema, default=self.__class__) + + for (attr_name, init_name) in evolve_fields: + if init_name not in changes: + changes[init_name] = getattr(self, attr_name) + + return NewValidator(**changes) + + def iter_errors(self, instance, _schema=None): + if _schema is not None: + warnings.warn( + ( + "Passing a schema to Validator.iter_errors " + "is deprecated and will be removed in a future " + "release. Call validator.evolve(schema=new_schema)." + "iter_errors(...) instead." 
+ ), + DeprecationWarning, + stacklevel=2, + ) + validators = [ + (self.VALIDATORS[k], k, v) + for k, v in applicable_validators(_schema) + if k in self.VALIDATORS + ] + else: + _schema, validators = self.schema, self._validators + + if _schema is True: + return + elif _schema is False: + yield exceptions.ValidationError( + f"False schema does not allow {instance!r}", + validator=None, + validator_value=None, + instance=instance, + schema=_schema, + ) + return + + for validator, k, v in validators: + errors = validator(self, v, instance, _schema) or () + for error in errors: + # set details if not already set by the called fn + error._set( + validator=k, + validator_value=v, + instance=instance, + schema=_schema, + type_checker=self.TYPE_CHECKER, + ) + if k not in {"if", "$ref"}: + error.schema_path.appendleft(k) + yield error + + def descend( + self, + instance, + schema, + path=None, + schema_path=None, + resolver=None, + ): + if schema is True: + return + elif schema is False: + yield exceptions.ValidationError( + f"False schema does not allow {instance!r}", + validator=None, + validator_value=None, + instance=instance, + schema=schema, + ) + return + + if self._ref_resolver is not None: + evolved = self.evolve(schema=schema) + else: + if resolver is None: + resolver = self._resolver.in_subresource( + specification.create_resource(schema), + ) + evolved = self.evolve(schema=schema, _resolver=resolver) + + for k, v in applicable_validators(schema): + validator = evolved.VALIDATORS.get(k) + if validator is None: + continue + + errors = validator(evolved, v, instance, schema) or () + for error in errors: + # set details if not already set by the called fn + error._set( + validator=k, + validator_value=v, + instance=instance, + schema=schema, + type_checker=evolved.TYPE_CHECKER, + ) + if k not in {"if", "$ref"}: + error.schema_path.appendleft(k) + if path is not None: + error.path.appendleft(path) + if schema_path is not None: + 
error.schema_path.appendleft(schema_path) + yield error + + def validate(self, *args, **kwargs): + for error in self.iter_errors(*args, **kwargs): + raise error + + def is_type(self, instance, type): + try: + return self.TYPE_CHECKER.is_type(instance, type) + except exceptions.UndefinedTypeCheck: + exc = exceptions.UnknownType(type, instance, self.schema) + raise exc from None + + def _validate_reference(self, ref, instance): + if self._ref_resolver is None: + try: + resolved = self._resolver.lookup(ref) + except referencing.exceptions.Unresolvable as err: + raise exceptions._WrappedReferencingError(err) from err + + return self.descend( + instance, + resolved.contents, + resolver=resolved.resolver, + ) + else: + resolve = getattr(self._ref_resolver, "resolve", None) + if resolve is None: + with self._ref_resolver.resolving(ref) as resolved: + return self.descend(instance, resolved) + else: + scope, resolved = resolve(ref) + self._ref_resolver.push_scope(scope) + + try: + return list(self.descend(instance, resolved)) + finally: + self._ref_resolver.pop_scope() + + def is_valid(self, instance, _schema=None): + if _schema is not None: + warnings.warn( + ( + "Passing a schema to Validator.is_valid is deprecated " + "and will be removed in a future release. Call " + "validator.evolve(schema=new_schema).is_valid(...) " + "instead." 
+ ), + DeprecationWarning, + stacklevel=2, + ) + self = self.evolve(schema=_schema) + + error = next(self.iter_errors(instance), None) + return error is None + + evolve_fields = [ + (field.name, field.alias) + for field in fields(Validator) + if field.init + ] + + if version is not None: + safe = version.title().replace(" ", "").replace("-", "") + Validator.__name__ = Validator.__qualname__ = f"{safe}Validator" + Validator = validates(version)(Validator) # type: ignore[misc] + + return Validator + + +def extend( + validator, + validators=(), + version=None, + type_checker=None, + format_checker=None, +): + """ + Create a new validator class by extending an existing one. + + Arguments: + + validator (jsonschema.protocols.Validator): + + an existing validator class + + validators (collections.abc.Mapping): + + a mapping of new validator callables to extend with, whose + structure is as in `create`. + + .. note:: + + Any validator callables with the same name as an + existing one will (silently) replace the old validator + callable entirely, effectively overriding any validation + done in the "parent" validator class. + + If you wish to instead extend the behavior of a parent's + validator callable, delegate and call it directly in + the new validator function by retrieving it using + ``OldValidator.VALIDATORS["validation_keyword_name"]``. + + version (str): + + a version for the new validator class + + type_checker (jsonschema.TypeChecker): + + a type checker, used when applying the :kw:`type` keyword. + + If unprovided, the type checker of the extended + `jsonschema.protocols.Validator` will be carried along. + + format_checker (jsonschema.FormatChecker): + + a format checker, used when applying the :kw:`format` keyword. + + If unprovided, the format checker of the extended + `jsonschema.protocols.Validator` will be carried along. + + Returns: + + a new `jsonschema.protocols.Validator` class extending the one + provided + + .. 
note:: Meta Schemas + + The new validator class will have its parent's meta schema. + + If you wish to change or extend the meta schema in the new + validator class, modify ``META_SCHEMA`` directly on the returned + class. Note that no implicit copying is done, so a copy should + likely be made before modifying it, in order to not affect the + old validator. + + """ + all_validators = dict(validator.VALIDATORS) + all_validators.update(validators) + + if type_checker is None: + type_checker = validator.TYPE_CHECKER + if format_checker is None: + format_checker = validator.FORMAT_CHECKER + return create( + meta_schema=validator.META_SCHEMA, + validators=all_validators, + version=version, + type_checker=type_checker, + format_checker=format_checker, + id_of=validator.ID_OF, + applicable_validators=validator._APPLICABLE_VALIDATORS, + ) + + +Draft3Validator = create( + meta_schema=SPECIFICATIONS.contents( + "http://json-schema.org/draft-03/schema#", + ), + validators={ + "$ref": _keywords.ref, + "additionalItems": _legacy_keywords.additionalItems, + "additionalProperties": _keywords.additionalProperties, + "dependencies": _legacy_keywords.dependencies_draft3, + "disallow": _legacy_keywords.disallow_draft3, + "divisibleBy": _keywords.multipleOf, + "enum": _keywords.enum, + "extends": _legacy_keywords.extends_draft3, + "format": _keywords.format, + "items": _legacy_keywords.items_draft3_draft4, + "maxItems": _keywords.maxItems, + "maxLength": _keywords.maxLength, + "maximum": _legacy_keywords.maximum_draft3_draft4, + "minItems": _keywords.minItems, + "minLength": _keywords.minLength, + "minimum": _legacy_keywords.minimum_draft3_draft4, + "pattern": _keywords.pattern, + "patternProperties": _keywords.patternProperties, + "properties": _legacy_keywords.properties_draft3, + "type": _legacy_keywords.type_draft3, + "uniqueItems": _keywords.uniqueItems, + }, + type_checker=_types.draft3_type_checker, + format_checker=_format.draft3_format_checker, + version="draft3", + 
id_of=referencing.jsonschema.DRAFT3.id_of, + applicable_validators=_legacy_keywords.ignore_ref_siblings, +) + +Draft4Validator = create( + meta_schema=SPECIFICATIONS.contents( + "http://json-schema.org/draft-04/schema#", + ), + validators={ + "$ref": _keywords.ref, + "additionalItems": _legacy_keywords.additionalItems, + "additionalProperties": _keywords.additionalProperties, + "allOf": _keywords.allOf, + "anyOf": _keywords.anyOf, + "dependencies": _legacy_keywords.dependencies_draft4_draft6_draft7, + "enum": _keywords.enum, + "format": _keywords.format, + "items": _legacy_keywords.items_draft3_draft4, + "maxItems": _keywords.maxItems, + "maxLength": _keywords.maxLength, + "maxProperties": _keywords.maxProperties, + "maximum": _legacy_keywords.maximum_draft3_draft4, + "minItems": _keywords.minItems, + "minLength": _keywords.minLength, + "minProperties": _keywords.minProperties, + "minimum": _legacy_keywords.minimum_draft3_draft4, + "multipleOf": _keywords.multipleOf, + "not": _keywords.not_, + "oneOf": _keywords.oneOf, + "pattern": _keywords.pattern, + "patternProperties": _keywords.patternProperties, + "properties": _keywords.properties, + "required": _keywords.required, + "type": _keywords.type, + "uniqueItems": _keywords.uniqueItems, + }, + type_checker=_types.draft4_type_checker, + format_checker=_format.draft4_format_checker, + version="draft4", + id_of=referencing.jsonschema.DRAFT4.id_of, + applicable_validators=_legacy_keywords.ignore_ref_siblings, +) + +Draft6Validator = create( + meta_schema=SPECIFICATIONS.contents( + "http://json-schema.org/draft-06/schema#", + ), + validators={ + "$ref": _keywords.ref, + "additionalItems": _legacy_keywords.additionalItems, + "additionalProperties": _keywords.additionalProperties, + "allOf": _keywords.allOf, + "anyOf": _keywords.anyOf, + "const": _keywords.const, + "contains": _legacy_keywords.contains_draft6_draft7, + "dependencies": _legacy_keywords.dependencies_draft4_draft6_draft7, + "enum": _keywords.enum, + 
"exclusiveMaximum": _keywords.exclusiveMaximum, + "exclusiveMinimum": _keywords.exclusiveMinimum, + "format": _keywords.format, + "items": _legacy_keywords.items_draft6_draft7_draft201909, + "maxItems": _keywords.maxItems, + "maxLength": _keywords.maxLength, + "maxProperties": _keywords.maxProperties, + "maximum": _keywords.maximum, + "minItems": _keywords.minItems, + "minLength": _keywords.minLength, + "minProperties": _keywords.minProperties, + "minimum": _keywords.minimum, + "multipleOf": _keywords.multipleOf, + "not": _keywords.not_, + "oneOf": _keywords.oneOf, + "pattern": _keywords.pattern, + "patternProperties": _keywords.patternProperties, + "properties": _keywords.properties, + "propertyNames": _keywords.propertyNames, + "required": _keywords.required, + "type": _keywords.type, + "uniqueItems": _keywords.uniqueItems, + }, + type_checker=_types.draft6_type_checker, + format_checker=_format.draft6_format_checker, + version="draft6", + id_of=referencing.jsonschema.DRAFT6.id_of, + applicable_validators=_legacy_keywords.ignore_ref_siblings, +) + +Draft7Validator = create( + meta_schema=SPECIFICATIONS.contents( + "http://json-schema.org/draft-07/schema#", + ), + validators={ + "$ref": _keywords.ref, + "additionalItems": _legacy_keywords.additionalItems, + "additionalProperties": _keywords.additionalProperties, + "allOf": _keywords.allOf, + "anyOf": _keywords.anyOf, + "const": _keywords.const, + "contains": _legacy_keywords.contains_draft6_draft7, + "dependencies": _legacy_keywords.dependencies_draft4_draft6_draft7, + "enum": _keywords.enum, + "exclusiveMaximum": _keywords.exclusiveMaximum, + "exclusiveMinimum": _keywords.exclusiveMinimum, + "format": _keywords.format, + "if": _keywords.if_, + "items": _legacy_keywords.items_draft6_draft7_draft201909, + "maxItems": _keywords.maxItems, + "maxLength": _keywords.maxLength, + "maxProperties": _keywords.maxProperties, + "maximum": _keywords.maximum, + "minItems": _keywords.minItems, + "minLength": _keywords.minLength, 
+ "minProperties": _keywords.minProperties, + "minimum": _keywords.minimum, + "multipleOf": _keywords.multipleOf, + "not": _keywords.not_, + "oneOf": _keywords.oneOf, + "pattern": _keywords.pattern, + "patternProperties": _keywords.patternProperties, + "properties": _keywords.properties, + "propertyNames": _keywords.propertyNames, + "required": _keywords.required, + "type": _keywords.type, + "uniqueItems": _keywords.uniqueItems, + }, + type_checker=_types.draft7_type_checker, + format_checker=_format.draft7_format_checker, + version="draft7", + id_of=referencing.jsonschema.DRAFT7.id_of, + applicable_validators=_legacy_keywords.ignore_ref_siblings, +) + +Draft201909Validator = create( + meta_schema=SPECIFICATIONS.contents( + "https://json-schema.org/draft/2019-09/schema", + ), + validators={ + "$recursiveRef": _legacy_keywords.recursiveRef, + "$ref": _keywords.ref, + "additionalItems": _legacy_keywords.additionalItems, + "additionalProperties": _keywords.additionalProperties, + "allOf": _keywords.allOf, + "anyOf": _keywords.anyOf, + "const": _keywords.const, + "contains": _keywords.contains, + "dependentRequired": _keywords.dependentRequired, + "dependentSchemas": _keywords.dependentSchemas, + "enum": _keywords.enum, + "exclusiveMaximum": _keywords.exclusiveMaximum, + "exclusiveMinimum": _keywords.exclusiveMinimum, + "format": _keywords.format, + "if": _keywords.if_, + "items": _legacy_keywords.items_draft6_draft7_draft201909, + "maxItems": _keywords.maxItems, + "maxLength": _keywords.maxLength, + "maxProperties": _keywords.maxProperties, + "maximum": _keywords.maximum, + "minItems": _keywords.minItems, + "minLength": _keywords.minLength, + "minProperties": _keywords.minProperties, + "minimum": _keywords.minimum, + "multipleOf": _keywords.multipleOf, + "not": _keywords.not_, + "oneOf": _keywords.oneOf, + "pattern": _keywords.pattern, + "patternProperties": _keywords.patternProperties, + "properties": _keywords.properties, + "propertyNames": _keywords.propertyNames, 
+ "required": _keywords.required, + "type": _keywords.type, + "unevaluatedItems": _legacy_keywords.unevaluatedItems_draft2019, + "unevaluatedProperties": ( + _legacy_keywords.unevaluatedProperties_draft2019 + ), + "uniqueItems": _keywords.uniqueItems, + }, + type_checker=_types.draft201909_type_checker, + format_checker=_format.draft201909_format_checker, + version="draft2019-09", +) + +Draft202012Validator = create( + meta_schema=SPECIFICATIONS.contents( + "https://json-schema.org/draft/2020-12/schema", + ), + validators={ + "$dynamicRef": _keywords.dynamicRef, + "$ref": _keywords.ref, + "additionalProperties": _keywords.additionalProperties, + "allOf": _keywords.allOf, + "anyOf": _keywords.anyOf, + "const": _keywords.const, + "contains": _keywords.contains, + "dependentRequired": _keywords.dependentRequired, + "dependentSchemas": _keywords.dependentSchemas, + "enum": _keywords.enum, + "exclusiveMaximum": _keywords.exclusiveMaximum, + "exclusiveMinimum": _keywords.exclusiveMinimum, + "format": _keywords.format, + "if": _keywords.if_, + "items": _keywords.items, + "maxItems": _keywords.maxItems, + "maxLength": _keywords.maxLength, + "maxProperties": _keywords.maxProperties, + "maximum": _keywords.maximum, + "minItems": _keywords.minItems, + "minLength": _keywords.minLength, + "minProperties": _keywords.minProperties, + "minimum": _keywords.minimum, + "multipleOf": _keywords.multipleOf, + "not": _keywords.not_, + "oneOf": _keywords.oneOf, + "pattern": _keywords.pattern, + "patternProperties": _keywords.patternProperties, + "prefixItems": _keywords.prefixItems, + "properties": _keywords.properties, + "propertyNames": _keywords.propertyNames, + "required": _keywords.required, + "type": _keywords.type, + "unevaluatedItems": _keywords.unevaluatedItems, + "unevaluatedProperties": _keywords.unevaluatedProperties, + "uniqueItems": _keywords.uniqueItems, + }, + type_checker=_types.draft202012_type_checker, + format_checker=_format.draft202012_format_checker, + 
version="draft2020-12", +) + +_LATEST_VERSION = Draft202012Validator + + +class _RefResolver: + """ + Resolve JSON References. + + Arguments: + + base_uri (str): + + The URI of the referring document + + referrer: + + The actual referring document + + store (dict): + + A mapping from URIs to documents to cache + + cache_remote (bool): + + Whether remote refs should be cached after first resolution + + handlers (dict): + + A mapping from URI schemes to functions that should be used + to retrieve them + + urljoin_cache (:func:`functools.lru_cache`): + + A cache that will be used for caching the results of joining + the resolution scope to subscopes. + + remote_cache (:func:`functools.lru_cache`): + + A cache that will be used for caching the results of + resolved remote URLs. + + Attributes: + + cache_remote (bool): + + Whether remote refs should be cached after first resolution + + .. deprecated:: v4.18.0 + + ``RefResolver`` has been deprecated in favor of `referencing`. + + """ + + _DEPRECATION_MESSAGE = ( + "jsonschema.RefResolver is deprecated as of v4.18.0, in favor of the " + "https://github.com/python-jsonschema/referencing library, which " + "provides more compliant referencing behavior as well as more " + "flexible APIs for customization. A future release will remove " + "RefResolver. Please file a feature request (on referencing) if you " + "are missing an API for the kind of customization you need." 
+ ) + + def __init__( + self, + base_uri, + referrer, + store=HashTrieMap(), + cache_remote=True, + handlers=(), + urljoin_cache=None, + remote_cache=None, + ): + if urljoin_cache is None: + urljoin_cache = lru_cache(1024)(urljoin) + if remote_cache is None: + remote_cache = lru_cache(1024)(self.resolve_from_url) + + self.referrer = referrer + self.cache_remote = cache_remote + self.handlers = dict(handlers) + + self._scopes_stack = [base_uri] + + self.store = _utils.URIDict( + (uri, each.contents) for uri, each in SPECIFICATIONS.items() + ) + self.store.update( + (id, each.META_SCHEMA) for id, each in _META_SCHEMAS.items() + ) + self.store.update(store) + self.store.update( + (schema["$id"], schema) + for schema in store.values() + if isinstance(schema, Mapping) and "$id" in schema + ) + self.store[base_uri] = referrer + + self._urljoin_cache = urljoin_cache + self._remote_cache = remote_cache + + @classmethod + def from_schema( # noqa: D417 + cls, + schema, + id_of=referencing.jsonschema.DRAFT202012.id_of, + *args, + **kwargs, + ): + """ + Construct a resolver from a JSON schema object. + + Arguments: + + schema: + + the referring schema + + Returns: + + `_RefResolver` + + """ + return cls(base_uri=id_of(schema) or "", referrer=schema, *args, **kwargs) # noqa: B026, E501 + + def push_scope(self, scope): + """ + Enter a given sub-scope. + + Treats further dereferences as being performed underneath the + given scope. + """ + self._scopes_stack.append( + self._urljoin_cache(self.resolution_scope, scope), + ) + + def pop_scope(self): + """ + Exit the most recent entered scope. + + Treats further dereferences as being performed underneath the + original scope. + + Don't call this method more times than `push_scope` has been + called. + """ + try: + self._scopes_stack.pop() + except IndexError: + raise exceptions._RefResolutionError( + "Failed to pop the scope from an empty stack. 
" + "`pop_scope()` should only be called once for every " + "`push_scope()`", + ) from None + + @property + def resolution_scope(self): + """ + Retrieve the current resolution scope. + """ + return self._scopes_stack[-1] + + @property + def base_uri(self): + """ + Retrieve the current base URI, not including any fragment. + """ + uri, _ = urldefrag(self.resolution_scope) + return uri + + @contextlib.contextmanager + def in_scope(self, scope): + """ + Temporarily enter the given scope for the duration of the context. + + .. deprecated:: v4.0.0 + """ + warnings.warn( + "jsonschema.RefResolver.in_scope is deprecated and will be " + "removed in a future release.", + DeprecationWarning, + stacklevel=3, + ) + self.push_scope(scope) + try: + yield + finally: + self.pop_scope() + + @contextlib.contextmanager + def resolving(self, ref): + """ + Resolve the given ``ref`` and enter its resolution scope. + + Exits the scope on exit of this context manager. + + Arguments: + + ref (str): + + The reference to resolve + + """ + url, resolved = self.resolve(ref) + self.push_scope(url) + try: + yield resolved + finally: + self.pop_scope() + + def _find_in_referrer(self, key): + return self._get_subschemas_cache()[key] + + @lru_cache # noqa: B019 + def _get_subschemas_cache(self): + cache = {key: [] for key in _SUBSCHEMAS_KEYWORDS} + for keyword, subschema in _search_schema( + self.referrer, _match_subschema_keywords, + ): + cache[keyword].append(subschema) + return cache + + @lru_cache # noqa: B019 + def _find_in_subschemas(self, url): + subschemas = self._get_subschemas_cache()["$id"] + if not subschemas: + return None + uri, fragment = urldefrag(url) + for subschema in subschemas: + id = subschema["$id"] + if not isinstance(id, str): + continue + target_uri = self._urljoin_cache(self.resolution_scope, id) + if target_uri.rstrip("/") == uri.rstrip("/"): + if fragment: + subschema = self.resolve_fragment(subschema, fragment) + self.store[url] = subschema + return url, subschema + 
return None + + def resolve(self, ref): + """ + Resolve the given reference. + """ + url = self._urljoin_cache(self.resolution_scope, ref).rstrip("/") + + match = self._find_in_subschemas(url) + if match is not None: + return match + + return url, self._remote_cache(url) + + def resolve_from_url(self, url): + """ + Resolve the given URL. + """ + url, fragment = urldefrag(url) + if not url: + url = self.base_uri + + try: + document = self.store[url] + except KeyError: + try: + document = self.resolve_remote(url) + except Exception as exc: + raise exceptions._RefResolutionError(exc) from exc + + return self.resolve_fragment(document, fragment) + + def resolve_fragment(self, document, fragment): + """ + Resolve a ``fragment`` within the referenced ``document``. + + Arguments: + + document: + + The referent document + + fragment (str): + + a URI fragment to resolve within it + + """ + fragment = fragment.lstrip("/") + + if not fragment: + return document + + if document is self.referrer: + find = self._find_in_referrer + else: + + def find(key): + yield from _search_schema(document, _match_keyword(key)) + + for keyword in ["$anchor", "$dynamicAnchor"]: + for subschema in find(keyword): + if fragment == subschema[keyword]: + return subschema + for keyword in ["id", "$id"]: + for subschema in find(keyword): + if "#" + fragment == subschema[keyword]: + return subschema + + # Resolve via path + parts = unquote(fragment).split("/") if fragment else [] + for part in parts: + part = part.replace("~1", "/").replace("~0", "~") + + if isinstance(document, Sequence): + try: # noqa: SIM105 + part = int(part) + except ValueError: + pass + try: + document = document[part] + except (TypeError, LookupError) as err: + raise exceptions._RefResolutionError( + f"Unresolvable JSON pointer: {fragment!r}", + ) from err + + return document + + def resolve_remote(self, uri): + """ + Resolve a remote ``uri``. 
+ + If called directly, does not check the store first, but after + retrieving the document at the specified URI it will be saved in + the store if :attr:`cache_remote` is True. + + .. note:: + + If the requests_ library is present, ``jsonschema`` will use it to + request the remote ``uri``, so that the correct encoding is + detected and used. + + If it isn't, or if the scheme of the ``uri`` is not ``http`` or + ``https``, UTF-8 is assumed. + + Arguments: + + uri (str): + + The URI to resolve + + Returns: + + The retrieved document + + .. _requests: https://pypi.org/project/requests/ + + """ + try: + import requests + except ImportError: + requests = None + + scheme = urlsplit(uri).scheme + + if scheme in self.handlers: + result = self.handlers[scheme](uri) + elif scheme in ["http", "https"] and requests: + # Requests has support for detecting the correct encoding of + # json over http + result = requests.get(uri).json() + else: + # Otherwise, pass off to urllib and assume utf-8 + with urlopen(uri) as url: # noqa: S310 + result = json.loads(url.read().decode("utf-8")) + + if self.cache_remote: + self.store[uri] = result + return result + + +_SUBSCHEMAS_KEYWORDS = ("$id", "id", "$anchor", "$dynamicAnchor") + + +def _match_keyword(keyword): + + def matcher(value): + if keyword in value: + yield value + + return matcher + + +def _match_subschema_keywords(value): + for keyword in _SUBSCHEMAS_KEYWORDS: + if keyword in value: + yield keyword, value + + +def _search_schema(schema, matcher): + """Breadth-first search routine.""" + values = deque([schema]) + while values: + value = values.pop() + if not isinstance(value, dict): + continue + yield from matcher(value) + values.extendleft(value.values()) + + +def validate(instance, schema, cls=None, *args, **kwargs): # noqa: D417 + """ + Validate an instance under the given schema. + + >>> validate([2, 3, 4], {"maxItems": 2}) + Traceback (most recent call last): + ... 
+ ValidationError: [2, 3, 4] is too long + + :func:`~jsonschema.validators.validate` will first verify that the + provided schema is itself valid, since not doing so can lead to less + obvious error messages and fail in less obvious or consistent ways. + + If you know you have a valid schema already, especially + if you intend to validate multiple instances with + the same schema, you likely would prefer using the + `jsonschema.protocols.Validator.validate` method directly on a + specific validator (e.g. ``Draft202012Validator.validate``). + + + Arguments: + + instance: + + The instance to validate + + schema: + + The schema to validate with + + cls (jsonschema.protocols.Validator): + + The class that will be used to validate the instance. + + If the ``cls`` argument is not provided, two things will happen + in accordance with the specification. First, if the schema has a + :kw:`$schema` keyword containing a known meta-schema [#]_ then the + proper validator will be used. The specification recommends that + all schemas contain :kw:`$schema` properties for this reason. If no + :kw:`$schema` property is found, the default validator class is the + latest released draft. + + Any other provided positional and keyword arguments will be passed + on when instantiating the ``cls``. + + Raises: + + `jsonschema.exceptions.ValidationError`: + + if the instance is invalid + + `jsonschema.exceptions.SchemaError`: + + if the schema itself is invalid + + .. rubric:: Footnotes + .. [#] known by a validator registered with + `jsonschema.validators.validates` + + """ + if cls is None: + cls = validator_for(schema) + + cls.check_schema(schema) + validator = cls(schema, *args, **kwargs) + error = exceptions.best_match(validator.iter_errors(instance)) + if error is not None: + raise error + + +def validator_for( + schema, + default: Validator | _utils.Unset = _UNSET, +) -> type[Validator]: + """ + Retrieve the validator class appropriate for validating the given schema. 
+ + Uses the :kw:`$schema` keyword that should be present in the given + schema to look up the appropriate validator class. + + Arguments: + + schema (collections.abc.Mapping or bool): + + the schema to look at + + default: + + the default to return if the appropriate validator class + cannot be determined. + + If unprovided, the default is to return the latest supported + draft. + + Examples: + + The :kw:`$schema` JSON Schema keyword will control which validator + class is returned: + + >>> schema = { + ... "$schema": "https://json-schema.org/draft/2020-12/schema", + ... "type": "integer", + ... } + >>> jsonschema.validators.validator_for(schema) + + + + Here, a draft 7 schema instead will return the draft 7 validator: + + >>> schema = { + ... "$schema": "http://json-schema.org/draft-07/schema#", + ... "type": "integer", + ... } + >>> jsonschema.validators.validator_for(schema) + + + + Schemas with no ``$schema`` keyword will fallback to the default + argument: + + >>> schema = {"type": "integer"} + >>> jsonschema.validators.validator_for( + ... schema, default=Draft7Validator, + ... ) + + + or if none is provided, to the latest version supported. + Always including the keyword when authoring schemas is highly + recommended. + + """ + DefaultValidator = _LATEST_VERSION if default is _UNSET else default + + if schema is True or schema is False or "$schema" not in schema: + return DefaultValidator + if schema["$schema"] not in _META_SCHEMAS and default is _UNSET: + warn( + ( + "The metaschema specified by $schema was not found. " + "Using the latest draft to validate, but this will raise " + "an error in the future." 
+ ), + DeprecationWarning, + stacklevel=2, + ) + return _META_SCHEMAS.get(schema["$schema"], DefaultValidator) diff --git a/vllm/lib/python3.10/site-packages/pydantic-2.10.3.dist-info/INSTALLER b/vllm/lib/python3.10/site-packages/pydantic-2.10.3.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/vllm/lib/python3.10/site-packages/pydantic-2.10.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/vllm/lib/python3.10/site-packages/pydantic-2.10.3.dist-info/METADATA b/vllm/lib/python3.10/site-packages/pydantic-2.10.3.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..fd79a4e4968a128a5bff849d9bd8d43078061ca6 --- /dev/null +++ b/vllm/lib/python3.10/site-packages/pydantic-2.10.3.dist-info/METADATA @@ -0,0 +1,1661 @@ +Metadata-Version: 2.3 +Name: pydantic +Version: 2.10.3 +Summary: Data validation using Python type hints +Project-URL: Homepage, https://github.com/pydantic/pydantic +Project-URL: Documentation, https://docs.pydantic.dev +Project-URL: Funding, https://github.com/sponsors/samuelcolvin +Project-URL: Source, https://github.com/pydantic/pydantic +Project-URL: Changelog, https://docs.pydantic.dev/latest/changelog/ +Author-email: Samuel Colvin , Eric Jolibois , Hasan Ramezani , Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>, Terrence Dorsey , David Montague , Serge Matveenko , Marcelo Trylesinski , Sydney Runkle , David Hewitt , Alex Hall , Victorien Plot +License: MIT +Classifier: Development Status :: 5 - Production/Stable +Classifier: Framework :: Hypothesis +Classifier: Framework :: Pydantic +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Information Technology +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming 
Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.8 +Requires-Dist: annotated-types>=0.6.0 +Requires-Dist: pydantic-core==2.27.1 +Requires-Dist: typing-extensions>=4.12.2 +Provides-Extra: email +Requires-Dist: email-validator>=2.0.0; extra == 'email' +Provides-Extra: timezone +Requires-Dist: tzdata; (python_version >= '3.9' and platform_system == 'Windows') and extra == 'timezone' +Description-Content-Type: text/markdown + +# Pydantic +[![CI](https://img.shields.io/github/actions/workflow/status/pydantic/pydantic/ci.yml?branch=main&logo=github&label=CI)](https://github.com/pydantic/pydantic/actions?query=event%3Apush+branch%3Amain+workflow%3ACI) +[![Coverage](https://coverage-badge.samuelcolvin.workers.dev/pydantic/pydantic.svg)](https://coverage-badge.samuelcolvin.workers.dev/redirect/pydantic/pydantic) +[![pypi](https://img.shields.io/pypi/v/pydantic.svg)](https://pypi.python.org/pypi/pydantic) +[![CondaForge](https://img.shields.io/conda/v/conda-forge/pydantic.svg)](https://anaconda.org/conda-forge/pydantic) +[![downloads](https://static.pepy.tech/badge/pydantic/month)](https://pepy.tech/project/pydantic) +[![versions](https://img.shields.io/pypi/pyversions/pydantic.svg)](https://github.com/pydantic/pydantic) +[![license](https://img.shields.io/github/license/pydantic/pydantic.svg)](https://github.com/pydantic/pydantic/blob/main/LICENSE) +[![Pydantic 
v2](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/pydantic/pydantic/main/docs/badge/v2.json)](https://docs.pydantic.dev/latest/contributing/#badges) + +Data validation using Python type hints. + +Fast and extensible, Pydantic plays nicely with your linters/IDE/brain. +Define how data should be in pure, canonical Python 3.8+; validate it with Pydantic. + +## Pydantic Logfire :fire: + +We've recently launched Pydantic Logfire to help you monitor your applications. +[Learn more](https://pydantic.dev/articles/logfire-announcement) + +## Pydantic V1.10 vs. V2 + +Pydantic V2 is a ground-up rewrite that offers many new features, performance improvements, and some breaking changes compared to Pydantic V1. + +If you're using Pydantic V1 you may want to look at the +[pydantic V1.10 Documentation](https://docs.pydantic.dev/) or, +[`1.10.X-fixes` git branch](https://github.com/pydantic/pydantic/tree/1.10.X-fixes). Pydantic V2 also ships with the latest version of Pydantic V1 built in so that you can incrementally upgrade your code base and projects: `from pydantic import v1 as pydantic_v1`. + +## Help + +See [documentation](https://docs.pydantic.dev/) for more details. + +## Installation + +Install using `pip install -U pydantic` or `conda install pydantic -c conda-forge`. +For more installation options to make Pydantic even faster, +see the [Install](https://docs.pydantic.dev/install/) section in the documentation. 
+ +## A Simple Example + +```python +from datetime import datetime +from typing import List, Optional +from pydantic import BaseModel + +class User(BaseModel): + id: int + name: str = 'John Doe' + signup_ts: Optional[datetime] = None + friends: List[int] = [] + +external_data = {'id': '123', 'signup_ts': '2017-06-01 12:22', 'friends': [1, '2', b'3']} +user = User(**external_data) +print(user) +#> User id=123 name='John Doe' signup_ts=datetime.datetime(2017, 6, 1, 12, 22) friends=[1, 2, 3] +print(user.id) +#> 123 +``` + +## Contributing + +For guidance on setting up a development environment and how to make a +contribution to Pydantic, see +[Contributing to Pydantic](https://docs.pydantic.dev/contributing/). + +## Reporting a Security Vulnerability + +See our [security policy](https://github.com/pydantic/pydantic/security/policy). + +## Changelog + +## v2.10.3 (2024-12-03) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.10.3) + +### What's Changed + +#### Fixes + +* Set fields when `defer_build` is set on Pydantic dataclasses by [@Viicos](https://github.com/Viicos) in [#10984](https://github.com/pydantic/pydantic/pull/10984) +* Do not resolve the JSON Schema reference for `dict` core schema keys by [@Viicos](https://github.com/Viicos) in [#10989](https://github.com/pydantic/pydantic/pull/10989) +* Use the globals of the function when evaluating the return type for `PlainSerializer` and `WrapSerializer` functions by [@Viicos](https://github.com/Viicos) in [#11008](https://github.com/pydantic/pydantic/pull/11008) +* Fix host required enforcement for urls to be compatible with v2.9 behavior by [@sydney-runkle](https://github.com/sydney-runkle) in [#11027](https://github.com/pydantic/pydantic/pull/11027) +* Add a `default_factory_takes_validated_data` property to `FieldInfo` by [@Viicos](https://github.com/Viicos) in [#11034](https://github.com/pydantic/pydantic/pull/11034) +* Fix url json schema in `serialization` mode by 
[@sydney-runkle](https://github.com/sydney-runkle) in [#11035](https://github.com/pydantic/pydantic/pull/11035) + +## v2.10.2 (2024-11-25) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.10.2) + +### What's Changed + +#### Fixes + +* Only evaluate FieldInfo annotations if required during schema building by [@Viicos](https://github.com/Viicos) in [#10769](https://github.com/pydantic/pydantic/pull/10769) +* Do not evaluate annotations for private fields by [@Viicos](https://github.com/Viicos) in [#10962](https://github.com/pydantic/pydantic/pull/10962) +* Support serialization as any for `Secret` types and `Url` types by [@sydney-runkle](https://github.com/sydney-runkle) in [#10947](https://github.com/pydantic/pydantic/pull/10947) +* Fix type hint of `Field.default` to be compatible with Python 3.8 and 3.9 by [@Viicos](https://github.com/Viicos) in [#10972](https://github.com/pydantic/pydantic/pull/10972) +* Add hashing support for URL types by [@sydney-runkle](https://github.com/sydney-runkle) in [#10975](https://github.com/pydantic/pydantic/pull/10975) +* Hide `BaseModel.__replace__` definition from type checkers by [@Viicos](https://github.com/Viicos) in [10979](https://github.com/pydantic/pydantic/pull/10979) + +## v2.10.1 (2024-11-21) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.10.1) + +### What's Changed + +#### Packaging + +* Bump `pydantic-core` version to `v2.27.1` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10938](https://github.com/pydantic/pydantic/pull/10938) + +#### Fixes + +* Use the correct frame when instantiating a parametrized `TypeAdapter` by [@Viicos](https://github.com/Viicos) in [#10893](https://github.com/pydantic/pydantic/pull/10893) +* Relax check for validated data in `default_factory` utils by [@sydney-runkle](https://github.com/sydney-runkle) in [#10909](https://github.com/pydantic/pydantic/pull/10909) +* Fix type checking issue with `model_fields` and 
`model_computed_fields` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10911](https://github.com/pydantic/pydantic/pull/10911) +* Use the parent configuration during schema generation for stdlib `dataclass`es by [@sydney-runkle](https://github.com/sydney-runkle) in [#10928](https://github.com/pydantic/pydantic/pull/10928) +* Use the `globals` of the function when evaluating the return type of serializers and `computed_field`s by [@Viicos](https://github.com/Viicos) in [#10929](https://github.com/pydantic/pydantic/pull/10929) +* Fix URL constraint application by [@sydney-runkle](https://github.com/sydney-runkle) in [#10922](https://github.com/pydantic/pydantic/pull/10922) +* Fix URL equality with different validation methods by [@sydney-runkle](https://github.com/sydney-runkle) in [#10934](https://github.com/pydantic/pydantic/pull/10934) +* Fix JSON schema title when specified as `''` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10936](https://github.com/pydantic/pydantic/pull/10936) +* Fix `python` mode serialization for `complex` inference by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic-core#1549](https://github.com/pydantic/pydantic-core/pull/1549) + +## v2.10.0 (2024-11-20) + +The code released in v2.10.0 is practically identical to that of v2.10.0b2. + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.10.0) + +See the [v2.10 release blog post](https://pydantic.dev/articles/pydantic-v2-10-release) for the highlights! 
+ +### What's Changed + +#### Packaging + +* Bump `pydantic-core` to `v2.27.0` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10825](https://github.com/pydantic/pydantic/pull/10825) +* Replaced pdm with uv by [@frfahim](https://github.com/frfahim) in [#10727](https://github.com/pydantic/pydantic/pull/10727) + +#### New Features + +* Support `fractions.Fraction` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10318](https://github.com/pydantic/pydantic/pull/10318) +* Support `Hashable` for json validation by [@sydney-runkle](https://github.com/sydney-runkle) in [#10324](https://github.com/pydantic/pydantic/pull/10324) +* Add a `SocketPath` type for `linux` systems by [@theunkn0wn1](https://github.com/theunkn0wn1) in [#10378](https://github.com/pydantic/pydantic/pull/10378) +* Allow arbitrary refs in JSON schema `examples` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10417](https://github.com/pydantic/pydantic/pull/10417) +* Support `defer_build` for Pydantic dataclasses by [@Viicos](https://github.com/Viicos) in [#10313](https://github.com/pydantic/pydantic/pull/10313) +* Adding v1 / v2 incompatibility warning for nested v1 model by [@sydney-runkle](https://github.com/sydney-runkle) in [#10431](https://github.com/pydantic/pydantic/pull/10431) +* Add support for unpacked `TypedDict` to type hint variadic keyword arguments with `@validate_call` by [@Viicos](https://github.com/Viicos) in [#10416](https://github.com/pydantic/pydantic/pull/10416) +* Support compiled patterns in `protected_namespaces` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10522](https://github.com/pydantic/pydantic/pull/10522) +* Add support for `propertyNames` in JSON schema by [@FlorianSW](https://github.com/FlorianSW) in [#10478](https://github.com/pydantic/pydantic/pull/10478) +* Adding `__replace__` protocol for Python 3.13+ support by [@sydney-runkle](https://github.com/sydney-runkle) in 
[#10596](https://github.com/pydantic/pydantic/pull/10596) +* Expose public `sort` method for JSON schema generation by [@sydney-runkle](https://github.com/sydney-runkle) in [#10595](https://github.com/pydantic/pydantic/pull/10595) +* Add runtime validation of `@validate_call` callable argument by [@kc0506](https://github.com/kc0506) in [#10627](https://github.com/pydantic/pydantic/pull/10627) +* Add `experimental_allow_partial` support by [@samuelcolvin](https://github.com/samuelcolvin) in [#10748](https://github.com/pydantic/pydantic/pull/10748) +* Support default factories taking validated data as an argument by [@Viicos](https://github.com/Viicos) in [#10678](https://github.com/pydantic/pydantic/pull/10678) +* Allow subclassing `ValidationError` and `PydanticCustomError` by [@Youssefares](https://github.com/Youssefares) in [pydantic/pydantic-core#1413](https://github.com/pydantic/pydantic-core/pull/1413) +* Add `trailing-strings` support to `experimental_allow_partial` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10825](https://github.com/pydantic/pydantic/pull/10825) +* Add `rebuild()` method for `TypeAdapter` and simplify `defer_build` patterns by [@sydney-runkle](https://github.com/sydney-runkle) in [#10537](https://github.com/pydantic/pydantic/pull/10537) +* Improve `TypeAdapter` instance repr by [@sydney-runkle](https://github.com/sydney-runkle) in [#10872](https://github.com/pydantic/pydantic/pull/10872) + +#### Changes + +* Don't allow customization of `SchemaGenerator` until interface is more stable by [@sydney-runkle](https://github.com/sydney-runkle) in [#10303](https://github.com/pydantic/pydantic/pull/10303) +* Cleanly `defer_build` on `TypeAdapters`, removing experimental flag by [@sydney-runkle](https://github.com/sydney-runkle) in [#10329](https://github.com/pydantic/pydantic/pull/10329) +* Fix `mro` of generic subclass by [@kc0506](https://github.com/kc0506) in [#10100](https://github.com/pydantic/pydantic/pull/10100) +* Strip 
whitespaces on JSON Schema title generation by [@sydney-runkle](https://github.com/sydney-runkle) in [#10404](https://github.com/pydantic/pydantic/pull/10404) +* Use `b64decode` and `b64encode` for `Base64Bytes` type by [@sydney-runkle](https://github.com/sydney-runkle) in [#10486](https://github.com/pydantic/pydantic/pull/10486) +* Relax protected namespace config default by [@sydney-runkle](https://github.com/sydney-runkle) in [#10441](https://github.com/pydantic/pydantic/pull/10441) +* Revalidate parametrized generics if instance's origin is subclass of OG class by [@sydney-runkle](https://github.com/sydney-runkle) in [#10666](https://github.com/pydantic/pydantic/pull/10666) +* Warn if configuration is specified on the `@dataclass` decorator and with the `__pydantic_config__` attribute by [@sydney-runkle](https://github.com/sydney-runkle) in [#10406](https://github.com/pydantic/pydantic/pull/10406) +* Recommend against using `Ellipsis` (...) with `Field` by [@Viicos](https://github.com/Viicos) in [#10661](https://github.com/pydantic/pydantic/pull/10661) +* Migrate to subclassing instead of annotated approach for pydantic url types by [@sydney-runkle](https://github.com/sydney-runkle) in [#10662](https://github.com/pydantic/pydantic/pull/10662) +* Change JSON schema generation of `Literal`s and `Enums` by [@Viicos](https://github.com/Viicos) in [#10692](https://github.com/pydantic/pydantic/pull/10692) +* Simplify unions involving `Any` or `Never` when replacing type variables by [@Viicos](https://github.com/Viicos) in [#10338](https://github.com/pydantic/pydantic/pull/10338) +* Do not require padding when decoding `base64` bytes by [@bschoenmaeckers](https://github.com/bschoenmaeckers) in [pydantic/pydantic-core#1448](https://github.com/pydantic/pydantic-core/pull/1448) +* Support dates all the way to 1BC by [@changhc](https://github.com/changhc) in [pydantic/speedate#77](https://github.com/pydantic/speedate/pull/77) + +#### Performance + +* Schema cleaning: skip 
unnecessary copies during schema walking by [@Viicos](https://github.com/Viicos) in [#10286](https://github.com/pydantic/pydantic/pull/10286) +* Refactor namespace logic for annotations evaluation by [@Viicos](https://github.com/Viicos) in [#10530](https://github.com/pydantic/pydantic/pull/10530) +* Improve email regexp on edge cases by [@AlekseyLobanov](https://github.com/AlekseyLobanov) in [#10601](https://github.com/pydantic/pydantic/pull/10601) +* `CoreMetadata` refactor with an emphasis on documentation, schema build time performance, and reducing complexity by [@sydney-runkle](https://github.com/sydney-runkle) in [#10675](https://github.com/pydantic/pydantic/pull/10675) + +#### Fixes + +* Remove guarding check on `computed_field` with `field_serializer` by [@nix010](https://github.com/nix010) in [#10390](https://github.com/pydantic/pydantic/pull/10390) +* Fix `Predicate` issue in `v2.9.0` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10321](https://github.com/pydantic/pydantic/pull/10321) +* Fixing `annotated-types` bound by [@sydney-runkle](https://github.com/sydney-runkle) in [#10327](https://github.com/pydantic/pydantic/pull/10327) +* Turn `tzdata` install requirement into optional `timezone` dependency by [@jakob-keller](https://github.com/jakob-keller) in [#10331](https://github.com/pydantic/pydantic/pull/10331) +* Use correct types namespace when building `namedtuple` core schemas by [@Viicos](https://github.com/Viicos) in [#10337](https://github.com/pydantic/pydantic/pull/10337) +* Fix evaluation of stringified annotations during namespace inspection by [@Viicos](https://github.com/Viicos) in [#10347](https://github.com/pydantic/pydantic/pull/10347) +* Fix `IncEx` type alias definition by [@Viicos](https://github.com/Viicos) in [#10339](https://github.com/pydantic/pydantic/pull/10339) +* Do not error when trying to evaluate annotations of private attributes by [@Viicos](https://github.com/Viicos) in 
[#10358](https://github.com/pydantic/pydantic/pull/10358) +* Fix nested type statement by [@kc0506](https://github.com/kc0506) in [#10369](https://github.com/pydantic/pydantic/pull/10369) +* Improve typing of `ModelMetaclass.mro` by [@Viicos](https://github.com/Viicos) in [#10372](https://github.com/pydantic/pydantic/pull/10372) +* Fix class access of deprecated `computed_field`s by [@Viicos](https://github.com/Viicos) in [#10391](https://github.com/pydantic/pydantic/pull/10391) +* Make sure `inspect.iscoroutinefunction` works on coroutines decorated with `@validate_call` by [@MovisLi](https://github.com/MovisLi) in [#10374](https://github.com/pydantic/pydantic/pull/10374) +* Fix `NameError` when using `validate_call` with PEP 695 on a class by [@kc0506](https://github.com/kc0506) in [#10380](https://github.com/pydantic/pydantic/pull/10380) +* Fix `ZoneInfo` with various invalid types by [@sydney-runkle](https://github.com/sydney-runkle) in [#10408](https://github.com/pydantic/pydantic/pull/10408) +* Fix `PydanticUserError` on empty `model_config` with annotations by [@cdwilson](https://github.com/cdwilson) in [#10412](https://github.com/pydantic/pydantic/pull/10412) +* Fix variance issue in `_IncEx` type alias, only allow `True` by [@Viicos](https://github.com/Viicos) in [#10414](https://github.com/pydantic/pydantic/pull/10414) +* Fix serialization schema generation when using `PlainValidator` by [@Viicos](https://github.com/Viicos) in [#10427](https://github.com/pydantic/pydantic/pull/10427) +* Fix schema generation error when serialization schema holds references by [@Viicos](https://github.com/Viicos) in [#10444](https://github.com/pydantic/pydantic/pull/10444) +* Inline references if possible when generating schema for `json_schema_input_type` by [@Viicos](https://github.com/Viicos) in [#10439](https://github.com/pydantic/pydantic/pull/10439) +* Fix recursive arguments in `Representation` by [@Viicos](https://github.com/Viicos) in 
[#10480](https://github.com/pydantic/pydantic/pull/10480) +* Fix representation for builtin function types by [@kschwab](https://github.com/kschwab) in [#10479](https://github.com/pydantic/pydantic/pull/10479) +* Add python validators for decimal constraints (`max_digits` and `decimal_places`) by [@sydney-runkle](https://github.com/sydney-runkle) in [#10506](https://github.com/pydantic/pydantic/pull/10506) +* Only fetch `__pydantic_core_schema__` from the current class during schema generation by [@Viicos](https://github.com/Viicos) in [#10518](https://github.com/pydantic/pydantic/pull/10518) +* Fix `stacklevel` on deprecation warnings for `BaseModel` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10520](https://github.com/pydantic/pydantic/pull/10520) +* Fix warning `stacklevel` in `BaseModel.__init__` by [@Viicos](https://github.com/Viicos) in [#10526](https://github.com/pydantic/pydantic/pull/10526) +* Improve error handling for in-evaluable refs for discriminator application by [@sydney-runkle](https://github.com/sydney-runkle) in [#10440](https://github.com/pydantic/pydantic/pull/10440) +* Change the signature of `ConfigWrapper.core_config` to take the title directly by [@Viicos](https://github.com/Viicos) in [#10562](https://github.com/pydantic/pydantic/pull/10562) +* Do not use the previous config from the stack for dataclasses without config by [@Viicos](https://github.com/Viicos) in [#10576](https://github.com/pydantic/pydantic/pull/10576) +* Fix serialization for IP types with `mode='python'` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10594](https://github.com/pydantic/pydantic/pull/10594) +* Support constraint application for `Base64Etc` types by [@sydney-runkle](https://github.com/sydney-runkle) in [#10584](https://github.com/pydantic/pydantic/pull/10584) +* Fix `validate_call` ignoring `Field` in `Annotated` by [@kc0506](https://github.com/kc0506) in [#10610](https://github.com/pydantic/pydantic/pull/10610) +* Raise an error 
when `Self` is invalid by [@kc0506](https://github.com/kc0506) in [#10609](https://github.com/pydantic/pydantic/pull/10609) +* Using `core_schema.InvalidSchema` instead of metadata injection + checks by [@sydney-runkle](https://github.com/sydney-runkle) in [#10523](https://github.com/pydantic/pydantic/pull/10523) +* Tweak type alias logic by [@kc0506](https://github.com/kc0506) in [#10643](https://github.com/pydantic/pydantic/pull/10643) +* Support usage of `type` with `typing.Self` and type aliases by [@kc0506](https://github.com/kc0506) in [#10621](https://github.com/pydantic/pydantic/pull/10621) +* Use overloads for `Field` and `PrivateAttr` functions by [@Viicos](https://github.com/Viicos) in [#10651](https://github.com/pydantic/pydantic/pull/10651) +* Clean up the `mypy` plugin implementation by [@Viicos](https://github.com/Viicos) in [#10669](https://github.com/pydantic/pydantic/pull/10669) +* Properly check for `typing_extensions` variant of `TypeAliasType` by [@Daraan](https://github.com/Daraan) in [#10713](https://github.com/pydantic/pydantic/pull/10713) +* Allow any mapping in `BaseModel.model_copy()` by [@Viicos](https://github.com/Viicos) in [#10751](https://github.com/pydantic/pydantic/pull/10751) +* Fix `isinstance` behavior for urls by [@sydney-runkle](https://github.com/sydney-runkle) in [#10766](https://github.com/pydantic/pydantic/pull/10766) +* Ensure `cached_property` can be set on Pydantic models by [@Viicos](https://github.com/Viicos) in [#10774](https://github.com/pydantic/pydantic/pull/10774) +* Fix equality checks for primitives in literals by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1459](https://github.com/pydantic/pydantic-core/pull/1459) +* Properly enforce `host_required` for URLs by [@Viicos](https://github.com/Viicos) in [pydantic/pydantic-core#1488](https://github.com/pydantic/pydantic-core/pull/1488) +* Fix when `coerce_numbers_to_str` enabled and string has invalid Unicode character by 
[@andrey-berenda](https://github.com/andrey-berenda) in [pydantic/pydantic-core#1515](https://github.com/pydantic/pydantic-core/pull/1515) +* Fix serializing `complex` values in `Enum`s by [@changhc](https://github.com/changhc) in [pydantic/pydantic-core#1524](https://github.com/pydantic/pydantic-core/pull/1524) +* Refactor `_typing_extra` module by [@Viicos](https://github.com/Viicos) in [#10725](https://github.com/pydantic/pydantic/pull/10725) +* Support intuitive equality for urls by [@sydney-runkle](https://github.com/sydney-runkle) in [#10798](https://github.com/pydantic/pydantic/pull/10798) +* Add `bytearray` to `TypeAdapter.validate_json` signature by [@samuelcolvin](https://github.com/samuelcolvin) in [#10802](https://github.com/pydantic/pydantic/pull/10802) +* Ensure class access of method descriptors is performed when used as a default with `Field` by [@Viicos](https://github.com/Viicos) in [#10816](https://github.com/pydantic/pydantic/pull/10816) +* Fix circular import with `validate_call` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10807](https://github.com/pydantic/pydantic/pull/10807) +* Fix error when using type aliases referencing other type aliases by [@Viicos](https://github.com/Viicos) in [#10809](https://github.com/pydantic/pydantic/pull/10809) +* Fix `IncEx` type alias to be compatible with mypy by [@Viicos](https://github.com/Viicos) in [#10813](https://github.com/pydantic/pydantic/pull/10813) +* Make `__signature__` a lazy property, do not deepcopy defaults by [@Viicos](https://github.com/Viicos) in [#10818](https://github.com/pydantic/pydantic/pull/10818) +* Make `__signature__` lazy for dataclasses, too by [@sydney-runkle](https://github.com/sydney-runkle) in [#10832](https://github.com/pydantic/pydantic/pull/10832) +* Subclass all single host url classes from `AnyUrl` to preserve behavior from v2.9 by [@sydney-runkle](https://github.com/sydney-runkle) in [#10856](https://github.com/pydantic/pydantic/pull/10856) + +### New 
Contributors + +* [@jakob-keller](https://github.com/jakob-keller) made their first contribution in [#10331](https://github.com/pydantic/pydantic/pull/10331) +* [@MovisLi](https://github.com/MovisLi) made their first contribution in [#10374](https://github.com/pydantic/pydantic/pull/10374) +* [@joaopalmeiro](https://github.com/joaopalmeiro) made their first contribution in [#10405](https://github.com/pydantic/pydantic/pull/10405) +* [@theunkn0wn1](https://github.com/theunkn0wn1) made their first contribution in [#10378](https://github.com/pydantic/pydantic/pull/10378) +* [@cdwilson](https://github.com/cdwilson) made their first contribution in [#10412](https://github.com/pydantic/pydantic/pull/10412) +* [@dlax](https://github.com/dlax) made their first contribution in [#10421](https://github.com/pydantic/pydantic/pull/10421) +* [@kschwab](https://github.com/kschwab) made their first contribution in [#10479](https://github.com/pydantic/pydantic/pull/10479) +* [@santibreo](https://github.com/santibreo) made their first contribution in [#10453](https://github.com/pydantic/pydantic/pull/10453) +* [@FlorianSW](https://github.com/FlorianSW) made their first contribution in [#10478](https://github.com/pydantic/pydantic/pull/10478) +* [@tkasuz](https://github.com/tkasuz) made their first contribution in [#10555](https://github.com/pydantic/pydantic/pull/10555) +* [@AlekseyLobanov](https://github.com/AlekseyLobanov) made their first contribution in [#10601](https://github.com/pydantic/pydantic/pull/10601) +* [@NiclasvanEyk](https://github.com/NiclasvanEyk) made their first contribution in [#10667](https://github.com/pydantic/pydantic/pull/10667) +* [@mschoettle](https://github.com/mschoettle) made their first contribution in [#10677](https://github.com/pydantic/pydantic/pull/10677) +* [@Daraan](https://github.com/Daraan) made their first contribution in [#10713](https://github.com/pydantic/pydantic/pull/10713) +* [@k4nar](https://github.com/k4nar) made their first 
contribution in [#10736](https://github.com/pydantic/pydantic/pull/10736) +* [@UriyaHarpeness](https://github.com/UriyaHarpeness) made their first contribution in [#10740](https://github.com/pydantic/pydantic/pull/10740) +* [@frfahim](https://github.com/frfahim) made their first contribution in [#10727](https://github.com/pydantic/pydantic/pull/10727) + +## v2.10.0b2 (2024-11-13) + +Pre-release, see [the GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.10.0b2) for details. + +## v2.10.0b1 (2024-11-06) + +Pre-release, see [the GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.9.0b1) for details. + +## v2.9.2 (2024-09-17) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.9.2) + +### What's Changed + +#### Fixes +* Do not error when trying to evaluate annotations of private attributes by [@Viicos](https://github.com/Viicos) in [#10358](https://github.com/pydantic/pydantic/pull/10358) +* Adding notes on designing sound `Callable` discriminators by [@sydney-runkle](https://github.com/sydney-runkle) in [#10400](https://github.com/pydantic/pydantic/pull/10400) +* Fix serialization schema generation when using `PlainValidator` by [@Viicos](https://github.com/Viicos) in [#10427](https://github.com/pydantic/pydantic/pull/10427) +* Fix `Union` serialization warnings by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1449](https://github.com/pydantic/pydantic-core/pull/1449) +* Fix variance issue in `_IncEx` type alias, only allow `True` by [@Viicos](https://github.com/Viicos) in [#10414](https://github.com/pydantic/pydantic/pull/10414) +* Fix `ZoneInfo` validation with various invalid types by [@sydney-runkle](https://github.com/sydney-runkle) in [#10408](https://github.com/pydantic/pydantic/pull/10408) + +## v2.9.1 (2024-09-09) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.9.1) + +### What's Changed + +#### Fixes +* Fix Predicate issue in v2.9.0 by 
[@sydney-runkle](https://github.com/sydney-runkle) in [#10321](https://github.com/pydantic/pydantic/pull/10321) +* Fixing `annotated-types` bound to `>=0.6.0` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10327](https://github.com/pydantic/pydantic/pull/10327) +* Turn `tzdata` install requirement into optional `timezone` dependency by [@jakob-keller](https://github.com/jakob-keller) in [#10331](https://github.com/pydantic/pydantic/pull/10331) +* Fix `IncExc` type alias definition by [@Viicos](https://github.com/Viicos) in [#10339](https://github.com/pydantic/pydantic/pull/10339) +* Use correct types namespace when building namedtuple core schemas by [@Viicos](https://github.com/Viicos) in [#10337](https://github.com/pydantic/pydantic/pull/10337) +* Fix evaluation of stringified annotations during namespace inspection by [@Viicos](https://github.com/Viicos) in [#10347](https://github.com/pydantic/pydantic/pull/10347) +* Fix tagged union serialization with alias generators by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1442](https://github.com/pydantic/pydantic-core/pull/1442) + +## v2.9.0 (2024-09-05) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.9.0) + +The code released in v2.9.0 is practically identical to that of v2.9.0b2. 
+ +### What's Changed + +#### Packaging + +* Bump `ruff` to `v0.5.0` and `pyright` to `v1.1.369` by [@sydney-runkle](https://github.com/sydney-runkle) in [#9801](https://github.com/pydantic/pydantic/pull/9801) +* Bump `pydantic-extra-types` to `v2.9.0` by [@sydney-runkle](https://github.com/sydney-runkle) in [#9832](https://github.com/pydantic/pydantic/pull/9832) +* Support compatibility with `pdm v2.18.1` by [@Viicos](https://github.com/Viicos) in [#10138](https://github.com/pydantic/pydantic/pull/10138) +* Bump `v1` version stub to `v1.10.18` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10214](https://github.com/pydantic/pydantic/pull/10214) +* Bump `pydantic-core` to `v2.23.2` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10311](https://github.com/pydantic/pydantic/pull/10311) + +#### New Features + +* Add support for `ZoneInfo` by [@Youssefares](https://github.com/Youssefares) in [#9896](https://github.com/pydantic/pydantic/pull/9896) +* Add `Config.val_json_bytes` by [@josh-newman](https://github.com/josh-newman) in [#9770](https://github.com/pydantic/pydantic/pull/9770) +* Add DSN for Snowflake by [@aditkumar72](https://github.com/aditkumar72) in [#10128](https://github.com/pydantic/pydantic/pull/10128) +* Support `complex` number by [@changhc](https://github.com/changhc) in [#9654](https://github.com/pydantic/pydantic/pull/9654) +* Add support for `annotated_types.Not` by [@aditkumar72](https://github.com/aditkumar72) in [#10210](https://github.com/pydantic/pydantic/pull/10210) +* Allow `WithJsonSchema` to inject `$ref`s w/ `http` or `https` links by [@dAIsySHEng1](https://github.com/dAIsySHEng1) in [#9863](https://github.com/pydantic/pydantic/pull/9863) +* Allow validators to customize validation JSON schema by [@Viicos](https://github.com/Viicos) in [#10094](https://github.com/pydantic/pydantic/pull/10094) +* Support parametrized `PathLike` types by [@nix010](https://github.com/nix010) in 
[#9764](https://github.com/pydantic/pydantic/pull/9764) +* Add tagged union serializer that attempts to use `str` or `callable` discriminators to select the correct serializer by [@sydney-runkle](https://github.com/sydney-runkle) in in [pydantic/pydantic-core#1397](https://github.com/pydantic/pydantic-core/pull/1397) + +#### Changes + +* Breaking Change: Merge `dict` type `json_schema_extra` by [@sydney-runkle](https://github.com/sydney-runkle) in [#9792](https://github.com/pydantic/pydantic/pull/9792) + * For more info (how to replicate old behavior) on this change, see [here](https://docs.pydantic.dev/dev/concepts/json_schema/#merging-json_schema_extra) +* Refactor annotation injection for known (often generic) types by [@sydney-runkle](https://github.com/sydney-runkle) in [#9979](https://github.com/pydantic/pydantic/pull/9979) +* Move annotation compatibility errors to validation phase by [@sydney-runkle](https://github.com/sydney-runkle) in [#9999](https://github.com/pydantic/pydantic/pull/9999) +* Improve runtime errors for string constraints like `pattern` for incompatible types by [@sydney-runkle](https://github.com/sydney-runkle) in [#10158](https://github.com/pydantic/pydantic/pull/10158) +* Remove `'allOf'` JSON schema workarounds by [@dpeachey](https://github.com/dpeachey) in [#10029](https://github.com/pydantic/pydantic/pull/10029) +* Remove `typed_dict_cls` data from `CoreMetadata` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10180](https://github.com/pydantic/pydantic/pull/10180) +* Deprecate passing a dict to the `Examples` class by [@Viicos](https://github.com/Viicos) in [#10181](https://github.com/pydantic/pydantic/pull/10181) +* Remove `initial_metadata` from internal metadata construct by [@sydney-runkle](https://github.com/sydney-runkle) in [#10194](https://github.com/pydantic/pydantic/pull/10194) +* Use `re.Pattern.search` instead of `re.Pattern.match` for consistency with `rust` behavior by [@tinez](https://github.com/tinez) in 
[pydantic/pydantic-core#1368](https://github.com/pydantic/pydantic-core/pull/1368) +* Show value of wrongly typed data in `pydantic-core` serialization warning by [@BoxyUwU](https://github.com/BoxyUwU) in [pydantic/pydantic-core#1377](https://github.com/pydantic/pydantic-core/pull/1377) +* Breaking Change: in `pydantic-core`, change `metadata` type hint in core schemas from `Any` -> `Dict[str, Any] | None` by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1411](https://github.com/pydantic/pydantic-core/pull/1411) +* Raise helpful warning when `self` isn't returned from model validator by [@sydney-runkle](https://github.com/sydney-runkle) in [#10255](https://github.com/pydantic/pydantic/pull/10255) + +#### Performance + +* Initial start at improving import times for modules, using caching primarily by [@sydney-runkle](https://github.com/sydney-runkle) in [#10009](https://github.com/pydantic/pydantic/pull/10009) +* Using cached internal import for `BaseModel` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10013](https://github.com/pydantic/pydantic/pull/10013) +* Simplify internal generics logic - remove generator overhead by [@sydney-runkle](https://github.com/sydney-runkle) in [#10059](https://github.com/pydantic/pydantic/pull/10059) +* Remove default module globals from types namespace by [@sydney-runkle](https://github.com/sydney-runkle) in [#10123](https://github.com/pydantic/pydantic/pull/10123) +* Performance boost: skip caching parent namespaces in most cases by [@sydney-runkle](https://github.com/sydney-runkle) in [#10113](https://github.com/pydantic/pydantic/pull/10113) +* Update ns stack with already copied ns by [@sydney-runkle](https://github.com/sydney-runkle) in [#10267](https://github.com/pydantic/pydantic/pull/10267) + +##### Minor Internal Improvements +* ⚡️ Speed up `multiple_of_validator()` by 31% in `pydantic/_internal/_validators.py` by [@misrasaurabh1](https://github.com/misrasaurabh1) in 
[#9839](https://github.com/pydantic/pydantic/pull/9839) +* ⚡️ Speed up `ModelPrivateAttr.__set_name__()` by 18% in `pydantic/fields.py` by [@misrasaurabh1](https://github.com/misrasaurabh1) in [#9841](https://github.com/pydantic/pydantic/pull/9841) +* ⚡️ Speed up `dataclass()` by 7% in `pydantic/dataclasses.py` by [@misrasaurabh1](https://github.com/misrasaurabh1) in [#9843](https://github.com/pydantic/pydantic/pull/9843) +* ⚡️ Speed up function `_field_name_for_signature` by 37% in `pydantic/_internal/_signature.py` by [@misrasaurabh1](https://github.com/misrasaurabh1) in [#9951](https://github.com/pydantic/pydantic/pull/9951) +* ⚡️ Speed up method `GenerateSchema._unpack_refs_defs` by 26% in `pydantic/_internal/_generate_schema.py` by [@misrasaurabh1](https://github.com/misrasaurabh1) in [#9949](https://github.com/pydantic/pydantic/pull/9949) +* ⚡️ Speed up function `apply_each_item_validators` by 100% in `pydantic/_internal/_generate_schema.py` by [@misrasaurabh1](https://github.com/misrasaurabh1) in [#9950](https://github.com/pydantic/pydantic/pull/9950) +* ⚡️ Speed up method `ConfigWrapper.core_config` by 28% in `pydantic/_internal/_config.py` by [@misrasaurabh1](https://github.com/misrasaurabh1) in [#9953](https://github.com/pydantic/pydantic/pull/9953) + +#### Fixes + +* Respect `use_enum_values` on `Literal` types by [@kwint](https://github.com/kwint) in [#9787](https://github.com/pydantic/pydantic/pull/9787) +* Prevent type error for exotic `BaseModel/RootModel` inheritance by [@dmontagu](https://github.com/dmontagu) in [#9913](https://github.com/pydantic/pydantic/pull/9913) +* Fix typing issue with field_validator-decorated methods by [@dmontagu](https://github.com/dmontagu) in [#9914](https://github.com/pydantic/pydantic/pull/9914) +* Replace `str` type annotation with `Any` in validator factories in documentation on validators by [@maximilianfellhuber](https://github.com/maximilianfellhuber) in [#9885](https://github.com/pydantic/pydantic/pull/9885) +* 
Fix `ComputedFieldInfo.wrapped_property` pointer when a property setter is assigned by [@tlambert03](https://github.com/tlambert03) in [#9892](https://github.com/pydantic/pydantic/pull/9892) +* Fix recursive typing of `main.IncEnx` by [@tlambert03](https://github.com/tlambert03) in [#9924](https://github.com/pydantic/pydantic/pull/9924) +* Allow usage of `type[Annotated[...]]` by [@Viicos](https://github.com/Viicos) in [#9932](https://github.com/pydantic/pydantic/pull/9932) +* `mypy` plugin: handle frozen fields on a per-field basis by [@dmontagu](https://github.com/dmontagu) in [#9935](https://github.com/pydantic/pydantic/pull/9935) +* Fix typo in `invalid-annotated-type` error code by [@sydney-runkle](https://github.com/sydney-runkle) in [#9948](https://github.com/pydantic/pydantic/pull/9948) +* Simplify schema generation for `uuid`, `url`, and `ip` types by [@sydney-runkle](https://github.com/sydney-runkle) in [#9975](https://github.com/pydantic/pydantic/pull/9975) +* Move `date` schemas to `_generate_schema.py` by [@sydney-runkle](https://github.com/sydney-runkle) in [#9976](https://github.com/pydantic/pydantic/pull/9976) +* Move `decimal.Decimal` validation to `_generate_schema.py` by [@sydney-runkle](https://github.com/sydney-runkle) in [#9977](https://github.com/pydantic/pydantic/pull/9977) +* Simplify IP address schema in `_std_types_schema.py` by [@sydney-runkle](https://github.com/sydney-runkle) in [#9959](https://github.com/pydantic/pydantic/pull/9959) +* Fix type annotations for some potentially generic `GenerateSchema.match_type` options by [@sydney-runkle](https://github.com/sydney-runkle) in [#9961](https://github.com/pydantic/pydantic/pull/9961) +* Add class name to "has conflict" warnings by [@msabramo](https://github.com/msabramo) in [#9964](https://github.com/pydantic/pydantic/pull/9964) +* Fix `dataclass` ignoring `default_factory` passed in Annotated by [@kc0506](https://github.com/kc0506) in 
[#9971](https://github.com/pydantic/pydantic/pull/9971) +* Fix `Sequence` ignoring `discriminator` by [@kc0506](https://github.com/kc0506) in [#9980](https://github.com/pydantic/pydantic/pull/9980) +* Fix typing for `IPvAnyAddress` and `IPvAnyInterface` by [@haoyun](https://github.com/haoyun) in [#9990](https://github.com/pydantic/pydantic/pull/9990) +* Fix false positives on v1 models in `mypy` plugin for `from_orm` check requiring from_attributes=True config by [@radekwlsk](https://github.com/radekwlsk) in [#9938](https://github.com/pydantic/pydantic/pull/9938) +* Apply `strict=True` to `__init__` in `mypy` plugin by [@kc0506](https://github.com/kc0506) in [#9998](https://github.com/pydantic/pydantic/pull/9998) +* Refactor application of `deque` annotations by [@sydney-runkle](https://github.com/sydney-runkle) in [#10018](https://github.com/pydantic/pydantic/pull/10018) +* Raise a better user error when failing to evaluate a forward reference by [@Viicos](https://github.com/Viicos) in [#10030](https://github.com/pydantic/pydantic/pull/10030) +* Fix evaluation of `__pydantic_extra__` annotation in specific circumstances by [@Viicos](https://github.com/Viicos) in [#10070](https://github.com/pydantic/pydantic/pull/10070) +* Fix `frozen` enforcement for `dataclasses` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10066](https://github.com/pydantic/pydantic/pull/10066) +* Remove logic to handle unused `__get_pydantic_core_schema__` signature by [@Viicos](https://github.com/Viicos) in [#10075](https://github.com/pydantic/pydantic/pull/10075) +* Use `is_annotated` consistently by [@Viicos](https://github.com/Viicos) in [#10095](https://github.com/pydantic/pydantic/pull/10095) +* Fix `PydanticDeprecatedSince26` typo by [@kc0506](https://github.com/kc0506) in [#10101](https://github.com/pydantic/pydantic/pull/10101) +* Improve `pyright` tests, refactor model decorators signatures by [@Viicos](https://github.com/Viicos) in 
[#10092](https://github.com/pydantic/pydantic/pull/10092) +* Fix `ip` serialization logic by [@sydney-runkle](https://github.com/sydney-runkle) in [#10112](https://github.com/pydantic/pydantic/pull/10112) +* Warn when frozen defined twice for `dataclasses` by [@mochi22](https://github.com/mochi22) in [#10082](https://github.com/pydantic/pydantic/pull/10082) +* Do not compute JSON Schema default when plain serializers are used with `when_used` set to `'json-unless-none'` and the default value is `None` by [@Viicos](https://github.com/Viicos) in [#10121](https://github.com/pydantic/pydantic/pull/10121) +* Fix `ImportString` special cases by [@sydney-runkle](https://github.com/sydney-runkle) in [#10137](https://github.com/pydantic/pydantic/pull/10137) +* Blacklist default globals to support exotic user code with `__` prefixed annotations by [@sydney-runkle](https://github.com/sydney-runkle) in [#10136](https://github.com/pydantic/pydantic/pull/10136) +* Handle `nullable` schemas with `serialization` schema available during JSON Schema generation by [@Viicos](https://github.com/Viicos) in [#10132](https://github.com/pydantic/pydantic/pull/10132) +* Reorganize `BaseModel` annotations by [@kc0506](https://github.com/kc0506) in [#10110](https://github.com/pydantic/pydantic/pull/10110) +* Fix core schema simplification when serialization schemas are involved in specific scenarios by [@Viicos](https://github.com/Viicos) in [#10155](https://github.com/pydantic/pydantic/pull/10155) +* Add support for stringified annotations when using `PrivateAttr` with `Annotated` by [@Viicos](https://github.com/Viicos) in [#10157](https://github.com/pydantic/pydantic/pull/10157) +* Fix JSON Schema `number` type for literal and enum schemas by [@Viicos](https://github.com/Viicos) in [#10172](https://github.com/pydantic/pydantic/pull/10172) +* Fix JSON Schema generation of fields with plain validators in serialization mode by [@Viicos](https://github.com/Viicos) in 
[#10167](https://github.com/pydantic/pydantic/pull/10167) +* Fix invalid JSON Schemas being generated for functions in certain scenarios by [@Viicos](https://github.com/Viicos) in [#10188](https://github.com/pydantic/pydantic/pull/10188) +* Make sure generated JSON Schemas are valid in tests by [@Viicos](https://github.com/Viicos) in [#10182](https://github.com/pydantic/pydantic/pull/10182) +* Fix key error with custom serializer by [@sydney-runkle](https://github.com/sydney-runkle) in [#10200](https://github.com/pydantic/pydantic/pull/10200) +* Add 'wss' for allowed schemes in NatsDsn by [@swelborn](https://github.com/swelborn) in [#10224](https://github.com/pydantic/pydantic/pull/10224) +* Fix `Mapping` and `MutableMapping` annotations to use mapping schema instead of dict schema by [@sydney-runkle](https://github.com/sydney-runkle) in [#10020](https://github.com/pydantic/pydantic/pull/10020) +* Fix JSON Schema generation for constrained dates by [@Viicos](https://github.com/Viicos) in [#10185](https://github.com/pydantic/pydantic/pull/10185) +* Fix discriminated union bug regression when using enums by [@kfreezen](https://github.com/kfreezen) in [pydantic/pydantic-core#1286](https://github.com/pydantic/pydantic-core/pull/1286) +* Fix `field_serializer` with computed field when using `*` by [@nix010](https://github.com/nix010) in [pydantic/pydantic-core#1349](https://github.com/pydantic/pydantic-core/pull/1349) +* Try each option in `Union` serializer before inference by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1398](https://github.com/pydantic/pydantic-core/pull/1398) +* Fix `float` serialization behavior in `strict` mode by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1400](https://github.com/pydantic/pydantic-core/pull/1400) +* Introduce `exactness` into Decimal validation logic to improve union validation behavior by [@sydney-runkle](https://github.com/sydney-runkle) in in 
[pydantic/pydantic-core#1405](https://github.com/pydantic/pydantic-core/pull/1405) +* Fix new warnings assertions to use `pytest.warns()` by [@mgorny](https://github.com/mgorny) in [#10241](https://github.com/pydantic/pydantic/pull/10241) +* Fix a crash when cleaning the namespace in `ModelMetaclass` by [@Viicos](https://github.com/Viicos) in [#10242](https://github.com/pydantic/pydantic/pull/10242) +* Fix parent namespace issue with model rebuilds by [@sydney-runkle](https://github.com/sydney-runkle) in [#10257](https://github.com/pydantic/pydantic/pull/10257) +* Remove defaults filter for namespace by [@sydney-runkle](https://github.com/sydney-runkle) in [#10261](https://github.com/pydantic/pydantic/pull/10261) +* Use identity instead of equality after validating model in `__init__` by [@Viicos](https://github.com/Viicos) in [#10264](https://github.com/pydantic/pydantic/pull/10264) +* Support `BigInt` serialization for `int` subclasses by [@kxx317](https://github.com/kxx317) in [pydantic/pydantic-core#1417](https://github.com/pydantic/pydantic-core/pull/1417) +* Support signature for wrap validators without `info` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10277](https://github.com/pydantic/pydantic/pull/10277) +* Ensure `__pydantic_complete__` is set when rebuilding `dataclasses` by [@Viicos](https://github.com/Viicos) in [#10291](https://github.com/pydantic/pydantic/pull/10291) +* Respect `schema_generator` config value in `TypeAdapter` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10300](https://github.com/pydantic/pydantic/pull/10300) + +### New Contributors + +#### `pydantic` + +* [@kwint](https://github.com/kwint) made their first contribution in [#9787](https://github.com/pydantic/pydantic/pull/9787) +* [@seekinginfiniteloop](https://github.com/seekinginfiniteloop) made their first contribution in [#9822](https://github.com/pydantic/pydantic/pull/9822) +* [@a-alexander](https://github.com/a-alexander) made their first 
contribution in [#9848](https://github.com/pydantic/pydantic/pull/9848) +* [@maximilianfellhuber](https://github.com/maximilianfellhuber) made their first contribution in [#9885](https://github.com/pydantic/pydantic/pull/9885) +* [@karmaBonfire](https://github.com/karmaBonfire) made their first contribution in [#9945](https://github.com/pydantic/pydantic/pull/9945) +* [@s-rigaud](https://github.com/s-rigaud) made their first contribution in [#9958](https://github.com/pydantic/pydantic/pull/9958) +* [@msabramo](https://github.com/msabramo) made their first contribution in [#9964](https://github.com/pydantic/pydantic/pull/9964) +* [@DimaCybr](https://github.com/DimaCybr) made their first contribution in [#9972](https://github.com/pydantic/pydantic/pull/9972) +* [@kc0506](https://github.com/kc0506) made their first contribution in [#9971](https://github.com/pydantic/pydantic/pull/9971) +* [@haoyun](https://github.com/haoyun) made their first contribution in [#9990](https://github.com/pydantic/pydantic/pull/9990) +* [@radekwlsk](https://github.com/radekwlsk) made their first contribution in [#9938](https://github.com/pydantic/pydantic/pull/9938) +* [@dpeachey](https://github.com/dpeachey) made their first contribution in [#10029](https://github.com/pydantic/pydantic/pull/10029) +* [@BoxyUwU](https://github.com/BoxyUwU) made their first contribution in [#10085](https://github.com/pydantic/pydantic/pull/10085) +* [@mochi22](https://github.com/mochi22) made their first contribution in [#10082](https://github.com/pydantic/pydantic/pull/10082) +* [@aditkumar72](https://github.com/aditkumar72) made their first contribution in [#10128](https://github.com/pydantic/pydantic/pull/10128) +* [@changhc](https://github.com/changhc) made their first contribution in [#9654](https://github.com/pydantic/pydantic/pull/9654) +* [@insumanth](https://github.com/insumanth) made their first contribution in [#10229](https://github.com/pydantic/pydantic/pull/10229) +* 
[@AdolfoVillalobos](https://github.com/AdolfoVillalobos) made their first contribution in [#10240](https://github.com/pydantic/pydantic/pull/10240) +* [@bllchmbrs](https://github.com/bllchmbrs) made their first contribution in [#10270](https://github.com/pydantic/pydantic/pull/10270) + +#### `pydantic-core` + +* [@kfreezen](https://github.com/kfreezen) made their first contribution in [pydantic/pydantic-core#1286](https://github.com/pydantic/pydantic-core/pull/1286) +* [@tinez](https://github.com/tinez) made their first contribution in [pydantic/pydantic-core#1368](https://github.com/pydantic/pydantic-core/pull/1368) +* [@fft001](https://github.com/fft001) made their first contribution in [pydantic/pydantic-core#1362](https://github.com/pydantic/pydantic-core/pull/1362) +* [@nix010](https://github.com/nix010) made their first contribution in [pydantic/pydantic-core#1349](https://github.com/pydantic/pydantic-core/pull/1349) +* [@BoxyUwU](https://github.com/BoxyUwU) made their first contribution in [pydantic/pydantic-core#1379](https://github.com/pydantic/pydantic-core/pull/1379) +* [@candleindark](https://github.com/candleindark) made their first contribution in [pydantic/pydantic-core#1404](https://github.com/pydantic/pydantic-core/pull/1404) +* [@changhc](https://github.com/changhc) made their first contribution in [pydantic/pydantic-core#1331](https://github.com/pydantic/pydantic-core/pull/1331) + +## v2.9.0b2 (2024-08-30) + +Pre-release, see [the GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.9.0b2) for details. + +## v2.9.0b1 (2024-08-26) + +Pre-release, see [the GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.9.0b1) for details. 
+ +## v2.8.2 (2024-07-03) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.8.2) + +### What's Changed + +#### Fixes + +* Fix issue with assertion caused by pluggable schema validator by [@dmontagu](https://github.com/dmontagu) in [#9838](https://github.com/pydantic/pydantic/pull/9838) + +## v2.8.1 (2024-07-03) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.8.1) + +### What's Changed + +#### Packaging +* Bump `ruff` to `v0.5.0` and `pyright` to `v1.1.369` by [@sydney-runkle](https://github.com/sydney-runkle) in [#9801](https://github.com/pydantic/pydantic/pull/9801) +* Bump `pydantic-core` to `v2.20.1`, `pydantic-extra-types` to `v2.9.0` by [@sydney-runkle](https://github.com/sydney-runkle) in [#9832](https://github.com/pydantic/pydantic/pull/9832) + +#### Fixes +* Fix breaking change in `to_snake` from v2.7 -> v2.8 by [@sydney-runkle](https://github.com/sydney-runkle) in [#9812](https://github.com/pydantic/pydantic/pull/9812) +* Fix list constraint json schema application by [@sydney-runkle](https://github.com/sydney-runkle) in [#9818](https://github.com/pydantic/pydantic/pull/9818) +* Support time duration more than 23 by [@nix010](https://github.com/nix010) in [pydantic/speedate#64](https://github.com/pydantic/speedate/pull/64) +* Fix millisecond fraction being handled with the wrong scale by [@davidhewitt](https://github.com/davidhewitt) in [pydantic/speedate#65](https://github.com/pydantic/speedate/pull/65) +* Handle negative fractional durations correctly by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/speedate#71](https://github.com/pydantic/speedate/pull/71) + +## v2.8.0 (2024-07-01) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.8.0) + +The code released in v2.8.0 is functionally identical to that of v2.8.0b1. 
+ +### What's Changed + +#### Packaging + +* Update citation version automatically with new releases by [@sydney-runkle](https://github.com/sydney-runkle) in [#9673](https://github.com/pydantic/pydantic/pull/9673) +* Bump pyright to `v1.1.367` and add type checking tests for pipeline API by [@adriangb](https://github.com/adriangb) in [#9674](https://github.com/pydantic/pydantic/pull/9674) +* Update `pydantic.v1` stub to `v1.10.17` by [@sydney-runkle](https://github.com/sydney-runkle) in [#9707](https://github.com/pydantic/pydantic/pull/9707) +* General package updates to prep for `v2.8.0b1` by [@sydney-runkle](https://github.com/sydney-runkle) in [#9741](https://github.com/pydantic/pydantic/pull/9741) +* Bump `pydantic-core` to `v2.20.0` by [@sydney-runkle](https://github.com/sydney-runkle) in [#9745](https://github.com/pydantic/pydantic/pull/9745) +* Add support for Python 3.13 by [@sydney-runkle](https://github.com/sydney-runkle) in [#9743](https://github.com/pydantic/pydantic/pull/9743) +* Update `pdm` version used for `pdm.lock` to v2.16.1 by [@sydney-runkle](https://github.com/sydney-runkle) in [#9761](https://github.com/pydantic/pydantic/pull/9761) +* Update to `ruff` `v0.4.8` by [@Viicos](https://github.com/Viicos) in [#9585](https://github.com/pydantic/pydantic/pull/9585) + +#### New Features + +* Experimental: support `defer_build` for `TypeAdapter` by [@MarkusSintonen](https://github.com/MarkusSintonen) in [#8939](https://github.com/pydantic/pydantic/pull/8939) +* Implement `deprecated` field in json schema by [@NeevCohen](https://github.com/NeevCohen) in [#9298](https://github.com/pydantic/pydantic/pull/9298) +* Experimental: Add pipeline API by [@adriangb](https://github.com/adriangb) in [#9459](https://github.com/pydantic/pydantic/pull/9459) +* Add support for programmatic title generation by [@NeevCohen](https://github.com/NeevCohen) in [#9183](https://github.com/pydantic/pydantic/pull/9183) +* Implement `fail_fast` feature by 
[@uriyyo](https://github.com/uriyyo) in [#9708](https://github.com/pydantic/pydantic/pull/9708) +* Add `ser_json_inf_nan='strings'` mode to produce valid JSON by [@josh-newman](https://github.com/josh-newman) in [pydantic/pydantic-core#1307](https://github.com/pydantic/pydantic-core/pull/1307) + +#### Changes + +* Add warning when "alias" is set in ignored `Annotated` field by [@nix010](https://github.com/nix010) in [#9170](https://github.com/pydantic/pydantic/pull/9170) +* Support serialization of some serializable defaults in JSON schema by [@sydney-runkle](https://github.com/sydney-runkle) in [#9624](https://github.com/pydantic/pydantic/pull/9624) +* Relax type specification for `__validators__` values in `create_model` by [@sydney-runkle](https://github.com/sydney-runkle) in [#9697](https://github.com/pydantic/pydantic/pull/9697) +* **Breaking Change:** Improve `smart` union matching logic by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1322](https://github.com/pydantic/pydantic-core/pull/1322) +You can read more about our `smart` union matching logic [here](https://docs.pydantic.dev/dev/concepts/unions/#smart-mode). In some cases, if the old behavior +is desired, you can switch to `left-to-right` mode and change the order of your `Union` members. 
+ +#### Performance + +##### Internal Improvements + +* ⚡️ Speed up `_display_error_loc()` by 25% in `pydantic/v1/error_wrappers.py` by [@misrasaurabh1](https://github.com/misrasaurabh1) in [#9653](https://github.com/pydantic/pydantic/pull/9653) +* ⚡️ Speed up `_get_all_json_refs()` by 34% in `pydantic/json_schema.py` by [@misrasaurabh1](https://github.com/misrasaurabh1) in [#9650](https://github.com/pydantic/pydantic/pull/9650) +* ⚡️ Speed up `is_pydantic_dataclass()` by 41% in `pydantic/dataclasses.py` by [@misrasaurabh1](https://github.com/misrasaurabh1) in [#9652](https://github.com/pydantic/pydantic/pull/9652) +* ⚡️ Speed up `to_snake()` by 27% in `pydantic/alias_generators.py` by [@misrasaurabh1](https://github.com/misrasaurabh1) in [#9747](https://github.com/pydantic/pydantic/pull/9747) +* ⚡️ Speed up `unwrap_wrapped_function()` by 93% in `pydantic/_internal/_decorators.py` by [@misrasaurabh1](https://github.com/misrasaurabh1) in [#9727](https://github.com/pydantic/pydantic/pull/9727) + +#### Fixes + +* Replace `__spec__.parent` with `__package__` by [@hramezani](https://github.com/hramezani) in [#9331](https://github.com/pydantic/pydantic/pull/9331) +* Fix Outputted Model JSON Schema for `Sequence` type by [@anesmemisevic](https://github.com/anesmemisevic) in [#9303](https://github.com/pydantic/pydantic/pull/9303) +* Fix typing of `_frame_depth` by [@Viicos](https://github.com/Viicos) in [#9353](https://github.com/pydantic/pydantic/pull/9353) +* Make `ImportString` json schema compatible by [@amitschang](https://github.com/amitschang) in [#9344](https://github.com/pydantic/pydantic/pull/9344) +* Hide private attributes (`PrivateAttr`) from `__init__` signature in type checkers by [@idan22moral](https://github.com/idan22moral) in [#9293](https://github.com/pydantic/pydantic/pull/9293) +* Make detection of `TypeVar` defaults robust to the CPython `PEP-696` implementation by [@AlexWaygood](https://github.com/AlexWaygood) in 
[#9426](https://github.com/pydantic/pydantic/pull/9426) +* Fix usage of `PlainSerializer` with builtin types by [@Viicos](https://github.com/Viicos) in [#9450](https://github.com/pydantic/pydantic/pull/9450) +* Add more robust custom validation examples by [@ChrisPappalardo](https://github.com/ChrisPappalardo) in [#9468](https://github.com/pydantic/pydantic/pull/9468) +* Fix ignored `strict` specification for `StringConstraint(strict=False)` by [@vbmendes](https://github.com/vbmendes) in [#9476](https://github.com/pydantic/pydantic/pull/9476) +* **Breaking Change:** Use PEP 570 syntax by [@Viicos](https://github.com/Viicos) in [#9479](https://github.com/pydantic/pydantic/pull/9479) +* Use `Self` where possible by [@Viicos](https://github.com/Viicos) in [#9479](https://github.com/pydantic/pydantic/pull/9479) +* Do not alter `RootModel.model_construct` signature in the `mypy` plugin by [@Viicos](https://github.com/Viicos) in [#9480](https://github.com/pydantic/pydantic/pull/9480) +* Fixed type hint of `validation_context` by [@OhioDschungel6](https://github.com/OhioDschungel6) in [#9508](https://github.com/pydantic/pydantic/pull/9508) +* Support context being passed to TypeAdapter's `dump_json`/`dump_python` by [@alexcouper](https://github.com/alexcouper) in [#9495](https://github.com/pydantic/pydantic/pull/9495) +* Updates type signature for `Field()` constructor by [@bjmc](https://github.com/bjmc) in [#9484](https://github.com/pydantic/pydantic/pull/9484) +* Improve builtin alias generators by [@sydney-runkle](https://github.com/sydney-runkle) in [#9561](https://github.com/pydantic/pydantic/pull/9561) +* Fix typing of `TypeAdapter` by [@Viicos](https://github.com/Viicos) in [#9570](https://github.com/pydantic/pydantic/pull/9570) +* Add fallback default value for private fields in `__setstate__` of BaseModel by [@anhpham1509](https://github.com/anhpham1509) in [#9584](https://github.com/pydantic/pydantic/pull/9584) +* Support `PEP 746` by 
[@adriangb](https://github.com/adriangb) in [#9587](https://github.com/pydantic/pydantic/pull/9587) +* Allow validator and serializer functions to have default values by [@Viicos](https://github.com/Viicos) in [#9478](https://github.com/pydantic/pydantic/pull/9478) +* Fix bug with mypy plugin's handling of covariant `TypeVar` fields by [@dmontagu](https://github.com/dmontagu) in [#9606](https://github.com/pydantic/pydantic/pull/9606) +* Fix multiple annotation / constraint application logic by [@sydney-runkle](https://github.com/sydney-runkle) in [#9623](https://github.com/pydantic/pydantic/pull/9623) +* Respect `regex` flags in validation and json schema by [@sydney-runkle](https://github.com/sydney-runkle) in [#9591](https://github.com/pydantic/pydantic/pull/9591) +* Fix type hint on `IpvAnyAddress` by [@sydney-runkle](https://github.com/sydney-runkle) in [#9640](https://github.com/pydantic/pydantic/pull/9640) +* Allow a field specifier on `__pydantic_extra__` by [@dmontagu](https://github.com/dmontagu) in [#9659](https://github.com/pydantic/pydantic/pull/9659) +* Use normalized case for file path comparison by [@sydney-runkle](https://github.com/sydney-runkle) in [#9737](https://github.com/pydantic/pydantic/pull/9737) +* Modify constraint application logic to allow field constraints on `Optional[Decimal]` by [@lazyhope](https://github.com/lazyhope) in [#9754](https://github.com/pydantic/pydantic/pull/9754) +* `validate_call` type params fix by [@sydney-runkle](https://github.com/sydney-runkle) in [#9760](https://github.com/pydantic/pydantic/pull/9760) +* Check all warnings returned by pytest.warns() by [@s-t-e-v-e-n-k](https://github.com/s-t-e-v-e-n-k) in [#9702](https://github.com/pydantic/pydantic/pull/9702) +* Reuse `re.Pattern` object in regex patterns to allow for regex flags by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1318](https://github.com/pydantic/pydantic-core/pull/1318) + +### New Contributors + +* 
[@idan22moral](https://github.com/idan22moral) made their first contribution in [#9294](https://github.com/pydantic/pydantic/pull/9294) +* [@anesmemisevic](https://github.com/anesmemisevic) made their first contribution in [#9303](https://github.com/pydantic/pydantic/pull/9303) +* [@max-muoto](https://github.com/max-muoto) made their first contribution in [#9338](https://github.com/pydantic/pydantic/pull/9338) +* [@amitschang](https://github.com/amitschang) made their first contribution in [#9344](https://github.com/pydantic/pydantic/pull/9344) +* [@paulmartin91](https://github.com/paulmartin91) made their first contribution in [#9410](https://github.com/pydantic/pydantic/pull/9410) +* [@OhioDschungel6](https://github.com/OhioDschungel6) made their first contribution in [#9405](https://github.com/pydantic/pydantic/pull/9405) +* [@AlexWaygood](https://github.com/AlexWaygood) made their first contribution in [#9426](https://github.com/pydantic/pydantic/pull/9426) +* [@kinuax](https://github.com/kinuax) made their first contribution in [#9433](https://github.com/pydantic/pydantic/pull/9433) +* [@antoni-jamiolkowski](https://github.com/antoni-jamiolkowski) made their first contribution in [#9431](https://github.com/pydantic/pydantic/pull/9431) +* [@candleindark](https://github.com/candleindark) made their first contribution in [#9448](https://github.com/pydantic/pydantic/pull/9448) +* [@nix010](https://github.com/nix010) made their first contribution in [#9170](https://github.com/pydantic/pydantic/pull/9170) +* [@tomy0000000](https://github.com/tomy0000000) made their first contribution in [#9457](https://github.com/pydantic/pydantic/pull/9457) +* [@vbmendes](https://github.com/vbmendes) made their first contribution in [#9470](https://github.com/pydantic/pydantic/pull/9470) +* [@micheleAlberto](https://github.com/micheleAlberto) made their first contribution in [#9471](https://github.com/pydantic/pydantic/pull/9471) +* 
[@ChrisPappalardo](https://github.com/ChrisPappalardo) made their first contribution in [#9468](https://github.com/pydantic/pydantic/pull/9468) +* [@blueTurtz](https://github.com/blueTurtz) made their first contribution in [#9475](https://github.com/pydantic/pydantic/pull/9475) +* [@WinterBlue16](https://github.com/WinterBlue16) made their first contribution in [#9477](https://github.com/pydantic/pydantic/pull/9477) +* [@bittner](https://github.com/bittner) made their first contribution in [#9500](https://github.com/pydantic/pydantic/pull/9500) +* [@alexcouper](https://github.com/alexcouper) made their first contribution in [#9495](https://github.com/pydantic/pydantic/pull/9495) +* [@bjmc](https://github.com/bjmc) made their first contribution in [#9484](https://github.com/pydantic/pydantic/pull/9484) +* [@pjvv](https://github.com/pjvv) made their first contribution in [#9529](https://github.com/pydantic/pydantic/pull/9529) +* [@nedbat](https://github.com/nedbat) made their first contribution in [#9530](https://github.com/pydantic/pydantic/pull/9530) +* [@gunnellEvan](https://github.com/gunnellEvan) made their first contribution in [#9469](https://github.com/pydantic/pydantic/pull/9469) +* [@jaymbans](https://github.com/jaymbans) made their first contribution in [#9531](https://github.com/pydantic/pydantic/pull/9531) +* [@MarcBresson](https://github.com/MarcBresson) made their first contribution in [#9534](https://github.com/pydantic/pydantic/pull/9534) +* [@anhpham1509](https://github.com/anhpham1509) made their first contribution in [#9584](https://github.com/pydantic/pydantic/pull/9584) +* [@K-dash](https://github.com/K-dash) made their first contribution in [#9595](https://github.com/pydantic/pydantic/pull/9595) +* [@s-t-e-v-e-n-k](https://github.com/s-t-e-v-e-n-k) made their first contribution in [#9527](https://github.com/pydantic/pydantic/pull/9527) +* [@airwoodix](https://github.com/airwoodix) made their first contribution in 
[#9506](https://github.com/pydantic/pydantic/pull/9506) +* [@misrasaurabh1](https://github.com/misrasaurabh1) made their first contribution in [#9653](https://github.com/pydantic/pydantic/pull/9653) +* [@AlessandroMiola](https://github.com/AlessandroMiola) made their first contribution in [#9740](https://github.com/pydantic/pydantic/pull/9740) +* [@mylapallilavanyaa](https://github.com/mylapallilavanyaa) made their first contribution in [#9746](https://github.com/pydantic/pydantic/pull/9746) +* [@lazyhope](https://github.com/lazyhope) made their first contribution in [#9754](https://github.com/pydantic/pydantic/pull/9754) +* [@YassinNouh21](https://github.com/YassinNouh21) made their first contribution in [#9759](https://github.com/pydantic/pydantic/pull/9759) + +## v2.8.0b1 (2024-06-27) + +Pre-release, see [the GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.8.0b1) for details. + +## v2.7.4 (2024-06-12) + +[Github release](https://github.com/pydantic/pydantic/releases/tag/v2.7.4) + +### What's Changed + +#### Packaging + +* Bump `pydantic.v1` to `v1.10.16` reference by [@sydney-runkle](https://github.com/sydney-runkle) in [#9639](https://github.com/pydantic/pydantic/pull/9639) + +#### Fixes + +* Specify `recursive_guard` as kwarg in `FutureRef._evaluate` by [@vfazio](https://github.com/vfazio) in [#9612](https://github.com/pydantic/pydantic/pull/9612) + +## v2.7.3 (2024-06-03) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.7.3) + +### What's Changed + +#### Packaging + +* Bump `pydantic-core` to `v2.18.4` by [@sydney-runkle](https://github.com/sydney-runkle) in [#9550](https://github.com/pydantic/pydantic/pull/9550) + +#### Fixes + +* Fix u style unicode strings in python [@samuelcolvin](https://github.com/samuelcolvin) in [pydantic/jiter#110](https://github.com/pydantic/jiter/pull/110) + +## v2.7.2 (2024-05-28) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.7.2) + +### What's Changed + +#### 
Packaging + +* Bump `pydantic-core` to `v2.18.3` by [@sydney-runkle](https://github.com/sydney-runkle) in [#9515](https://github.com/pydantic/pydantic/pull/9515) + +#### Fixes + +* Replace `__spec__.parent` with `__package__` by [@hramezani](https://github.com/hramezani) in [#9331](https://github.com/pydantic/pydantic/pull/9331) +* Fix validation of `int`s with leading unary minus by [@RajatRajdeep](https://github.com/RajatRajdeep) in [pydantic/pydantic-core#1291](https://github.com/pydantic/pydantic-core/pull/1291) +* Fix `str` subclass validation for enums by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1273](https://github.com/pydantic/pydantic-core/pull/1273) +* Support `BigInt`s in `Literal`s and `Enum`s by [@samuelcolvin](https://github.com/samuelcolvin) in [pydantic/pydantic-core#1297](https://github.com/pydantic/pydantic-core/pull/1297) +* Fix: uuid - allow `str` subclass as input by [@davidhewitt](https://github.com/davidhewitt) in [pydantic/pydantic-core#1296](https://github.com/pydantic/pydantic-core/pull/1296) + +## v2.7.1 (2024-04-23) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.7.1) + +### What's Changed + +#### Packaging + +* Bump `pydantic-core` to `v2.18.2` by [@sydney-runkle](https://github.com/sydney-runkle) in [#9307](https://github.com/pydantic/pydantic/pull/9307) + +#### New Features + +* Ftp and Websocket connection strings support by [@CherrySuryp](https://github.com/CherrySuryp) in [#9205](https://github.com/pydantic/pydantic/pull/9205) + +#### Changes + +* Use field description for RootModel schema description when there is one by [@LouisGobert](https://github.com/LouisGobert) in [#9214](https://github.com/pydantic/pydantic/pull/9214) + +#### Fixes + +* Fix `validation_alias` behavior with `model_construct` for `AliasChoices` and `AliasPath` by [@sydney-runkle](https://github.com/sydney-runkle) in [#9223](https://github.com/pydantic/pydantic/pull/9223) +* Revert `typing.Literal` 
and import it outside the TYPE_CHECKING block by [@frost-nzcr4](https://github.com/frost-nzcr4) in [#9232](https://github.com/pydantic/pydantic/pull/9232) +* Fix `Secret` serialization schema, applicable for unions by [@sydney-runkle](https://github.com/sydney-runkle) in [#9240](https://github.com/pydantic/pydantic/pull/9240) +* Fix `strict` application to `function-after` with `use_enum_values` by [@sydney-runkle](https://github.com/sydney-runkle) in [#9279](https://github.com/pydantic/pydantic/pull/9279) +* Address case where `model_construct` on a class which defines `model_post_init` fails with `AttributeError` by [@babygrimes](https://github.com/babygrimes) in [#9168](https://github.com/pydantic/pydantic/pull/9168) +* Fix `model_json_schema` with config types by [@NeevCohen](https://github.com/NeevCohen) in [#9287](https://github.com/pydantic/pydantic/pull/9287) +* Support multiple zeros as an `int` by [@samuelcolvin](https://github.com/samuelcolvin) in [pydantic/pydantic-core#1269](https://github.com/pydantic/pydantic-core/pull/1269) +* Fix validation of `int`s with leading unary plus by [@cknv](https://github.com/cknv) in [pydantic/pydantic-core#1272](https://github.com/pydantic/pydantic-core/pull/1272) +* Fix interaction between `extra != 'ignore'` and `from_attributes=True` by [@davidhewitt](https://github.com/davidhewitt) in [pydantic/pydantic-core#1276](https://github.com/pydantic/pydantic-core/pull/1276) +* Handle error from `Enum`'s `missing` function as `ValidationError` by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1274](https://github.com/pydantic/pydantic-core/pull/1274) +* Fix memory leak with `Iterable` validation by [@davidhewitt](https://github.com/davidhewitt) in [pydantic/pydantic-core#1271](https://github.com/pydantic/pydantic-core/pull/1271) + +### New Contributors + +* [@zzstoatzz](https://github.com/zzstoatzz) made their first contribution in [#9219](https://github.com/pydantic/pydantic/pull/9219) +* 
[@frost-nzcr4](https://github.com/frost-nzcr4) made their first contribution in [#9232](https://github.com/pydantic/pydantic/pull/9232) +* [@CherrySuryp](https://github.com/CherrySuryp) made their first contribution in [#9205](https://github.com/pydantic/pydantic/pull/9205) +* [@vagenas](https://github.com/vagenas) made their first contribution in [#9268](https://github.com/pydantic/pydantic/pull/9268) +* [@ollz272](https://github.com/ollz272) made their first contribution in [#9262](https://github.com/pydantic/pydantic/pull/9262) +* [@babygrimes](https://github.com/babygrimes) made their first contribution in [#9168](https://github.com/pydantic/pydantic/pull/9168) +* [@swelborn](https://github.com/swelborn) made their first contribution in [#9296](https://github.com/pydantic/pydantic/pull/9296) +* [@kf-novi](https://github.com/kf-novi) made their first contribution in [#9236](https://github.com/pydantic/pydantic/pull/9236) +* [@lgeiger](https://github.com/lgeiger) made their first contribution in [#9288](https://github.com/pydantic/pydantic/pull/9288) + +## v2.7.0 (2024-04-11) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.7.0) + +The code released in v2.7.0 is practically identical to that of v2.7.0b1. 
+ +### What's Changed + +#### Packaging + +* Reorganize `pyproject.toml` sections by [@Viicos](https://github.com/Viicos) in [#8899](https://github.com/pydantic/pydantic/pull/8899) +* Bump `pydantic-core` to `v2.18.1` by [@sydney-runkle](https://github.com/sydney-runkle) in [#9211](https://github.com/pydantic/pydantic/pull/9211) +* Adopt `jiter` `v0.2.0` by [@samuelcolvin](https://github.com/samuelcolvin) in [pydantic/pydantic-core#1250](https://github.com/pydantic/pydantic-core/pull/1250) + +#### New Features + +* Extract attribute docstrings from `FieldInfo.description` by [@Viicos](https://github.com/Viicos) in [#6563](https://github.com/pydantic/pydantic/pull/6563) +* Add a `with_config` decorator to comply with typing spec by [@Viicos](https://github.com/Viicos) in [#8611](https://github.com/pydantic/pydantic/pull/8611) +* Allow an optional separator splitting the value and unit of the result of `ByteSize.human_readable` by [@jks15satoshi](https://github.com/jks15satoshi) in [#8706](https://github.com/pydantic/pydantic/pull/8706) +* Add generic `Secret` base type by [@conradogarciaberrotaran](https://github.com/conradogarciaberrotaran) in [#8519](https://github.com/pydantic/pydantic/pull/8519) +* Make use of `Sphinx` inventories for cross references in docs by [@Viicos](https://github.com/Viicos) in [#8682](https://github.com/pydantic/pydantic/pull/8682) +* Add environment variable to disable plugins by [@geospackle](https://github.com/geospackle) in [#8767](https://github.com/pydantic/pydantic/pull/8767) +* Add support for `deprecated` fields by [@Viicos](https://github.com/Viicos) in [#8237](https://github.com/pydantic/pydantic/pull/8237) +* Allow `field_serializer('*')` by [@ornariece](https://github.com/ornariece) in [#9001](https://github.com/pydantic/pydantic/pull/9001) +* Handle a case when `model_config` is defined as a model property by [@alexeyt101](https://github.com/alexeyt101) in [#9004](https://github.com/pydantic/pydantic/pull/9004) +* Update 
`create_model()` to support `typing.Annotated` as input by [@wannieman98](https://github.com/wannieman98) in [#8947](https://github.com/pydantic/pydantic/pull/8947) +* Add `ClickhouseDsn` support by [@solidguy7](https://github.com/solidguy7) in [#9062](https://github.com/pydantic/pydantic/pull/9062) +* Add support for `re.Pattern[str]` to `pattern` field by [@jag-k](https://github.com/jag-k) in [#9053](https://github.com/pydantic/pydantic/pull/9053) +* Support for `serialize_as_any` runtime setting by [@sydney-runkle](https://github.com/sydney-runkle) in [#8830](https://github.com/pydantic/pydantic/pull/8830) +* Add support for `typing.Self` by [@Youssefares](https://github.com/Youssefares) in [#9023](https://github.com/pydantic/pydantic/pull/9023) +* Ability to pass `context` to serialization by [@ornariece](https://github.com/ornariece) in [#8965](https://github.com/pydantic/pydantic/pull/8965) +* Add feedback widget to docs with flarelytics integration by [@sydney-runkle](https://github.com/sydney-runkle) in [#9129](https://github.com/pydantic/pydantic/pull/9129) +* Support for parsing partial JSON strings in Python by [@samuelcolvin](https://github.com/samuelcolvin) in [pydantic/jiter#66](https://github.com/pydantic/jiter/pull/66) + +**Finalized in v2.7.0, rather than v2.7.0b1:** +* Add support for field level number to str coercion option by [@NeevCohen](https://github.com/NeevCohen) in [#9137](https://github.com/pydantic/pydantic/pull/9137) +* Update `warnings` parameter for serialization utilities to allow raising a warning by [@Lance-Drane](https://github.com/Lance-Drane) in [#9166](https://github.com/pydantic/pydantic/pull/9166) + +#### Changes + +* Correct docs, logic for `model_construct` behavior with `extra` by [@sydney-runkle](https://github.com/sydney-runkle) in [#8807](https://github.com/pydantic/pydantic/pull/8807) +* Improve error message for improper `RootModel` subclasses by [@sydney-runkle](https://github.com/sydney-runkle) in 
[#8857](https://github.com/pydantic/pydantic/pull/8857) +* **Breaking Change:** Use `PEP570` syntax by [@Viicos](https://github.com/Viicos) in [#8940](https://github.com/pydantic/pydantic/pull/8940) +* Add `enum` and `type` to the JSON schema for single item literals by [@dmontagu](https://github.com/dmontagu) in [#8944](https://github.com/pydantic/pydantic/pull/8944) +* Deprecate `update_json_schema` internal function by [@sydney-runkle](https://github.com/sydney-runkle) in [#9125](https://github.com/pydantic/pydantic/pull/9125) +* Serialize duration to hour minute second, instead of just seconds by [@kakilangit](https://github.com/kakilangit) in [pydantic/speedate#50](https://github.com/pydantic/speedate/pull/50) +* Trimming str before parsing to int and float by [@hungtsetse](https://github.com/hungtsetse) in [pydantic/pydantic-core#1203](https://github.com/pydantic/pydantic-core/pull/1203) + +#### Performance + +* `enum` validator improvements by [@samuelcolvin](https://github.com/samuelcolvin) in [#9045](https://github.com/pydantic/pydantic/pull/9045) +* Move `enum` validation and serialization to Rust by [@samuelcolvin](https://github.com/samuelcolvin) in [#9064](https://github.com/pydantic/pydantic/pull/9064) +* Improve schema generation for nested dataclasses by [@sydney-runkle](https://github.com/sydney-runkle) in [#9114](https://github.com/pydantic/pydantic/pull/9114) +* Fast path for ASCII python string creation in JSON by [@samuelcolvin](https://github.com/samuelcolvin) in [pydantic/jiter#72](https://github.com/pydantic/jiter/pull/72) +* SIMD integer and string JSON parsing on `aarch64` (**Note:** SIMD on x86 will be implemented in a future release) by [@samuelcolvin](https://github.com/samuelcolvin) in [pydantic/jiter#65](https://github.com/pydantic/jiter/pull/65) +* Support JSON `Cow` from `jiter` by [@davidhewitt](https://github.com/davidhewitt) in [pydantic/pydantic-core#1231](https://github.com/pydantic/pydantic-core/pull/1231) +* MAJOR 
performance improvement: update to PyO3 0.21 final by [@davidhewitt](https://github.com/davidhewitt) in [pydantic/pydantic-core#1248](https://github.com/pydantic/pydantic-core/pull/1248) +* cache Python strings by [@samuelcolvin](https://github.com/samuelcolvin) in [pydantic/pydantic-core#1240](https://github.com/pydantic/pydantic-core/pull/1240) + +#### Fixes + +* Fix strict parsing for some `Sequence`s by [@sydney-runkle](https://github.com/sydney-runkle) in [#8614](https://github.com/pydantic/pydantic/pull/8614) +* Add a check on the existence of `__qualname__` by [@anci3ntr0ck](https://github.com/anci3ntr0ck) in [#8642](https://github.com/pydantic/pydantic/pull/8642) +* Handle `__pydantic_extra__` annotation being a string or inherited by [@alexmojaki](https://github.com/alexmojaki) in [#8659](https://github.com/pydantic/pydantic/pull/8659) +* Fix json validation for `NameEmail` by [@Holi0317](https://github.com/Holi0317) in [#8650](https://github.com/pydantic/pydantic/pull/8650) +* Fix type-safety of attribute access in `BaseModel` by [@bluenote10](https://github.com/bluenote10) in [#8651](https://github.com/pydantic/pydantic/pull/8651) +* Fix bug with `mypy` plugin and `no_strict_optional = True` by [@dmontagu](https://github.com/dmontagu) in [#8666](https://github.com/pydantic/pydantic/pull/8666) +* Fix `ByteSize` error `type` change by [@sydney-runkle](https://github.com/sydney-runkle) in [#8681](https://github.com/pydantic/pydantic/pull/8681) +* Fix inheriting annotations in dataclasses by [@sydney-runkle](https://github.com/sydney-runkle) in [#8679](https://github.com/pydantic/pydantic/pull/8679) +* Fix regression in core schema generation for indirect definition references by [@dmontagu](https://github.com/dmontagu) in [#8702](https://github.com/pydantic/pydantic/pull/8702) +* Fix unsupported types bug with plain validator by [@sydney-runkle](https://github.com/sydney-runkle) in [#8710](https://github.com/pydantic/pydantic/pull/8710) +* Reverting 
problematic fix from 2.6 release, fixing schema building bug by [@sydney-runkle](https://github.com/sydney-runkle) in [#8718](https://github.com/pydantic/pydantic/pull/8718) +* fixes `__pydantic_config__` ignored for TypedDict by [@13sin](https://github.com/13sin) in [#8734](https://github.com/pydantic/pydantic/pull/8734) +* Fix test failures with `pytest v8.0.0` due to `pytest.warns()` starting to work inside `pytest.raises()` by [@mgorny](https://github.com/mgorny) in [#8678](https://github.com/pydantic/pydantic/pull/8678) +* Use `is_valid_field` from 1.x for `mypy` plugin by [@DanielNoord](https://github.com/DanielNoord) in [#8738](https://github.com/pydantic/pydantic/pull/8738) +* Better-support `mypy` strict equality flag by [@dmontagu](https://github.com/dmontagu) in [#8799](https://github.com/pydantic/pydantic/pull/8799) +* model_json_schema export with Annotated types misses 'required' parameters by [@LouisGobert](https://github.com/LouisGobert) in [#8793](https://github.com/pydantic/pydantic/pull/8793) +* Fix default inclusion in `FieldInfo.__repr_args__` by [@sydney-runkle](https://github.com/sydney-runkle) in [#8801](https://github.com/pydantic/pydantic/pull/8801) +* Fix resolution of forward refs in dataclass base classes that are not present in the subclass module namespace by [@matsjoyce-refeyn](https://github.com/matsjoyce-refeyn) in [#8751](https://github.com/pydantic/pydantic/pull/8751) +* Fix `BaseModel` type annotations to be resolvable by `typing.get_type_hints` by [@devmonkey22](https://github.com/devmonkey22) in [#7680](https://github.com/pydantic/pydantic/pull/7680) +* Fix: allow empty string aliases with `AliasGenerator` by [@sydney-runkle](https://github.com/sydney-runkle) in [#8810](https://github.com/pydantic/pydantic/pull/8810) +* Fix test along with `date` -> `datetime` timezone assumption fix by [@sydney-runkle](https://github.com/sydney-runkle) in [#8823](https://github.com/pydantic/pydantic/pull/8823) +* Fix deprecation warning with 
usage of `ast.Str` by [@Viicos](https://github.com/Viicos) in [#8837](https://github.com/pydantic/pydantic/pull/8837) +* Add missing `deprecated` decorators by [@Viicos](https://github.com/Viicos) in [#8877](https://github.com/pydantic/pydantic/pull/8877) +* Fix serialization of `NameEmail` if name includes an email address by [@NeevCohen](https://github.com/NeevCohen) in [#8860](https://github.com/pydantic/pydantic/pull/8860) +* Add information about class in error message of schema generation by [@Czaki](https://github.com/Czaki) in [#8917](https://github.com/pydantic/pydantic/pull/8917) +* Make `TypeAdapter`'s typing compatible with special forms by [@adriangb](https://github.com/adriangb) in [#8923](https://github.com/pydantic/pydantic/pull/8923) +* Fix issue with config behavior being baked into the ref schema for `enum`s by [@dmontagu](https://github.com/dmontagu) in [#8920](https://github.com/pydantic/pydantic/pull/8920) +* More helpful error re wrong `model_json_schema` usage by [@sydney-runkle](https://github.com/sydney-runkle) in [#8928](https://github.com/pydantic/pydantic/pull/8928) +* Fix nested discriminated union schema gen, pt 2 by [@sydney-runkle](https://github.com/sydney-runkle) in [#8932](https://github.com/pydantic/pydantic/pull/8932) +* Fix schema build for nested dataclasses / TypedDicts with discriminators by [@sydney-runkle](https://github.com/sydney-runkle) in [#8950](https://github.com/pydantic/pydantic/pull/8950) +* Remove unnecessary logic for definitions schema gen with discriminated unions by [@sydney-runkle](https://github.com/sydney-runkle) in [#8951](https://github.com/pydantic/pydantic/pull/8951) +* Fix handling of optionals in `mypy` plugin by [@dmontagu](https://github.com/dmontagu) in [#9008](https://github.com/pydantic/pydantic/pull/9008) +* Fix `PlainSerializer` usage with std type constructor by [@sydney-runkle](https://github.com/sydney-runkle) in [#9031](https://github.com/pydantic/pydantic/pull/9031) +* Remove unnecessary 
warning for config in plugin by [@dmontagu](https://github.com/dmontagu) in [#9039](https://github.com/pydantic/pydantic/pull/9039) +* Fix default value serializing by [@NeevCohen](https://github.com/NeevCohen) in [#9066](https://github.com/pydantic/pydantic/pull/9066) +* Fix extra fields check in `Model.__getattr__()` by [@NeevCohen](https://github.com/NeevCohen) in [#9082](https://github.com/pydantic/pydantic/pull/9082) +* Fix `ClassVar` forward ref inherited from parent class by [@alexmojaki](https://github.com/alexmojaki) in [#9097](https://github.com/pydantic/pydantic/pull/9097) +* fix sequence like validator with strict `True` by [@andresliszt](https://github.com/andresliszt) in [#8977](https://github.com/pydantic/pydantic/pull/8977) +* Improve warning message when a field name shadows a field in a parent model by [@chan-vince](https://github.com/chan-vince) in [#9105](https://github.com/pydantic/pydantic/pull/9105) +* Do not warn about shadowed fields if they are not redefined in a child class by [@chan-vince](https://github.com/chan-vince) in [#9111](https://github.com/pydantic/pydantic/pull/9111) +* Fix discriminated union bug with unsubstituted type var by [@sydney-runkle](https://github.com/sydney-runkle) in [#9124](https://github.com/pydantic/pydantic/pull/9124) +* Support serialization of `deque` when passed to `Sequence[blah blah blah]` by [@sydney-runkle](https://github.com/sydney-runkle) in [#9128](https://github.com/pydantic/pydantic/pull/9128) +* Init private attributes from super-types in `model_post_init` by [@Viicos](https://github.com/Viicos) in [#9134](https://github.com/pydantic/pydantic/pull/9134) +* fix `model_construct` with `validation_alias` by [@ornariece](https://github.com/ornariece) in [#9144](https://github.com/pydantic/pydantic/pull/9144) +* Ensure json-schema generator handles `Literal` `null` types by [@bruno-f-cruz](https://github.com/bruno-f-cruz) in [#9135](https://github.com/pydantic/pydantic/pull/9135) +* **Fixed in 
v2.7.0**: Fix allow extra generic by [@dmontagu](https://github.com/dmontagu) in [#9193](https://github.com/pydantic/pydantic/pull/9193) + +### New Contributors + +* [@hungtsetse](https://github.com/hungtsetse) made their first contribution in [#8546](https://github.com/pydantic/pydantic/pull/8546) +* [@StrawHatDrag0n](https://github.com/StrawHatDrag0n) made their first contribution in [#8583](https://github.com/pydantic/pydantic/pull/8583) +* [@anci3ntr0ck](https://github.com/anci3ntr0ck) made their first contribution in [#8642](https://github.com/pydantic/pydantic/pull/8642) +* [@Holi0317](https://github.com/Holi0317) made their first contribution in [#8650](https://github.com/pydantic/pydantic/pull/8650) +* [@bluenote10](https://github.com/bluenote10) made their first contribution in [#8651](https://github.com/pydantic/pydantic/pull/8651) +* [@ADSteele916](https://github.com/ADSteele916) made their first contribution in [#8703](https://github.com/pydantic/pydantic/pull/8703) +* [@musicinmybrain](https://github.com/musicinmybrain) made their first contribution in [#8731](https://github.com/pydantic/pydantic/pull/8731) +* [@jks15satoshi](https://github.com/jks15satoshi) made their first contribution in [#8706](https://github.com/pydantic/pydantic/pull/8706) +* [@13sin](https://github.com/13sin) made their first contribution in [#8734](https://github.com/pydantic/pydantic/pull/8734) +* [@DanielNoord](https://github.com/DanielNoord) made their first contribution in [#8738](https://github.com/pydantic/pydantic/pull/8738) +* [@conradogarciaberrotaran](https://github.com/conradogarciaberrotaran) made their first contribution in [#8519](https://github.com/pydantic/pydantic/pull/8519) +* [@chris-griffin](https://github.com/chris-griffin) made their first contribution in [#8775](https://github.com/pydantic/pydantic/pull/8775) +* [@LouisGobert](https://github.com/LouisGobert) made their first contribution in [#8793](https://github.com/pydantic/pydantic/pull/8793) +* 
[@matsjoyce-refeyn](https://github.com/matsjoyce-refeyn) made their first contribution in [#8751](https://github.com/pydantic/pydantic/pull/8751) +* [@devmonkey22](https://github.com/devmonkey22) made their first contribution in [#7680](https://github.com/pydantic/pydantic/pull/7680) +* [@adamency](https://github.com/adamency) made their first contribution in [#8847](https://github.com/pydantic/pydantic/pull/8847) +* [@MamfTheKramf](https://github.com/MamfTheKramf) made their first contribution in [#8851](https://github.com/pydantic/pydantic/pull/8851) +* [@ornariece](https://github.com/ornariece) made their first contribution in [#9001](https://github.com/pydantic/pydantic/pull/9001) +* [@alexeyt101](https://github.com/alexeyt101) made their first contribution in [#9004](https://github.com/pydantic/pydantic/pull/9004) +* [@wannieman98](https://github.com/wannieman98) made their first contribution in [#8947](https://github.com/pydantic/pydantic/pull/8947) +* [@solidguy7](https://github.com/solidguy7) made their first contribution in [#9062](https://github.com/pydantic/pydantic/pull/9062) +* [@kloczek](https://github.com/kloczek) made their first contribution in [#9047](https://github.com/pydantic/pydantic/pull/9047) +* [@jag-k](https://github.com/jag-k) made their first contribution in [#9053](https://github.com/pydantic/pydantic/pull/9053) +* [@priya-gitTest](https://github.com/priya-gitTest) made their first contribution in [#9088](https://github.com/pydantic/pydantic/pull/9088) +* [@Youssefares](https://github.com/Youssefares) made their first contribution in [#9023](https://github.com/pydantic/pydantic/pull/9023) +* [@chan-vince](https://github.com/chan-vince) made their first contribution in [#9105](https://github.com/pydantic/pydantic/pull/9105) +* [@bruno-f-cruz](https://github.com/bruno-f-cruz) made their first contribution in [#9135](https://github.com/pydantic/pydantic/pull/9135) +* [@Lance-Drane](https://github.com/Lance-Drane) made their first 
contribution in [#9166](https://github.com/pydantic/pydantic/pull/9166) + +## v2.7.0b1 (2024-04-03) + +Pre-release, see [the GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.7.0b1) for details. + +## v2.6.4 (2024-03-12) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.6.4) + +### What's Changed + +#### Fixes + +* Fix usage of `AliasGenerator` with `computed_field` decorator by [@sydney-runkle](https://github.com/sydney-runkle) in [#8806](https://github.com/pydantic/pydantic/pull/8806) +* Fix nested discriminated union schema gen, pt 2 by [@sydney-runkle](https://github.com/sydney-runkle) in [#8932](https://github.com/pydantic/pydantic/pull/8932) +* Fix bug with no_strict_optional=True caused by API deferral by [@dmontagu](https://github.com/dmontagu) in [#8826](https://github.com/pydantic/pydantic/pull/8826) + + +## v2.6.3 (2024-02-27) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.6.3) + +### What's Changed + +#### Packaging + +* Update `pydantic-settings` version in the docs by [@hramezani](https://github.com/hramezani) in [#8906](https://github.com/pydantic/pydantic/pull/8906) + +#### Fixes + +* Fix discriminated union schema gen bug by [@sydney-runkle](https://github.com/sydney-runkle) in [#8904](https://github.com/pydantic/pydantic/pull/8904) + + +## v2.6.2 (2024-02-23) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.6.2) + +### What's Changed + +#### Packaging + +* Upgrade to `pydantic-core` 2.16.3 by [@sydney-runkle](https://github.com/sydney-runkle) in [#8879](https://github.com/pydantic/pydantic/pull/8879) + +#### Fixes + +* 'YYYY-MM-DD' date string coerced to datetime shouldn't infer timezone by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1193](https://github.com/pydantic/pydantic-core/pull/1193) + + +## v2.6.1 (2024-02-05) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.6.1) + +### What's Changed + 
+#### Packaging + +* Upgrade to `pydantic-core` 2.16.2 by [@sydney-runkle](https://github.com/sydney-runkle) in [#8717](https://github.com/pydantic/pydantic/pull/8717) + +#### Fixes + +* Fix bug with `mypy` plugin and `no_strict_optional = True` by [@dmontagu](https://github.com/dmontagu) in [#8666](https://github.com/pydantic/pydantic/pull/8666) +* Fix `ByteSize` error `type` change by [@sydney-runkle](https://github.com/sydney-runkle) in [#8681](https://github.com/pydantic/pydantic/pull/8681) +* Fix inheriting `Field` annotations in dataclasses by [@sydney-runkle](https://github.com/sydney-runkle) in [#8679](https://github.com/pydantic/pydantic/pull/8679) +* Fix regression in core schema generation for indirect definition references by [@dmontagu](https://github.com/dmontagu) in [#8702](https://github.com/pydantic/pydantic/pull/8702) +* Fix unsupported types bug with `PlainValidator` by [@sydney-runkle](https://github.com/sydney-runkle) in [#8710](https://github.com/pydantic/pydantic/pull/8710) +* Reverting problematic fix from 2.6 release, fixing schema building bug by [@sydney-runkle](https://github.com/sydney-runkle) in [#8718](https://github.com/pydantic/pydantic/pull/8718) +* Fix warning for tuple of wrong size in `Union` by [@davidhewitt](https://github.com/davidhewitt) in [pydantic/pydantic-core#1174](https://github.com/pydantic/pydantic-core/pull/1174) +* Fix `computed_field` JSON serializer `exclude_none` behavior by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1187](https://github.com/pydantic/pydantic-core/pull/1187) + + +## v2.6.0 (2024-01-23) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.6.0) + +The code released in v2.6.0 is practically identical to that of v2.6.0b1. 
+ +### What's Changed + +#### Packaging + +* Check for `email-validator` version >= 2.0 by [@commonism](https://github.com/commonism) in [#6033](https://github.com/pydantic/pydantic/pull/6033) +* Upgrade `ruff` target version to Python 3.8 by [@Elkiwa](https://github.com/Elkiwa) in [#8341](https://github.com/pydantic/pydantic/pull/8341) +* Update to `pydantic-extra-types==2.4.1` by [@yezz123](https://github.com/yezz123) in [#8478](https://github.com/pydantic/pydantic/pull/8478) +* Update to `pyright==1.1.345` by [@Viicos](https://github.com/Viicos) in [#8453](https://github.com/pydantic/pydantic/pull/8453) +* Update pydantic-core from 2.14.6 to 2.16.1, significant changes from these updates are described below, full changelog [here](https://github.com/pydantic/pydantic-core/compare/v2.14.6...v2.16.1) + +#### New Features + +* Add `NatsDsn` by [@ekeew](https://github.com/ekeew) in [#6874](https://github.com/pydantic/pydantic/pull/6874) +* Add `ConfigDict.ser_json_inf_nan` by [@davidhewitt](https://github.com/davidhewitt) in [#8159](https://github.com/pydantic/pydantic/pull/8159) +* Add `types.OnErrorOmit` by [@adriangb](https://github.com/adriangb) in [#8222](https://github.com/pydantic/pydantic/pull/8222) +* Support `AliasGenerator` usage by [@sydney-runkle](https://github.com/sydney-runkle) in [#8282](https://github.com/pydantic/pydantic/pull/8282) +* Add Pydantic People Page to docs by [@sydney-runkle](https://github.com/sydney-runkle) in [#8345](https://github.com/pydantic/pydantic/pull/8345) +* Support `yyyy-MM-DD` datetime parsing by [@sydney-runkle](https://github.com/sydney-runkle) in [#8404](https://github.com/pydantic/pydantic/pull/8404) +* Added bits conversions to the `ByteSize` class [#8415](https://github.com/pydantic/pydantic/issues/8415) by [@luca-matei](https://github.com/luca-matei) in [#8507](https://github.com/pydantic/pydantic/pull/8507) +* Enable json schema creation with type `ByteSize` by [@geospackle](https://github.com/geospackle) in 
[#8537](https://github.com/pydantic/pydantic/pull/8537) +* Add `eval_type_backport` to handle union operator and builtin generic subscripting in older Pythons by [@alexmojaki](https://github.com/alexmojaki) in [#8209](https://github.com/pydantic/pydantic/pull/8209) +* Add support for `dataclass` fields `init` by [@dmontagu](https://github.com/dmontagu) in [#8552](https://github.com/pydantic/pydantic/pull/8552) +* Implement pickling for `ValidationError` by [@davidhewitt](https://github.com/davidhewitt) in [pydantic/pydantic-core#1119](https://github.com/pydantic/pydantic-core/pull/1119) +* Add unified tuple validator that can handle "variadic" tuples via PEP-646 by [@dmontagu](https://github.com/dmontagu) in [pydantic/pydantic-core#865](https://github.com/pydantic/pydantic-core/pull/865) + +#### Changes + +* Drop Python3.7 support by [@hramezani](https://github.com/hramezani) in [#7188](https://github.com/pydantic/pydantic/pull/7188) +* Drop Python 3.7, and PyPy 3.7 and 3.8 by [@davidhewitt](https://github.com/davidhewitt) in [pydantic/pydantic-core#1129](https://github.com/pydantic/pydantic-core/pull/1129) +* Use positional-only `self` in `BaseModel` constructor, so no field name can ever conflict with it by [@ariebovenberg](https://github.com/ariebovenberg) in [#8072](https://github.com/pydantic/pydantic/pull/8072) +* Make `@validate_call` return a function instead of a custom descriptor - fixes binding issue with inheritance and adds `self/cls` argument to validation errors by [@alexmojaki](https://github.com/alexmojaki) in [#8268](https://github.com/pydantic/pydantic/pull/8268) +* Exclude `BaseModel` docstring from JSON schema description by [@sydney-runkle](https://github.com/sydney-runkle) in [#8352](https://github.com/pydantic/pydantic/pull/8352) +* Introducing `classproperty` decorator for `model_computed_fields` by [@Jocelyn-Gas](https://github.com/Jocelyn-Gas) in [#8437](https://github.com/pydantic/pydantic/pull/8437) +* Explicitly raise an error if field 
names clashes with types by [@Viicos](https://github.com/Viicos) in [#8243](https://github.com/pydantic/pydantic/pull/8243) +* Use stricter serializer for unions of simple types by [@alexdrydew](https://github.com/alexdrydew) [pydantic/pydantic-core#1132](https://github.com/pydantic/pydantic-core/pull/1132) + +#### Performance + +* Add Codspeed profiling Actions workflow by [@lambertsbennett](https://github.com/lambertsbennett) in [#8054](https://github.com/pydantic/pydantic/pull/8054) +* Improve `int` extraction by [@samuelcolvin](https://github.com/samuelcolvin) in [pydantic/pydantic-core#1155](https://github.com/pydantic/pydantic-core/pull/1155) +* Improve performance of recursion guard by [@samuelcolvin](https://github.com/samuelcolvin) in [pydantic/pydantic-core#1156](https://github.com/pydantic/pydantic-core/pull/1156) +* `dataclass` serialization speedups by [@samuelcolvin](https://github.com/samuelcolvin) in [pydantic/pydantic-core#1162](https://github.com/pydantic/pydantic-core/pull/1162) +* Avoid `HashMap` creation when looking up small JSON objects in `LazyIndexMaps` by [@samuelcolvin](https://github.com/samuelcolvin) in [pydantic/jiter#55](https://github.com/pydantic/jiter/pull/55) +* use hashbrown to speedup python string caching by [@davidhewitt](https://github.com/davidhewitt) in [pydantic/jiter#51](https://github.com/pydantic/jiter/pull/51) +* Replace `Peak` with more efficient `Peek` by [@davidhewitt](https://github.com/davidhewitt) in [pydantic/jiter#48](https://github.com/pydantic/jiter/pull/48) + +#### Fixes + +* Move `getattr` warning in deprecated `BaseConfig` by [@tlambert03](https://github.com/tlambert03) in [#7183](https://github.com/pydantic/pydantic/pull/7183) +* Only hash `model_fields`, not whole `__dict__` by [@alexmojaki](https://github.com/alexmojaki) in [#7786](https://github.com/pydantic/pydantic/pull/7786) +* Fix mishandling of unions while freezing types in the `mypy` plugin by [@dmontagu](https://github.com/dmontagu) in 
[#7411](https://github.com/pydantic/pydantic/pull/7411) +* Fix `mypy` error on untyped `ClassVar` by [@vincent-hachin-wmx](https://github.com/vincent-hachin-wmx) in [#8138](https://github.com/pydantic/pydantic/pull/8138) +* Only compare pydantic fields in `BaseModel.__eq__` instead of whole `__dict__` by [@QuentinSoubeyranAqemia](https://github.com/QuentinSoubeyranAqemia) in [#7825](https://github.com/pydantic/pydantic/pull/7825) +* Update `strict` docstring in `model_validate` method. by [@LukeTonin](https://github.com/LukeTonin) in [#8223](https://github.com/pydantic/pydantic/pull/8223) +* Fix overload position of `computed_field` by [@Viicos](https://github.com/Viicos) in [#8227](https://github.com/pydantic/pydantic/pull/8227) +* Fix custom type type casting used in multiple attributes by [@ianhfc](https://github.com/ianhfc) in [#8066](https://github.com/pydantic/pydantic/pull/8066) +* Fix issue not allowing `validate_call` decorator to be dynamically assigned to a class method by [@jusexton](https://github.com/jusexton) in [#8249](https://github.com/pydantic/pydantic/pull/8249) +* Fix issue `unittest.mock` deprecation warnings by [@ibleedicare](https://github.com/ibleedicare) in [#8262](https://github.com/pydantic/pydantic/pull/8262) +* Added tests for the case `JsonValue` contains subclassed primitive values by [@jusexton](https://github.com/jusexton) in [#8286](https://github.com/pydantic/pydantic/pull/8286) +* Fix `mypy` error on free before validator (classmethod) by [@sydney-runkle](https://github.com/sydney-runkle) in [#8285](https://github.com/pydantic/pydantic/pull/8285) +* Fix `to_snake` conversion by [@jevins09](https://github.com/jevins09) in [#8316](https://github.com/pydantic/pydantic/pull/8316) +* Fix type annotation of `ModelMetaclass.__prepare__` by [@slanzmich](https://github.com/slanzmich) in [#8305](https://github.com/pydantic/pydantic/pull/8305) +* Disallow `config` specification when initializing a `TypeAdapter` when the annotated type has 
config already by [@sydney-runkle](https://github.com/sydney-runkle) in [#8365](https://github.com/pydantic/pydantic/pull/8365) +* Fix a naming issue with JSON schema for generics parametrized by recursive type aliases by [@dmontagu](https://github.com/dmontagu) in [#8389](https://github.com/pydantic/pydantic/pull/8389) +* Fix type annotation in pydantic people script by [@shenxiangzhuang](https://github.com/shenxiangzhuang) in [#8402](https://github.com/pydantic/pydantic/pull/8402) +* Add support for field `alias` in `dataclass` signature by [@NeevCohen](https://github.com/NeevCohen) in [#8387](https://github.com/pydantic/pydantic/pull/8387) +* Fix bug with schema generation with `Field(...)` in a forward ref by [@dmontagu](https://github.com/dmontagu) in [#8494](https://github.com/pydantic/pydantic/pull/8494) +* Fix ordering of keys in `__dict__` with `model_construct` call by [@sydney-runkle](https://github.com/sydney-runkle) in [#8500](https://github.com/pydantic/pydantic/pull/8500) +* Fix module `path_type` creation when globals does not contain `__name__` by [@hramezani](https://github.com/hramezani) in [#8470](https://github.com/pydantic/pydantic/pull/8470) +* Fix for namespace issue with dataclasses with `from __future__ import annotations` by [@sydney-runkle](https://github.com/sydney-runkle) in [#8513](https://github.com/pydantic/pydantic/pull/8513) +* Fix: make function validator types positional-only by [@pmmmwh](https://github.com/pmmmwh) in [#8479](https://github.com/pydantic/pydantic/pull/8479) +* Fix usage of `@deprecated` by [@Viicos](https://github.com/Viicos) in [#8294](https://github.com/pydantic/pydantic/pull/8294) +* Add more support for private attributes in `model_construct` call by [@sydney-runkle](https://github.com/sydney-runkle) in [#8525](https://github.com/pydantic/pydantic/pull/8525) +* Use a stack for the types namespace by [@dmontagu](https://github.com/dmontagu) in [#8378](https://github.com/pydantic/pydantic/pull/8378) +* Fix 
schema-building bug with `TypeAliasType` for types with refs by [@dmontagu](https://github.com/dmontagu) in [#8526](https://github.com/pydantic/pydantic/pull/8526) +* Support `pydantic.Field(repr=False)` in dataclasses by [@tigeryy2](https://github.com/tigeryy2) in [#8511](https://github.com/pydantic/pydantic/pull/8511) +* Override `dataclass_transform` behavior for `RootModel` by [@Viicos](https://github.com/Viicos) in [#8163](https://github.com/pydantic/pydantic/pull/8163) +* Refactor signature generation for simplicity by [@sydney-runkle](https://github.com/sydney-runkle) in [#8572](https://github.com/pydantic/pydantic/pull/8572) +* Fix ordering bug of PlainValidator annotation by [@Anvil](https://github.com/Anvil) in [#8567](https://github.com/pydantic/pydantic/pull/8567) +* Fix `exclude_none` for json serialization of `computed_field`s by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1098](https://github.com/pydantic/pydantic-core/pull/1098) +* Support yyyy-MM-DD string for datetimes by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1124](https://github.com/pydantic/pydantic-core/pull/1124) +* Tweak ordering of definitions in generated schemas by [@StrawHatDrag0n](https://github.com/StrawHatDrag0n) in [#8583](https://github.com/pydantic/pydantic/pull/8583) + + +### New Contributors + +#### `pydantic` +* [@ekeew](https://github.com/ekeew) made their first contribution in [#6874](https://github.com/pydantic/pydantic/pull/6874) +* [@lambertsbennett](https://github.com/lambertsbennett) made their first contribution in [#8054](https://github.com/pydantic/pydantic/pull/8054) +* [@vincent-hachin-wmx](https://github.com/vincent-hachin-wmx) made their first contribution in [#8138](https://github.com/pydantic/pydantic/pull/8138) +* [@QuentinSoubeyranAqemia](https://github.com/QuentinSoubeyranAqemia) made their first contribution in [#7825](https://github.com/pydantic/pydantic/pull/7825) +* 
[@ariebovenberg](https://github.com/ariebovenberg) made their first contribution in [#8072](https://github.com/pydantic/pydantic/pull/8072) +* [@LukeTonin](https://github.com/LukeTonin) made their first contribution in [#8223](https://github.com/pydantic/pydantic/pull/8223) +* [@denisart](https://github.com/denisart) made their first contribution in [#8231](https://github.com/pydantic/pydantic/pull/8231) +* [@ianhfc](https://github.com/ianhfc) made their first contribution in [#8066](https://github.com/pydantic/pydantic/pull/8066) +* [@eonu](https://github.com/eonu) made their first contribution in [#8255](https://github.com/pydantic/pydantic/pull/8255) +* [@amandahla](https://github.com/amandahla) made their first contribution in [#8263](https://github.com/pydantic/pydantic/pull/8263) +* [@ibleedicare](https://github.com/ibleedicare) made their first contribution in [#8262](https://github.com/pydantic/pydantic/pull/8262) +* [@jevins09](https://github.com/jevins09) made their first contribution in [#8316](https://github.com/pydantic/pydantic/pull/8316) +* [@cuu508](https://github.com/cuu508) made their first contribution in [#8322](https://github.com/pydantic/pydantic/pull/8322) +* [@slanzmich](https://github.com/slanzmich) made their first contribution in [#8305](https://github.com/pydantic/pydantic/pull/8305) +* [@jensenbox](https://github.com/jensenbox) made their first contribution in [#8331](https://github.com/pydantic/pydantic/pull/8331) +* [@szepeviktor](https://github.com/szepeviktor) made their first contribution in [#8356](https://github.com/pydantic/pydantic/pull/8356) +* [@Elkiwa](https://github.com/Elkiwa) made their first contribution in [#8341](https://github.com/pydantic/pydantic/pull/8341) +* [@parhamfh](https://github.com/parhamfh) made their first contribution in [#8395](https://github.com/pydantic/pydantic/pull/8395) +* [@shenxiangzhuang](https://github.com/shenxiangzhuang) made their first contribution in 
[#8402](https://github.com/pydantic/pydantic/pull/8402) +* [@NeevCohen](https://github.com/NeevCohen) made their first contribution in [#8387](https://github.com/pydantic/pydantic/pull/8387) +* [@zby](https://github.com/zby) made their first contribution in [#8497](https://github.com/pydantic/pydantic/pull/8497) +* [@patelnets](https://github.com/patelnets) made their first contribution in [#8491](https://github.com/pydantic/pydantic/pull/8491) +* [@edwardwli](https://github.com/edwardwli) made their first contribution in [#8503](https://github.com/pydantic/pydantic/pull/8503) +* [@luca-matei](https://github.com/luca-matei) made their first contribution in [#8507](https://github.com/pydantic/pydantic/pull/8507) +* [@Jocelyn-Gas](https://github.com/Jocelyn-Gas) made their first contribution in [#8437](https://github.com/pydantic/pydantic/pull/8437) +* [@bL34cHig0](https://github.com/bL34cHig0) made their first contribution in [#8501](https://github.com/pydantic/pydantic/pull/8501) +* [@tigeryy2](https://github.com/tigeryy2) made their first contribution in [#8511](https://github.com/pydantic/pydantic/pull/8511) +* [@geospackle](https://github.com/geospackle) made their first contribution in [#8537](https://github.com/pydantic/pydantic/pull/8537) +* [@Anvil](https://github.com/Anvil) made their first contribution in [#8567](https://github.com/pydantic/pydantic/pull/8567) +* [@hungtsetse](https://github.com/hungtsetse) made their first contribution in [#8546](https://github.com/pydantic/pydantic/pull/8546) +* [@StrawHatDrag0n](https://github.com/StrawHatDrag0n) made their first contribution in [#8583](https://github.com/pydantic/pydantic/pull/8583) + +#### `pydantic-core` +* [@mariuswinger](https://github.com/mariuswinger) made their first contribution in [pydantic/pydantic-core#1087](https://github.com/pydantic/pydantic-core/pull/1087) +* [@adamchainz](https://github.com/adamchainz) made their first contribution in 
[pydantic/pydantic-core#1090](https://github.com/pydantic/pydantic-core/pull/1090) +* [@akx](https://github.com/akx) made their first contribution in [pydantic/pydantic-core#1123](https://github.com/pydantic/pydantic-core/pull/1123) + +## v2.6.0b1 (2024-01-19) + +Pre-release, see [the GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.6.0b1) for details. + +## v2.5.3 (2023-12-22) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.5.3) + +### What's Changed + +#### Packaging + +* uprev `pydantic-core` to 2.14.6 + +#### Fixes + +* Fix memory leak with recursive definitions creating reference cycles by [@davidhewitt](https://github.com/davidhewitt) in [pydantic/pydantic-core#1125](https://github.com/pydantic/pydantic-core/pull/1125) + +## v2.5.2 (2023-11-22) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.5.2) + +### What's Changed + +#### Packaging + +* uprev `pydantic-core` to 2.14.5 + +#### New Features + +* Add `ConfigDict.ser_json_inf_nan` by [@davidhewitt](https://github.com/davidhewitt) in [#8159](https://github.com/pydantic/pydantic/pull/8159) + +#### Fixes + +* Fix validation of `Literal` from JSON keys when used as `dict` key by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1075](https://github.com/pydantic/pydantic-core/pull/1075) +* Fix bug re `custom_init` on members of `Union` by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1076](https://github.com/pydantic/pydantic-core/pull/1076) +* Fix `JsonValue` `bool` serialization by [@sydney-runkle](https://github.com/sydney-runkle) in [#8190](https://github.com/pydantic/pydantic/pull/8190) +* Fix handling of unhashable inputs with `Literal` in `Union`s by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1089](https://github.com/pydantic/pydantic-core/pull/1089) + +## v2.5.1 (2023-11-15) + +[GitHub 
release](https://github.com/pydantic/pydantic/releases/tag/v2.5.1) + +### What's Changed + +#### Packaging + +* uprev pydantic-core to 2.14.3 by [@samuelcolvin](https://github.com/samuelcolvin) in [#8120](https://github.com/pydantic/pydantic/pull/8120) + +#### Fixes + +* Fix package description limit by [@dmontagu](https://github.com/dmontagu) in [#8097](https://github.com/pydantic/pydantic/pull/8097) +* Fix `ValidateCallWrapper` error when creating a model which has a `@validate_call` wrapped field annotation by [@sydney-runkle](https://github.com/sydney-runkle) in [#8110](https://github.com/pydantic/pydantic/pull/8110) + +## v2.5.0 (2023-11-13) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.5.0) + +The code released in v2.5.0 is functionally identical to that of v2.5.0b1. + +### What's Changed + +#### Packaging + +* Update pydantic-core from 2.10.1 to 2.14.1, significant changes from these updates are described below, full changelog [here](https://github.com/pydantic/pydantic-core/compare/v2.10.1...v2.14.1) +* Update to `pyright==1.1.335` by [@Viicos](https://github.com/Viicos) in [#8075](https://github.com/pydantic/pydantic/pull/8075) + +#### New Features + +* Allow plugins to catch non `ValidationError` errors by [@adriangb](https://github.com/adriangb) in [#7806](https://github.com/pydantic/pydantic/pull/7806) +* Support `__doc__` argument in `create_model()` by [@chris-spann](https://github.com/chris-spann) in [#7863](https://github.com/pydantic/pydantic/pull/7863) +* Expose `regex_engine` flag - meaning you can use with the Rust or Python regex libraries in constraints by [@utkini](https://github.com/utkini) in [#7768](https://github.com/pydantic/pydantic/pull/7768) +* Save return type generated from type annotation in `ComputedFieldInfo` by [@alexmojaki](https://github.com/alexmojaki) in [#7889](https://github.com/pydantic/pydantic/pull/7889) +* Adopting `ruff` formatter by 
[@Luca-Blight](https://github.com/Luca-Blight) in [#7930](https://github.com/pydantic/pydantic/pull/7930) +* Added `validation_error_cause` to config by [@zakstucke](https://github.com/zakstucke) in [#7626](https://github.com/pydantic/pydantic/pull/7626) +* Make path of the item to validate available in plugin by [@hramezani](https://github.com/hramezani) in [#7861](https://github.com/pydantic/pydantic/pull/7861) +* Add `CallableDiscriminator` and `Tag` by [@dmontagu](https://github.com/dmontagu) in [#7983](https://github.com/pydantic/pydantic/pull/7983) + * `CallableDiscriminator` renamed to `Discriminator` by [@dmontagu](https://github.com/dmontagu) in [#8047](https://github.com/pydantic/pydantic/pull/8047) +* Make union case tags affect union error messages by [@dmontagu](https://github.com/dmontagu) in [#8001](https://github.com/pydantic/pydantic/pull/8001) +* Add `examples` and `json_schema_extra` to `@computed_field` by [@alexmojaki](https://github.com/alexmojaki) in [#8013](https://github.com/pydantic/pydantic/pull/8013) +* Add `JsonValue` type by [@dmontagu](https://github.com/dmontagu) in [#7998](https://github.com/pydantic/pydantic/pull/7998) +* Allow `str` as argument to `Discriminator` by [@dmontagu](https://github.com/dmontagu) in [#8047](https://github.com/pydantic/pydantic/pull/8047) +* Add `SchemaSerializer.__reduce__` method to enable pickle serialization by [@edoakes](https://github.com/edoakes) in [pydantic/pydantic-core#1006](https://github.com/pydantic/pydantic-core/pull/1006) + +#### Changes + +* **Significant Change:** replace `ultra_strict` with new smart union implementation, the way unions are validated has changed significantly to improve performance and correctness, we have worked hard to absolutely minimise the number of cases where behaviour has changed, see the PR for details - by [@davidhewitt](https://github.com/davidhewitt) in [pydantic/pydantic-core#867](https://github.com/pydantic/pydantic-core/pull/867) +* Add support for 
instance method reassignment when `extra='allow'` by [@sydney-runkle](https://github.com/sydney-runkle) in [#7683](https://github.com/pydantic/pydantic/pull/7683) +* Support JSON schema generation for `Enum` types with no cases by [@sydney-runkle](https://github.com/sydney-runkle) in [#7927](https://github.com/pydantic/pydantic/pull/7927) +* Warn if a class inherits from `Generic` before `BaseModel` by [@alexmojaki](https://github.com/alexmojaki) in [#7891](https://github.com/pydantic/pydantic/pull/7891) + +#### Performance + +* New custom JSON parser, `jiter` by [@samuelcolvin](https://github.com/samuelcolvin) in [pydantic/pydantic-core#974](https://github.com/pydantic/pydantic-core/pull/974) +* PGO build for MacOS M1 by [@samuelcolvin](https://github.com/samuelcolvin) in [pydantic/pydantic-core#1063](https://github.com/pydantic/pydantic-core/pull/1063) +* Use `__getattr__` for all package imports, improve import time by [@samuelcolvin](https://github.com/samuelcolvin) in [#7947](https://github.com/pydantic/pydantic/pull/7947) + +#### Fixes + +* Fix `mypy` issue with subclasses of `RootModel` by [@sydney-runkle](https://github.com/sydney-runkle) in [#7677](https://github.com/pydantic/pydantic/pull/7677) +* Properly rebuild the `FieldInfo` when a forward ref gets evaluated by [@dmontagu](https://github.com/dmontagu) in [#7698](https://github.com/pydantic/pydantic/pull/7698) +* Fix failure to load `SecretStr` from JSON (regression in v2.4) by [@sydney-runkle](https://github.com/sydney-runkle) in [#7729](https://github.com/pydantic/pydantic/pull/7729) +* Fix `defer_build` behavior with `TypeAdapter` by [@sydney-runkle](https://github.com/sydney-runkle) in [#7736](https://github.com/pydantic/pydantic/pull/7736) +* Improve compatibility with legacy `mypy` versions by [@dmontagu](https://github.com/dmontagu) in [#7742](https://github.com/pydantic/pydantic/pull/7742) +* Fix: update `TypeVar` handling when default is not set by [@pmmmwh](https://github.com/pmmmwh) in 
[#7719](https://github.com/pydantic/pydantic/pull/7719) +* Support specification of `strict` on `Enum` type fields by [@sydney-runkle](https://github.com/sydney-runkle) in [#7761](https://github.com/pydantic/pydantic/pull/7761) +* Wrap `weakref.ref` instead of subclassing to fix `cloudpickle` serialization by [@edoakes](https://github.com/edoakes) in [#7780](https://github.com/pydantic/pydantic/pull/7780) +* Keep values of private attributes set within `model_post_init` in subclasses by [@alexmojaki](https://github.com/alexmojaki) in [#7775](https://github.com/pydantic/pydantic/pull/7775) +* Add more specific type for non-callable `json_schema_extra` by [@alexmojaki](https://github.com/alexmojaki) in [#7803](https://github.com/pydantic/pydantic/pull/7803) +* Raise an error when deleting frozen (model) fields by [@alexmojaki](https://github.com/alexmojaki) in [#7800](https://github.com/pydantic/pydantic/pull/7800) +* Fix schema sorting bug with default values by [@sydney-runkle](https://github.com/sydney-runkle) in [#7817](https://github.com/pydantic/pydantic/pull/7817) +* Use generated alias for aliases that are not specified otherwise by [@alexmojaki](https://github.com/alexmojaki) in [#7802](https://github.com/pydantic/pydantic/pull/7802) +* Support `strict` specification for `UUID` types by [@sydney-runkle](https://github.com/sydney-runkle) in [#7865](https://github.com/pydantic/pydantic/pull/7865) +* JSON schema: fix extra parameter handling by [@me-and](https://github.com/me-and) in [#7810](https://github.com/pydantic/pydantic/pull/7810) +* Fix: support `pydantic.Field(kw_only=True)` with inherited dataclasses by [@PrettyWood](https://github.com/PrettyWood) in [#7827](https://github.com/pydantic/pydantic/pull/7827) +* Support `validate_call` decorator for methods in classes with `__slots__` by [@sydney-runkle](https://github.com/sydney-runkle) in [#7883](https://github.com/pydantic/pydantic/pull/7883) +* Fix pydantic dataclass problem with `dataclasses.field` 
default by [@hramezani](https://github.com/hramezani) in [#7898](https://github.com/pydantic/pydantic/pull/7898) +* Fix schema generation for generics with union type bounds by [@sydney-runkle](https://github.com/sydney-runkle) in [#7899](https://github.com/pydantic/pydantic/pull/7899) +* Fix version for `importlib_metadata` on python 3.7 by [@sydney-runkle](https://github.com/sydney-runkle) in [#7904](https://github.com/pydantic/pydantic/pull/7904) +* Support `|` operator (Union) in PydanticRecursiveRef by [@alexmojaki](https://github.com/alexmojaki) in [#7892](https://github.com/pydantic/pydantic/pull/7892) +* Fix `display_as_type` for `TypeAliasType` in python 3.12 by [@dmontagu](https://github.com/dmontagu) in [#7929](https://github.com/pydantic/pydantic/pull/7929) +* Add support for `NotRequired` generics in `TypedDict` by [@sydney-runkle](https://github.com/sydney-runkle) in [#7932](https://github.com/pydantic/pydantic/pull/7932) +* Make generic `TypeAliasType` specifications produce different schema definitions by [@alexdrydew](https://github.com/alexdrydew) in [#7893](https://github.com/pydantic/pydantic/pull/7893) +* Added fix for signature of inherited dataclass by [@howsunjow](https://github.com/howsunjow) in [#7925](https://github.com/pydantic/pydantic/pull/7925) +* Make the model name generation more robust in JSON schema by [@joakimnordling](https://github.com/joakimnordling) in [#7881](https://github.com/pydantic/pydantic/pull/7881) +* Fix plurals in validation error messages (in tests) by [@Iipin](https://github.com/Iipin) in [#7972](https://github.com/pydantic/pydantic/pull/7972) +* `PrivateAttr` is passed from `Annotated` default position by [@tabassco](https://github.com/tabassco) in [#8004](https://github.com/pydantic/pydantic/pull/8004) +* Don't decode bytes (which may not be UTF8) when displaying SecretBytes by [@alexmojaki](https://github.com/alexmojaki) in [#8012](https://github.com/pydantic/pydantic/pull/8012) +* Use `classmethod` instead 
of `classmethod[Any, Any, Any]` by [@Mr-Pepe](https://github.com/Mr-Pepe) in [#7979](https://github.com/pydantic/pydantic/pull/7979) +* Clearer error on invalid Plugin by [@samuelcolvin](https://github.com/samuelcolvin) in [#8023](https://github.com/pydantic/pydantic/pull/8023) +* Correct pydantic dataclasses import by [@samuelcolvin](https://github.com/samuelcolvin) in [#8027](https://github.com/pydantic/pydantic/pull/8027) +* Fix misbehavior for models referencing redefined type aliases by [@dmontagu](https://github.com/dmontagu) in [#8050](https://github.com/pydantic/pydantic/pull/8050) +* Fix `Optional` field with `validate_default` only performing one field validation by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1002](https://github.com/pydantic/pydantic-core/pull/1002) +* Fix `definition-ref` bug with `Dict` keys by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1014](https://github.com/pydantic/pydantic-core/pull/1014) +* Fix bug allowing validation of `bool` types with `coerce_numbers_to_str=True` by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1017](https://github.com/pydantic/pydantic-core/pull/1017) +* Don't accept `NaN` in float and decimal constraints by [@davidhewitt](https://github.com/davidhewitt) in [pydantic/pydantic-core#1037](https://github.com/pydantic/pydantic-core/pull/1037) +* Add `lax_str` and `lax_int` support for enum values not inherited from str/int by [@michaelhly](https://github.com/michaelhly) in [pydantic/pydantic-core#1015](https://github.com/pydantic/pydantic-core/pull/1015) +* Support subclasses in lists in `Union` of `List` types by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1039](https://github.com/pydantic/pydantic-core/pull/1039) +* Allow validation against `max_digits` and `decimals` to pass if normalized or non-normalized input is valid by [@sydney-runkle](https://github.com/sydney-runkle) 
in [pydantic/pydantic-core#1049](https://github.com/pydantic/pydantic-core/pull/1049) +* Fix: proper pluralization in `ValidationError` messages by [@Iipin](https://github.com/Iipin) in [pydantic/pydantic-core#1050](https://github.com/pydantic/pydantic-core/pull/1050) +* Disallow the string `'-'` as `datetime` input by [@davidhewitt](https://github.com/davidhewitt) in [pydantic/speedate#52](https://github.com/pydantic/speedate/pull/52) & [pydantic/pydantic-core#1060](https://github.com/pydantic/pydantic-core/pull/1060) +* Fix: NaN and Inf float serialization by [@davidhewitt](https://github.com/davidhewitt) in [pydantic/pydantic-core#1062](https://github.com/pydantic/pydantic-core/pull/1062) +* Restore manylinux-compatible PGO builds by [@davidhewitt](https://github.com/davidhewitt) in [pydantic/pydantic-core#1068](https://github.com/pydantic/pydantic-core/pull/1068) + +### New Contributors + +#### `pydantic` +* [@schneebuzz](https://github.com/schneebuzz) made their first contribution in [#7699](https://github.com/pydantic/pydantic/pull/7699) +* [@edoakes](https://github.com/edoakes) made their first contribution in [#7780](https://github.com/pydantic/pydantic/pull/7780) +* [@alexmojaki](https://github.com/alexmojaki) made their first contribution in [#7775](https://github.com/pydantic/pydantic/pull/7775) +* [@NickG123](https://github.com/NickG123) made their first contribution in [#7751](https://github.com/pydantic/pydantic/pull/7751) +* [@gowthamgts](https://github.com/gowthamgts) made their first contribution in [#7830](https://github.com/pydantic/pydantic/pull/7830) +* [@jamesbraza](https://github.com/jamesbraza) made their first contribution in [#7848](https://github.com/pydantic/pydantic/pull/7848) +* [@laundmo](https://github.com/laundmo) made their first contribution in [#7850](https://github.com/pydantic/pydantic/pull/7850) +* [@rahmatnazali](https://github.com/rahmatnazali) made their first contribution in 
[#7870](https://github.com/pydantic/pydantic/pull/7870) +* [@waterfountain1996](https://github.com/waterfountain1996) made their first contribution in [#7878](https://github.com/pydantic/pydantic/pull/7878) +* [@chris-spann](https://github.com/chris-spann) made their first contribution in [#7863](https://github.com/pydantic/pydantic/pull/7863) +* [@me-and](https://github.com/me-and) made their first contribution in [#7810](https://github.com/pydantic/pydantic/pull/7810) +* [@utkini](https://github.com/utkini) made their first contribution in [#7768](https://github.com/pydantic/pydantic/pull/7768) +* [@bn-l](https://github.com/bn-l) made their first contribution in [#7744](https://github.com/pydantic/pydantic/pull/7744) +* [@alexdrydew](https://github.com/alexdrydew) made their first contribution in [#7893](https://github.com/pydantic/pydantic/pull/7893) +* [@Luca-Blight](https://github.com/Luca-Blight) made their first contribution in [#7930](https://github.com/pydantic/pydantic/pull/7930) +* [@howsunjow](https://github.com/howsunjow) made their first contribution in [#7925](https://github.com/pydantic/pydantic/pull/7925) +* [@joakimnordling](https://github.com/joakimnordling) made their first contribution in [#7881](https://github.com/pydantic/pydantic/pull/7881) +* [@icfly2](https://github.com/icfly2) made their first contribution in [#7976](https://github.com/pydantic/pydantic/pull/7976) +* [@Yummy-Yums](https://github.com/Yummy-Yums) made their first contribution in [#8003](https://github.com/pydantic/pydantic/pull/8003) +* [@Iipin](https://github.com/Iipin) made their first contribution in [#7972](https://github.com/pydantic/pydantic/pull/7972) +* [@tabassco](https://github.com/tabassco) made their first contribution in [#8004](https://github.com/pydantic/pydantic/pull/8004) +* [@Mr-Pepe](https://github.com/Mr-Pepe) made their first contribution in [#7979](https://github.com/pydantic/pydantic/pull/7979) +* [@0x00cl](https://github.com/0x00cl) made their first 
contribution in [#8010](https://github.com/pydantic/pydantic/pull/8010) +* [@barraponto](https://github.com/barraponto) made their first contribution in [#8032](https://github.com/pydantic/pydantic/pull/8032) + +#### `pydantic-core` +* [@sisp](https://github.com/sisp) made their first contribution in [pydantic/pydantic-core#995](https://github.com/pydantic/pydantic-core/pull/995) +* [@michaelhly](https://github.com/michaelhly) made their first contribution in [pydantic/pydantic-core#1015](https://github.com/pydantic/pydantic-core/pull/1015) + +## v2.5.0b1 (2023-11-09) + +Pre-release, see [the GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.5.0b1) for details. + +## v2.4.2 (2023-09-27) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.4.2) + +### What's Changed + +#### Fixes + +* Fix bug with JSON schema for sequence of discriminated union by [@dmontagu](https://github.com/dmontagu) in [#7647](https://github.com/pydantic/pydantic/pull/7647) +* Fix schema references in discriminated unions by [@adriangb](https://github.com/adriangb) in [#7646](https://github.com/pydantic/pydantic/pull/7646) +* Fix json schema generation for recursive models by [@adriangb](https://github.com/adriangb) in [#7653](https://github.com/pydantic/pydantic/pull/7653) +* Fix `models_json_schema` for generic models by [@adriangb](https://github.com/adriangb) in [#7654](https://github.com/pydantic/pydantic/pull/7654) +* Fix xfailed test for generic model signatures by [@adriangb](https://github.com/adriangb) in [#7658](https://github.com/pydantic/pydantic/pull/7658) + +### New Contributors + +* [@austinorr](https://github.com/austinorr) made their first contribution in [#7657](https://github.com/pydantic/pydantic/pull/7657) +* [@peterHoburg](https://github.com/peterHoburg) made their first contribution in [#7670](https://github.com/pydantic/pydantic/pull/7670) + +## v2.4.1 (2023-09-26) + +[GitHub 
release](https://github.com/pydantic/pydantic/releases/tag/v2.4.1) + +### What's Changed + +#### Packaging + +* Update pydantic-core to 2.10.1 by [@davidhewitt](https://github.com/davidhewitt) in [#7633](https://github.com/pydantic/pydantic/pull/7633) + +#### Fixes + +* Serialize unsubstituted type vars as `Any` by [@adriangb](https://github.com/adriangb) in [#7606](https://github.com/pydantic/pydantic/pull/7606) +* Remove schema building caches by [@adriangb](https://github.com/adriangb) in [#7624](https://github.com/pydantic/pydantic/pull/7624) +* Fix an issue where JSON schema extras weren't JSON encoded by [@dmontagu](https://github.com/dmontagu) in [#7625](https://github.com/pydantic/pydantic/pull/7625) + +## v2.4.0 (2023-09-22) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.4.0) + +### What's Changed + +#### Packaging + +* Update pydantic-core to 2.10.0 by [@samuelcolvin](https://github.com/samuelcolvin) in [#7542](https://github.com/pydantic/pydantic/pull/7542) + +#### New Features + +* Add `Base64Url` types by [@dmontagu](https://github.com/dmontagu) in [#7286](https://github.com/pydantic/pydantic/pull/7286) +* Implement optional `number` to `str` coercion by [@lig](https://github.com/lig) in [#7508](https://github.com/pydantic/pydantic/pull/7508) +* Allow access to `field_name` and `data` in all validators if there is data and a field name by [@samuelcolvin](https://github.com/samuelcolvin) in [#7542](https://github.com/pydantic/pydantic/pull/7542) +* Add `BaseModel.model_validate_strings` and `TypeAdapter.validate_strings` by [@hramezani](https://github.com/hramezani) in [#7552](https://github.com/pydantic/pydantic/pull/7552) +* Add Pydantic `plugins` experimental implementation by [@lig](https://github.com/lig) [@samuelcolvin](https://github.com/samuelcolvin) and [@Kludex](https://github.com/Kludex) in [#6820](https://github.com/pydantic/pydantic/pull/6820) + +#### Changes + +* Do not override `model_post_init` in subclass with 
private attrs by [@Viicos](https://github.com/Viicos) in [#7302](https://github.com/pydantic/pydantic/pull/7302) +* Make fields with defaults not required in the serialization schema by default by [@dmontagu](https://github.com/dmontagu) in [#7275](https://github.com/pydantic/pydantic/pull/7275) +* Mark `Extra` as deprecated by [@disrupted](https://github.com/disrupted) in [#7299](https://github.com/pydantic/pydantic/pull/7299) +* Make `EncodedStr` a dataclass by [@Kludex](https://github.com/Kludex) in [#7396](https://github.com/pydantic/pydantic/pull/7396) +* Move `annotated_handlers` to be public by [@samuelcolvin](https://github.com/samuelcolvin) in [#7569](https://github.com/pydantic/pydantic/pull/7569) + +#### Performance + +* Simplify flattening and inlining of `CoreSchema` by [@adriangb](https://github.com/adriangb) in [#7523](https://github.com/pydantic/pydantic/pull/7523) +* Remove unused copies in `CoreSchema` walking by [@adriangb](https://github.com/adriangb) in [#7528](https://github.com/pydantic/pydantic/pull/7528) +* Add caches for collecting definitions and invalid schemas from a CoreSchema by [@adriangb](https://github.com/adriangb) in [#7527](https://github.com/pydantic/pydantic/pull/7527) +* Eagerly resolve discriminated unions and cache cases where we can't by [@adriangb](https://github.com/adriangb) in [#7529](https://github.com/pydantic/pydantic/pull/7529) +* Replace `dict.get` and `dict.setdefault` with more verbose versions in `CoreSchema` building hot paths by [@adriangb](https://github.com/adriangb) in [#7536](https://github.com/pydantic/pydantic/pull/7536) +* Cache invalid `CoreSchema` discovery by [@adriangb](https://github.com/adriangb) in [#7535](https://github.com/pydantic/pydantic/pull/7535) +* Allow disabling `CoreSchema` validation for faster startup times by [@adriangb](https://github.com/adriangb) in [#7565](https://github.com/pydantic/pydantic/pull/7565) + +#### Fixes + +* Fix config detection for `TypedDict` from grandparent 
classes by [@dmontagu](https://github.com/dmontagu) in [#7272](https://github.com/pydantic/pydantic/pull/7272) +* Fix hash function generation for frozen models with unusual MRO by [@dmontagu](https://github.com/dmontagu) in [#7274](https://github.com/pydantic/pydantic/pull/7274) +* Make `strict` config overridable in field for Path by [@hramezani](https://github.com/hramezani) in [#7281](https://github.com/pydantic/pydantic/pull/7281) +* Use `ser_json_` on default in `GenerateJsonSchema` by [@Kludex](https://github.com/Kludex) in [#7269](https://github.com/pydantic/pydantic/pull/7269) +* Adding a check that alias is validated as an identifier for Python by [@andree0](https://github.com/andree0) in [#7319](https://github.com/pydantic/pydantic/pull/7319) +* Raise an error when computed field overrides field by [@sydney-runkle](https://github.com/sydney-runkle) in [#7346](https://github.com/pydantic/pydantic/pull/7346) +* Fix applying `SkipValidation` to referenced schemas by [@adriangb](https://github.com/adriangb) in [#7381](https://github.com/pydantic/pydantic/pull/7381) +* Enforce behavior of private attributes having double leading underscore by [@lig](https://github.com/lig) in [#7265](https://github.com/pydantic/pydantic/pull/7265) +* Standardize `__get_pydantic_core_schema__` signature by [@hramezani](https://github.com/hramezani) in [#7415](https://github.com/pydantic/pydantic/pull/7415) +* Fix generic dataclass fields mutation bug (when using `TypeAdapter`) by [@sydney-runkle](https://github.com/sydney-runkle) in [#7435](https://github.com/pydantic/pydantic/pull/7435) +* Fix `TypeError` on `model_validator` in `wrap` mode by [@pmmmwh](https://github.com/pmmmwh) in [#7496](https://github.com/pydantic/pydantic/pull/7496) +* Improve enum error message by [@hramezani](https://github.com/hramezani) in [#7506](https://github.com/pydantic/pydantic/pull/7506) +* Make `repr` work for instances that failed initialization when handling `ValidationError`s by 
[@dmontagu](https://github.com/dmontagu) in [#7439](https://github.com/pydantic/pydantic/pull/7439) +* Fixed a regular expression denial of service issue by limiting whitespaces by [@prodigysml](https://github.com/prodigysml) in [#7360](https://github.com/pydantic/pydantic/pull/7360) +* Fix handling of `UUID` values having `UUID.version=None` by [@lig](https://github.com/lig) in [#7566](https://github.com/pydantic/pydantic/pull/7566) +* Fix `__iter__` returning private `cached_property` info by [@sydney-runkle](https://github.com/sydney-runkle) in [#7570](https://github.com/pydantic/pydantic/pull/7570) +* Improvements to version info message by [@samuelcolvin](https://github.com/samuelcolvin) in [#7594](https://github.com/pydantic/pydantic/pull/7594) + +### New Contributors +* [@15498th](https://github.com/15498th) made their first contribution in [#7238](https://github.com/pydantic/pydantic/pull/7238) +* [@GabrielCappelli](https://github.com/GabrielCappelli) made their first contribution in [#7213](https://github.com/pydantic/pydantic/pull/7213) +* [@tobni](https://github.com/tobni) made their first contribution in [#7184](https://github.com/pydantic/pydantic/pull/7184) +* [@redruin1](https://github.com/redruin1) made their first contribution in [#7282](https://github.com/pydantic/pydantic/pull/7282) +* [@FacerAin](https://github.com/FacerAin) made their first contribution in [#7288](https://github.com/pydantic/pydantic/pull/7288) +* [@acdha](https://github.com/acdha) made their first contribution in [#7297](https://github.com/pydantic/pydantic/pull/7297) +* [@andree0](https://github.com/andree0) made their first contribution in [#7319](https://github.com/pydantic/pydantic/pull/7319) +* [@gordonhart](https://github.com/gordonhart) made their first contribution in [#7375](https://github.com/pydantic/pydantic/pull/7375) +* [@pmmmwh](https://github.com/pmmmwh) made their first contribution in [#7496](https://github.com/pydantic/pydantic/pull/7496) +* 
[@disrupted](https://github.com/disrupted) made their first contribution in [#7299](https://github.com/pydantic/pydantic/pull/7299) +* [@prodigysml](https://github.com/prodigysml) made their first contribution in [#7360](https://github.com/pydantic/pydantic/pull/7360) + +## v2.3.0 (2023-08-23) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.3.0) + +* 🔥 Remove orphaned changes file from repo by [@lig](https://github.com/lig) in [#7168](https://github.com/pydantic/pydantic/pull/7168) +* Add copy button on documentation by [@Kludex](https://github.com/Kludex) in [#7190](https://github.com/pydantic/pydantic/pull/7190) +* Fix docs on JSON type by [@Kludex](https://github.com/Kludex) in [#7189](https://github.com/pydantic/pydantic/pull/7189) +* Update mypy 1.5.0 to 1.5.1 in CI by [@hramezani](https://github.com/hramezani) in [#7191](https://github.com/pydantic/pydantic/pull/7191) +* fix download links badge by [@samuelcolvin](https://github.com/samuelcolvin) in [#7200](https://github.com/pydantic/pydantic/pull/7200) +* add 2.2.1 to changelog by [@samuelcolvin](https://github.com/samuelcolvin) in [#7212](https://github.com/pydantic/pydantic/pull/7212) +* Make ModelWrapValidator protocols generic by [@dmontagu](https://github.com/dmontagu) in [#7154](https://github.com/pydantic/pydantic/pull/7154) +* Correct `Field(..., exclude: bool)` docs by [@samuelcolvin](https://github.com/samuelcolvin) in [#7214](https://github.com/pydantic/pydantic/pull/7214) +* Make shadowing attributes a warning instead of an error by [@adriangb](https://github.com/adriangb) in [#7193](https://github.com/pydantic/pydantic/pull/7193) +* Document `Base64Str` and `Base64Bytes` by [@Kludex](https://github.com/Kludex) in [#7192](https://github.com/pydantic/pydantic/pull/7192) +* Fix `config.defer_build` for serialization first cases by [@samuelcolvin](https://github.com/samuelcolvin) in [#7024](https://github.com/pydantic/pydantic/pull/7024) +* clean Model docstrings in JSON 
Schema by [@samuelcolvin](https://github.com/samuelcolvin) in [#7210](https://github.com/pydantic/pydantic/pull/7210) +* fix [#7228](https://github.com/pydantic/pydantic/pull/7228) (typo): docs in `validators.md` to correct `validate_default` kwarg by [@lmmx](https://github.com/lmmx) in [#7229](https://github.com/pydantic/pydantic/pull/7229) +* ✅ Implement `tzinfo.fromutc` method for `TzInfo` in `pydantic-core` by [@lig](https://github.com/lig) in [#7019](https://github.com/pydantic/pydantic/pull/7019) +* Support `__get_validators__` by [@hramezani](https://github.com/hramezani) in [#7197](https://github.com/pydantic/pydantic/pull/7197) + +## v2.2.1 (2023-08-18) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.2.1) + +* Make `xfail`ing test for root model extra stop `xfail`ing by [@dmontagu](https://github.com/dmontagu) in [#6937](https://github.com/pydantic/pydantic/pull/6937) +* Optimize recursion detection by stopping on the second visit for the same object by [@mciucu](https://github.com/mciucu) in [#7160](https://github.com/pydantic/pydantic/pull/7160) +* fix link in docs by [@tlambert03](https://github.com/tlambert03) in [#7166](https://github.com/pydantic/pydantic/pull/7166) +* Replace MiMalloc w/ default allocator by [@adriangb](https://github.com/adriangb) in [pydantic/pydantic-core#900](https://github.com/pydantic/pydantic-core/pull/900) +* Bump pydantic-core to 2.6.1 and prepare 2.2.1 release by [@adriangb](https://github.com/adriangb) in [#7176](https://github.com/pydantic/pydantic/pull/7176) + +## v2.2.0 (2023-08-17) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.2.0) + +* Split "pipx install" setup command into two commands on the documentation site by [@nomadmtb](https://github.com/nomadmtb) in [#6869](https://github.com/pydantic/pydantic/pull/6869) +* Deprecate `Field.include` by [@hramezani](https://github.com/hramezani) in [#6852](https://github.com/pydantic/pydantic/pull/6852) +* Fix typo in 
default factory error msg by [@hramezani](https://github.com/hramezani) in [#6880](https://github.com/pydantic/pydantic/pull/6880) +* Simplify handling of typing.Annotated in GenerateSchema by [@dmontagu](https://github.com/dmontagu) in [#6887](https://github.com/pydantic/pydantic/pull/6887) +* Re-enable fastapi tests in CI by [@dmontagu](https://github.com/dmontagu) in [#6883](https://github.com/pydantic/pydantic/pull/6883) +* Make it harder to hit collisions with json schema defrefs by [@dmontagu](https://github.com/dmontagu) in [#6566](https://github.com/pydantic/pydantic/pull/6566) +* Cleaner error for invalid input to `Path` fields by [@samuelcolvin](https://github.com/samuelcolvin) in [#6903](https://github.com/pydantic/pydantic/pull/6903) +* :memo: support Coordinate Type by [@yezz123](https://github.com/yezz123) in [#6906](https://github.com/pydantic/pydantic/pull/6906) +* Fix `ForwardRef` wrapper for py 3.10.0 (shim until bpo-45166) by [@randomir](https://github.com/randomir) in [#6919](https://github.com/pydantic/pydantic/pull/6919) +* Fix misbehavior related to copying of RootModel by [@dmontagu](https://github.com/dmontagu) in [#6918](https://github.com/pydantic/pydantic/pull/6918) +* Fix issue with recursion error caused by ParamSpec by [@dmontagu](https://github.com/dmontagu) in [#6923](https://github.com/pydantic/pydantic/pull/6923) +* Add section about Constrained classes to the Migration Guide by [@Kludex](https://github.com/Kludex) in [#6924](https://github.com/pydantic/pydantic/pull/6924) +* Use `main` branch for badge links by [@Viicos](https://github.com/Viicos) in [#6925](https://github.com/pydantic/pydantic/pull/6925) +* Add test for v1/v2 Annotated discrepancy by [@carlbordum](https://github.com/carlbordum) in [#6926](https://github.com/pydantic/pydantic/pull/6926) +* Make the v1 mypy plugin work with both v1 and v2 by [@dmontagu](https://github.com/dmontagu) in [#6921](https://github.com/pydantic/pydantic/pull/6921) +* Fix issue where 
generic models couldn't be parametrized with BaseModel by [@dmontagu](https://github.com/dmontagu) in [#6933](https://github.com/pydantic/pydantic/pull/6933) +* Remove xfail for discriminated union with alias by [@dmontagu](https://github.com/dmontagu) in [#6938](https://github.com/pydantic/pydantic/pull/6938) +* add field_serializer to computed_field by [@andresliszt](https://github.com/andresliszt) in [#6965](https://github.com/pydantic/pydantic/pull/6965) +* Use union_schema with Type[Union[...]] by [@JeanArhancet](https://github.com/JeanArhancet) in [#6952](https://github.com/pydantic/pydantic/pull/6952) +* Fix inherited typeddict attributes / config by [@adriangb](https://github.com/adriangb) in [#6981](https://github.com/pydantic/pydantic/pull/6981) +* fix dataclass annotated before validator called twice by [@davidhewitt](https://github.com/davidhewitt) in [#6998](https://github.com/pydantic/pydantic/pull/6998) +* Update test-fastapi deselected tests by [@hramezani](https://github.com/hramezani) in [#7014](https://github.com/pydantic/pydantic/pull/7014) +* Fix validator doc format by [@hramezani](https://github.com/hramezani) in [#7015](https://github.com/pydantic/pydantic/pull/7015) +* Fix typo in docstring of model_json_schema by [@AdamVinch-Federated](https://github.com/AdamVinch-Federated) in [#7032](https://github.com/pydantic/pydantic/pull/7032) +* remove unused "type ignores" with pyright by [@samuelcolvin](https://github.com/samuelcolvin) in [#7026](https://github.com/pydantic/pydantic/pull/7026) +* Add benchmark representing FastAPI startup time by [@adriangb](https://github.com/adriangb) in [#7030](https://github.com/pydantic/pydantic/pull/7030) +* Fix json_encoders for Enum subclasses by [@adriangb](https://github.com/adriangb) in [#7029](https://github.com/pydantic/pydantic/pull/7029) +* Update docstring of `ser_json_bytes` regarding base64 encoding by [@Viicos](https://github.com/Viicos) in [#7052](https://github.com/pydantic/pydantic/pull/7052) 
+* Allow `@validate_call` to work on async methods by [@adriangb](https://github.com/adriangb) in [#7046](https://github.com/pydantic/pydantic/pull/7046) +* Fix: mypy error with `Settings` and `SettingsConfigDict` by [@JeanArhancet](https://github.com/JeanArhancet) in [#7002](https://github.com/pydantic/pydantic/pull/7002) +* Fix some typos (repeated words and it's/its) by [@eumiro](https://github.com/eumiro) in [#7063](https://github.com/pydantic/pydantic/pull/7063) +* Fix the typo in docstring by [@harunyasar](https://github.com/harunyasar) in [#7062](https://github.com/pydantic/pydantic/pull/7062) +* Docs: Fix broken URL in the pydantic-settings package recommendation by [@swetjen](https://github.com/swetjen) in [#6995](https://github.com/pydantic/pydantic/pull/6995) +* Handle constraints being applied to schemas that don't accept it by [@adriangb](https://github.com/adriangb) in [#6951](https://github.com/pydantic/pydantic/pull/6951) +* Replace almost_equal_floats with math.isclose by [@eumiro](https://github.com/eumiro) in [#7082](https://github.com/pydantic/pydantic/pull/7082) +* bump pydantic-core to 2.5.0 by [@davidhewitt](https://github.com/davidhewitt) in [#7077](https://github.com/pydantic/pydantic/pull/7077) +* Add `short_version` and use it in links by [@hramezani](https://github.com/hramezani) in [#7115](https://github.com/pydantic/pydantic/pull/7115) +* 📝 Add usage link to `RootModel` by [@Kludex](https://github.com/Kludex) in [#7113](https://github.com/pydantic/pydantic/pull/7113) +* Revert "Fix default port for mongosrv DSNs (#6827)" by [@Kludex](https://github.com/Kludex) in [#7116](https://github.com/pydantic/pydantic/pull/7116) +* Clarify validate_default and _Unset handling in usage docs and migration guide by [@benbenbang](https://github.com/benbenbang) in [#6950](https://github.com/pydantic/pydantic/pull/6950) +* Tweak documentation of `Field.exclude` by [@Viicos](https://github.com/Viicos) in 
[#7086](https://github.com/pydantic/pydantic/pull/7086) +* Do not require `validate_assignment` to use `Field.frozen` by [@Viicos](https://github.com/Viicos) in [#7103](https://github.com/pydantic/pydantic/pull/7103) +* tweaks to `_core_utils` by [@samuelcolvin](https://github.com/samuelcolvin) in [#7040](https://github.com/pydantic/pydantic/pull/7040) +* Make DefaultDict working with set by [@hramezani](https://github.com/hramezani) in [#7126](https://github.com/pydantic/pydantic/pull/7126) +* Don't always require typing.Generic as a base for partially parametrized models by [@dmontagu](https://github.com/dmontagu) in [#7119](https://github.com/pydantic/pydantic/pull/7119) +* Fix issue with JSON schema incorrectly using parent class core schema by [@dmontagu](https://github.com/dmontagu) in [#7020](https://github.com/pydantic/pydantic/pull/7020) +* Fix xfailed test related to TypedDict and alias_generator by [@dmontagu](https://github.com/dmontagu) in [#6940](https://github.com/pydantic/pydantic/pull/6940) +* Improve error message for NameEmail by [@dmontagu](https://github.com/dmontagu) in [#6939](https://github.com/pydantic/pydantic/pull/6939) +* Fix generic computed fields by [@dmontagu](https://github.com/dmontagu) in [#6988](https://github.com/pydantic/pydantic/pull/6988) +* Reflect namedtuple default values during validation by [@dmontagu](https://github.com/dmontagu) in [#7144](https://github.com/pydantic/pydantic/pull/7144) +* Update dependencies, fix pydantic-core usage, fix CI issues by [@dmontagu](https://github.com/dmontagu) in [#7150](https://github.com/pydantic/pydantic/pull/7150) +* Add mypy 1.5.0 by [@hramezani](https://github.com/hramezani) in [#7118](https://github.com/pydantic/pydantic/pull/7118) +* Handle non-json native enum values by [@adriangb](https://github.com/adriangb) in [#7056](https://github.com/pydantic/pydantic/pull/7056) +* document `round_trip` in Json type documentation by [@jc-louis](https://github.com/jc-louis) in 
[#7137](https://github.com/pydantic/pydantic/pull/7137) +* Relax signature checks to better support builtins and C extension functions as validators by [@adriangb](https://github.com/adriangb) in [#7101](https://github.com/pydantic/pydantic/pull/7101) +* add union_mode='left_to_right' by [@davidhewitt](https://github.com/davidhewitt) in [#7151](https://github.com/pydantic/pydantic/pull/7151) +* Include an error message hint for inherited ordering by [@yvalencia91](https://github.com/yvalencia91) in [#7124](https://github.com/pydantic/pydantic/pull/7124) +* Fix one docs link and resolve some warnings for two others by [@dmontagu](https://github.com/dmontagu) in [#7153](https://github.com/pydantic/pydantic/pull/7153) +* Include Field extra keys name in warning by [@hramezani](https://github.com/hramezani) in [#7136](https://github.com/pydantic/pydantic/pull/7136) + +## v2.1.1 (2023-07-25) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.1.1) + +* Skip FieldInfo merging when unnecessary by [@dmontagu](https://github.com/dmontagu) in [#6862](https://github.com/pydantic/pydantic/pull/6862) + +## v2.1.0 (2023-07-25) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.1.0) + +* Add `StringConstraints` for use as Annotated metadata by [@adriangb](https://github.com/adriangb) in [#6605](https://github.com/pydantic/pydantic/pull/6605) +* Try to fix intermittently failing CI by [@adriangb](https://github.com/adriangb) in [#6683](https://github.com/pydantic/pydantic/pull/6683) +* Remove redundant example of optional vs default. 
by [@ehiggs-deliverect](https://github.com/ehiggs-deliverect) in [#6676](https://github.com/pydantic/pydantic/pull/6676) +* Docs update by [@samuelcolvin](https://github.com/samuelcolvin) in [#6692](https://github.com/pydantic/pydantic/pull/6692) +* Remove the Validate always section in validator docs by [@adriangb](https://github.com/adriangb) in [#6679](https://github.com/pydantic/pydantic/pull/6679) +* Fix recursion error in json schema generation by [@adriangb](https://github.com/adriangb) in [#6720](https://github.com/pydantic/pydantic/pull/6720) +* Fix incorrect subclass check for secretstr by [@AlexVndnblcke](https://github.com/AlexVndnblcke) in [#6730](https://github.com/pydantic/pydantic/pull/6730) +* update pdm / pdm lockfile to 2.8.0 by [@davidhewitt](https://github.com/davidhewitt) in [#6714](https://github.com/pydantic/pydantic/pull/6714) +* unpin pdm on more CI jobs by [@davidhewitt](https://github.com/davidhewitt) in [#6755](https://github.com/pydantic/pydantic/pull/6755) +* improve source locations for auxiliary packages in docs by [@davidhewitt](https://github.com/davidhewitt) in [#6749](https://github.com/pydantic/pydantic/pull/6749) +* Assume builtins don't accept an info argument by [@adriangb](https://github.com/adriangb) in [#6754](https://github.com/pydantic/pydantic/pull/6754) +* Fix bug where calling `help(BaseModelSubclass)` raises errors by [@hramezani](https://github.com/hramezani) in [#6758](https://github.com/pydantic/pydantic/pull/6758) +* Fix mypy plugin handling of `@model_validator(mode="after")` by [@ljodal](https://github.com/ljodal) in [#6753](https://github.com/pydantic/pydantic/pull/6753) +* update pydantic-core to 2.3.1 by [@davidhewitt](https://github.com/davidhewitt) in [#6756](https://github.com/pydantic/pydantic/pull/6756) +* Mypy plugin for settings by [@hramezani](https://github.com/hramezani) in [#6760](https://github.com/pydantic/pydantic/pull/6760) +* Use `contentSchema` keyword for JSON schema by 
[@dmontagu](https://github.com/dmontagu) in [#6715](https://github.com/pydantic/pydantic/pull/6715) +* fast-path checking finite decimals by [@davidhewitt](https://github.com/davidhewitt) in [#6769](https://github.com/pydantic/pydantic/pull/6769) +* Docs update by [@samuelcolvin](https://github.com/samuelcolvin) in [#6771](https://github.com/pydantic/pydantic/pull/6771) +* Improve json schema doc by [@hramezani](https://github.com/hramezani) in [#6772](https://github.com/pydantic/pydantic/pull/6772) +* Update validator docs by [@adriangb](https://github.com/adriangb) in [#6695](https://github.com/pydantic/pydantic/pull/6695) +* Fix typehint for wrap validator by [@dmontagu](https://github.com/dmontagu) in [#6788](https://github.com/pydantic/pydantic/pull/6788) +* 🐛 Fix validation warning for unions of Literal and other type by [@lig](https://github.com/lig) in [#6628](https://github.com/pydantic/pydantic/pull/6628) +* Update documentation for generics support in V2 by [@tpdorsey](https://github.com/tpdorsey) in [#6685](https://github.com/pydantic/pydantic/pull/6685) +* add pydantic-core build info to `version_info()` by [@samuelcolvin](https://github.com/samuelcolvin) in [#6785](https://github.com/pydantic/pydantic/pull/6785) +* Fix pydantic dataclasses that use slots with default values by [@dmontagu](https://github.com/dmontagu) in [#6796](https://github.com/pydantic/pydantic/pull/6796) +* Fix inheritance of hash function for frozen models by [@dmontagu](https://github.com/dmontagu) in [#6789](https://github.com/pydantic/pydantic/pull/6789) +* ✨ Add `SkipJsonSchema` annotation by [@Kludex](https://github.com/Kludex) in [#6653](https://github.com/pydantic/pydantic/pull/6653) +* Error if an invalid field name is used with Field by [@dmontagu](https://github.com/dmontagu) in [#6797](https://github.com/pydantic/pydantic/pull/6797) +* Add `GenericModel` to `MOVED_IN_V2` by [@adriangb](https://github.com/adriangb) in 
[#6776](https://github.com/pydantic/pydantic/pull/6776) +* Remove unused code from `docs/usage/types/custom.md` by [@hramezani](https://github.com/hramezani) in [#6803](https://github.com/pydantic/pydantic/pull/6803) +* Fix `float` -> `Decimal` coercion precision loss by [@adriangb](https://github.com/adriangb) in [#6810](https://github.com/pydantic/pydantic/pull/6810) +* remove email validation from the north star benchmark by [@davidhewitt](https://github.com/davidhewitt) in [#6816](https://github.com/pydantic/pydantic/pull/6816) +* Fix link to mypy by [@progsmile](https://github.com/progsmile) in [#6824](https://github.com/pydantic/pydantic/pull/6824) +* Improve initialization hooks example by [@hramezani](https://github.com/hramezani) in [#6822](https://github.com/pydantic/pydantic/pull/6822) +* Fix default port for mongosrv DSNs by [@dmontagu](https://github.com/dmontagu) in [#6827](https://github.com/pydantic/pydantic/pull/6827) +* Improve API documentation, in particular more links between usage and API docs by [@samuelcolvin](https://github.com/samuelcolvin) in [#6780](https://github.com/pydantic/pydantic/pull/6780) +* update pydantic-core to 2.4.0 by [@davidhewitt](https://github.com/davidhewitt) in [#6831](https://github.com/pydantic/pydantic/pull/6831) +* Fix `annotated_types.MaxLen` validator for custom sequence types by [@ImogenBits](https://github.com/ImogenBits) in [#6809](https://github.com/pydantic/pydantic/pull/6809) +* Update V1 by [@hramezani](https://github.com/hramezani) in [#6833](https://github.com/pydantic/pydantic/pull/6833) +* Make it so callable JSON schema extra works by [@dmontagu](https://github.com/dmontagu) in [#6798](https://github.com/pydantic/pydantic/pull/6798) +* Fix serialization issue with `InstanceOf` by [@dmontagu](https://github.com/dmontagu) in [#6829](https://github.com/pydantic/pydantic/pull/6829) +* Add back support for `json_encoders` by [@adriangb](https://github.com/adriangb) in 
[#6811](https://github.com/pydantic/pydantic/pull/6811) +* Update field annotations when building the schema by [@dmontagu](https://github.com/dmontagu) in [#6838](https://github.com/pydantic/pydantic/pull/6838) +* Use `WeakValueDictionary` to fix generic memory leak by [@dmontagu](https://github.com/dmontagu) in [#6681](https://github.com/pydantic/pydantic/pull/6681) +* Add `config.defer_build` to optionally make model building lazy by [@samuelcolvin](https://github.com/samuelcolvin) in [#6823](https://github.com/pydantic/pydantic/pull/6823) +* delegate `UUID` serialization to pydantic-core by [@davidhewitt](https://github.com/davidhewitt) in [#6850](https://github.com/pydantic/pydantic/pull/6850) +* Update `json_encoders` docs by [@adriangb](https://github.com/adriangb) in [#6848](https://github.com/pydantic/pydantic/pull/6848) +* Fix error message for `staticmethod`/`classmethod` order with validate_call by [@dmontagu](https://github.com/dmontagu) in [#6686](https://github.com/pydantic/pydantic/pull/6686) +* Improve documentation for `Config` by [@samuelcolvin](https://github.com/samuelcolvin) in [#6847](https://github.com/pydantic/pydantic/pull/6847) +* Update serialization doc to mention `Field.exclude` takes priority over call-time `include/exclude` by [@hramezani](https://github.com/hramezani) in [#6851](https://github.com/pydantic/pydantic/pull/6851) +* Allow customizing core schema generation by making `GenerateSchema` public by [@adriangb](https://github.com/adriangb) in [#6737](https://github.com/pydantic/pydantic/pull/6737) + +## v2.0.3 (2023-07-05) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.0.3) + +* Mention PyObject (v1) moving to ImportString (v2) in migration doc by [@slafs](https://github.com/slafs) in [#6456](https://github.com/pydantic/pydantic/pull/6456) +* Fix release-tweet CI by [@Kludex](https://github.com/Kludex) in [#6461](https://github.com/pydantic/pydantic/pull/6461) +* Revise the section on required / 
optional / nullable fields. by [@ybressler](https://github.com/ybressler) in [#6468](https://github.com/pydantic/pydantic/pull/6468) +* Warn if a type hint is not in fact a type by [@adriangb](https://github.com/adriangb) in [#6479](https://github.com/pydantic/pydantic/pull/6479) +* Replace TransformSchema with GetPydanticSchema by [@dmontagu](https://github.com/dmontagu) in [#6484](https://github.com/pydantic/pydantic/pull/6484) +* Fix the un-hashability of various annotation types, for use in caching generic containers by [@dmontagu](https://github.com/dmontagu) in [#6480](https://github.com/pydantic/pydantic/pull/6480) +* PYD-164: Rework custom types docs by [@adriangb](https://github.com/adriangb) in [#6490](https://github.com/pydantic/pydantic/pull/6490) +* Fix ci by [@adriangb](https://github.com/adriangb) in [#6507](https://github.com/pydantic/pydantic/pull/6507) +* Fix forward ref in generic by [@adriangb](https://github.com/adriangb) in [#6511](https://github.com/pydantic/pydantic/pull/6511) +* Fix generation of serialization JSON schemas for core_schema.ChainSchema by [@dmontagu](https://github.com/dmontagu) in [#6515](https://github.com/pydantic/pydantic/pull/6515) +* Document the change in `Field.alias` behavior in Pydantic V2 by [@hramezani](https://github.com/hramezani) in [#6508](https://github.com/pydantic/pydantic/pull/6508) +* Give better error message attempting to compute the json schema of a model with undefined fields by [@dmontagu](https://github.com/dmontagu) in [#6519](https://github.com/pydantic/pydantic/pull/6519) +* Document `alias_priority` by [@tpdorsey](https://github.com/tpdorsey) in [#6520](https://github.com/pydantic/pydantic/pull/6520) +* Add redirect for types documentation by [@tpdorsey](https://github.com/tpdorsey) in [#6513](https://github.com/pydantic/pydantic/pull/6513) +* Allow updating docs without release by [@samuelcolvin](https://github.com/samuelcolvin) in [#6551](https://github.com/pydantic/pydantic/pull/6551) +* 
Ensure docs tests always run in the right folder by [@dmontagu](https://github.com/dmontagu) in [#6487](https://github.com/pydantic/pydantic/pull/6487) +* Defer evaluation of return type hints for serializer functions by [@dmontagu](https://github.com/dmontagu) in [#6516](https://github.com/pydantic/pydantic/pull/6516) +* Disable E501 from Ruff and rely on just Black by [@adriangb](https://github.com/adriangb) in [#6552](https://github.com/pydantic/pydantic/pull/6552) +* Update JSON Schema documentation for V2 by [@tpdorsey](https://github.com/tpdorsey) in [#6492](https://github.com/pydantic/pydantic/pull/6492) +* Add documentation of cyclic reference handling by [@dmontagu](https://github.com/dmontagu) in [#6493](https://github.com/pydantic/pydantic/pull/6493) +* Remove the need for change files by [@samuelcolvin](https://github.com/samuelcolvin) in [#6556](https://github.com/pydantic/pydantic/pull/6556) +* add "north star" benchmark by [@davidhewitt](https://github.com/davidhewitt) in [#6547](https://github.com/pydantic/pydantic/pull/6547) +* Update Dataclasses docs by [@tpdorsey](https://github.com/tpdorsey) in [#6470](https://github.com/pydantic/pydantic/pull/6470) +* ♻️ Use different error message on v1 redirects by [@Kludex](https://github.com/Kludex) in [#6595](https://github.com/pydantic/pydantic/pull/6595) +* ⬆ Upgrade `pydantic-core` to v2.2.0 by [@lig](https://github.com/lig) in [#6589](https://github.com/pydantic/pydantic/pull/6589) +* Fix serialization for IPvAny by [@dmontagu](https://github.com/dmontagu) in [#6572](https://github.com/pydantic/pydantic/pull/6572) +* Improve CI by using PDM instead of pip to install typing-extensions by [@adriangb](https://github.com/adriangb) in [#6602](https://github.com/pydantic/pydantic/pull/6602) +* Add `enum` error type docs by [@lig](https://github.com/lig) in [#6603](https://github.com/pydantic/pydantic/pull/6603) +* 🐛 Fix `max_length` for unicode strings by [@lig](https://github.com/lig) in 
[#6559](https://github.com/pydantic/pydantic/pull/6559) +* Add documentation for accessing features via `pydantic.v1` by [@tpdorsey](https://github.com/tpdorsey) in [#6604](https://github.com/pydantic/pydantic/pull/6604) +* Include extra when iterating over a model by [@adriangb](https://github.com/adriangb) in [#6562](https://github.com/pydantic/pydantic/pull/6562) +* Fix typing of model_validator by [@adriangb](https://github.com/adriangb) in [#6514](https://github.com/pydantic/pydantic/pull/6514) +* Touch up Decimal validator by [@adriangb](https://github.com/adriangb) in [#6327](https://github.com/pydantic/pydantic/pull/6327) +* Fix various docstrings using fixed pytest-examples by [@dmontagu](https://github.com/dmontagu) in [#6607](https://github.com/pydantic/pydantic/pull/6607) +* Handle function validators in a discriminated union by [@dmontagu](https://github.com/dmontagu) in [#6570](https://github.com/pydantic/pydantic/pull/6570) +* Review json_schema.md by [@tpdorsey](https://github.com/tpdorsey) in [#6608](https://github.com/pydantic/pydantic/pull/6608) +* Make validate_call work on basemodel methods by [@dmontagu](https://github.com/dmontagu) in [#6569](https://github.com/pydantic/pydantic/pull/6569) +* add test for big int json serde by [@davidhewitt](https://github.com/davidhewitt) in [#6614](https://github.com/pydantic/pydantic/pull/6614) +* Fix pydantic dataclass problem with dataclasses.field default_factory by [@hramezani](https://github.com/hramezani) in [#6616](https://github.com/pydantic/pydantic/pull/6616) +* Fixed mypy type inference for TypeAdapter by [@zakstucke](https://github.com/zakstucke) in [#6617](https://github.com/pydantic/pydantic/pull/6617) +* Make it work to use None as a generic parameter by [@dmontagu](https://github.com/dmontagu) in [#6609](https://github.com/pydantic/pydantic/pull/6609) +* Make it work to use `$ref` as an alias by [@dmontagu](https://github.com/dmontagu) in 
[#6568](https://github.com/pydantic/pydantic/pull/6568) +* add note to migration guide about changes to `AnyUrl` etc by [@davidhewitt](https://github.com/davidhewitt) in [#6618](https://github.com/pydantic/pydantic/pull/6618) +* 🐛 Support defining `json_schema_extra` on `RootModel` using `Field` by [@lig](https://github.com/lig) in [#6622](https://github.com/pydantic/pydantic/pull/6622) +* Update pre-commit to prevent commits to main branch on accident by [@dmontagu](https://github.com/dmontagu) in [#6636](https://github.com/pydantic/pydantic/pull/6636) +* Fix PDM CI for python 3.7 on MacOS/windows by [@dmontagu](https://github.com/dmontagu) in [#6627](https://github.com/pydantic/pydantic/pull/6627) +* Produce more accurate signatures for pydantic dataclasses by [@dmontagu](https://github.com/dmontagu) in [#6633](https://github.com/pydantic/pydantic/pull/6633) +* Updates to Url types for Pydantic V2 by [@tpdorsey](https://github.com/tpdorsey) in [#6638](https://github.com/pydantic/pydantic/pull/6638) +* Fix list markdown in `transform` docstring by [@StefanBRas](https://github.com/StefanBRas) in [#6649](https://github.com/pydantic/pydantic/pull/6649) +* simplify slots_dataclass construction to appease mypy by [@davidhewitt](https://github.com/davidhewitt) in [#6639](https://github.com/pydantic/pydantic/pull/6639) +* Update TypedDict schema generation docstring by [@adriangb](https://github.com/adriangb) in [#6651](https://github.com/pydantic/pydantic/pull/6651) +* Detect and lint-error for prints by [@dmontagu](https://github.com/dmontagu) in [#6655](https://github.com/pydantic/pydantic/pull/6655) +* Add xfailing test for pydantic-core PR 766 by [@dmontagu](https://github.com/dmontagu) in [#6641](https://github.com/pydantic/pydantic/pull/6641) +* Ignore unrecognized fields from dataclasses metadata by [@dmontagu](https://github.com/dmontagu) in [#6634](https://github.com/pydantic/pydantic/pull/6634) +* Make non-existent class getattr a mypy error by 
[@dmontagu](https://github.com/dmontagu) in [#6658](https://github.com/pydantic/pydantic/pull/6658) +* Update pydantic-core to 2.3.0 by [@hramezani](https://github.com/hramezani) in [#6648](https://github.com/pydantic/pydantic/pull/6648) +* Use OrderedDict from typing_extensions by [@dmontagu](https://github.com/dmontagu) in [#6664](https://github.com/pydantic/pydantic/pull/6664) +* Fix typehint for JSON schema extra callable by [@dmontagu](https://github.com/dmontagu) in [#6659](https://github.com/pydantic/pydantic/pull/6659) + +## v2.0.2 (2023-07-05) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.0.2) + +* Fix bug where round-trip pickling/unpickling a `RootModel` would change the value of `__dict__`, [#6457](https://github.com/pydantic/pydantic/pull/6457) by [@dmontagu](https://github.com/dmontagu) +* Allow single-item discriminated unions, [#6405](https://github.com/pydantic/pydantic/pull/6405) by [@dmontagu](https://github.com/dmontagu) +* Fix issue with union parsing of enums, [#6440](https://github.com/pydantic/pydantic/pull/6440) by [@dmontagu](https://github.com/dmontagu) +* Docs: Fixed `constr` documentation, renamed old `regex` to new `pattern`, [#6452](https://github.com/pydantic/pydantic/pull/6452) by [@miili](https://github.com/miili) +* Change `GenerateJsonSchema.generate_definitions` signature, [#6436](https://github.com/pydantic/pydantic/pull/6436) by [@dmontagu](https://github.com/dmontagu) + +See the full changelog [here](https://github.com/pydantic/pydantic/releases/tag/v2.0.2) + +## v2.0.1 (2023-07-04) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.0.1) + +First patch release of Pydantic V2 + +* Extra fields added via `setattr` (i.e. `m.some_extra_field = 'extra_value'`) + are added to `.model_extra` if `model_config` `extra='allowed'`. 
Fixed [#6333](https://github.com/pydantic/pydantic/pull/6333), [#6365](https://github.com/pydantic/pydantic/pull/6365) by [@aaraney](https://github.com/aaraney) +* Automatically unpack JSON schema '$ref' for custom types, [#6343](https://github.com/pydantic/pydantic/pull/6343) by [@adriangb](https://github.com/adriangb) +* Fix tagged unions multiple processing in submodels, [#6340](https://github.com/pydantic/pydantic/pull/6340) by [@suharnikov](https://github.com/suharnikov) + +See the full changelog [here](https://github.com/pydantic/pydantic/releases/tag/v2.0.1) + +## v2.0 (2023-06-30) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.0) + +Pydantic V2 is here! :tada: + +See [this post](https://docs.pydantic.dev/2.0/blog/pydantic-v2-final/) for more details. + +## v2.0b3 (2023-06-16) + +Third beta pre-release of Pydantic V2 + +See the full changelog [here](https://github.com/pydantic/pydantic/releases/tag/v2.0b3) + +## v2.0b2 (2023-06-03) + +Add `from_attributes` runtime flag to `TypeAdapter.validate_python` and `BaseModel.model_validate`. + +See the full changelog [here](https://github.com/pydantic/pydantic/releases/tag/v2.0b2) + +## v2.0b1 (2023-06-01) + +First beta pre-release of Pydantic V2 + +See the full changelog [here](https://github.com/pydantic/pydantic/releases/tag/v2.0b1) + +## v2.0a4 (2023-05-05) + +Fourth pre-release of Pydantic V2 + +See the full changelog [here](https://github.com/pydantic/pydantic/releases/tag/v2.0a4) + +## v2.0a3 (2023-04-20) + +Third pre-release of Pydantic V2 + +See the full changelog [here](https://github.com/pydantic/pydantic/releases/tag/v2.0a3) + +## v2.0a2 (2023-04-12) + +Second pre-release of Pydantic V2 + +See the full changelog [here](https://github.com/pydantic/pydantic/releases/tag/v2.0a2) + +## v2.0a1 (2023-04-03) + +First pre-release of Pydantic V2! + +See [this post](https://docs.pydantic.dev/blog/pydantic-v2-alpha/) for more details. + + +... 
see [here](https://docs.pydantic.dev/changelog/#v0322-2019-08-17) for earlier changes. diff --git a/vllm/lib/python3.10/site-packages/pydantic-2.10.3.dist-info/RECORD b/vllm/lib/python3.10/site-packages/pydantic-2.10.3.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..1402a51223e1d2c31912ef6b1337a2ad4e08ca05 --- /dev/null +++ b/vllm/lib/python3.10/site-packages/pydantic-2.10.3.dist-info/RECORD @@ -0,0 +1,214 @@ +pydantic-2.10.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pydantic-2.10.3.dist-info/METADATA,sha256=_bZynjHWDgFn7dKBCsNVHMyJ1PIyyrmNlOK9obxtSS4,172031 +pydantic-2.10.3.dist-info/RECORD,, +pydantic-2.10.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pydantic-2.10.3.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87 +pydantic-2.10.3.dist-info/licenses/LICENSE,sha256=qeGG88oWte74QxjnpwFyE1GgDLe4rjpDlLZ7SeNSnvM,1129 +pydantic/__init__.py,sha256=d9oJ0Lvot81l-V1483LEf_UbsGxxz88tOiptSpWYGww,14714 +pydantic/__pycache__/__init__.cpython-310.pyc,, +pydantic/__pycache__/_migration.cpython-310.pyc,, +pydantic/__pycache__/alias_generators.cpython-310.pyc,, +pydantic/__pycache__/aliases.cpython-310.pyc,, +pydantic/__pycache__/annotated_handlers.cpython-310.pyc,, +pydantic/__pycache__/class_validators.cpython-310.pyc,, +pydantic/__pycache__/color.cpython-310.pyc,, +pydantic/__pycache__/config.cpython-310.pyc,, +pydantic/__pycache__/dataclasses.cpython-310.pyc,, +pydantic/__pycache__/datetime_parse.cpython-310.pyc,, +pydantic/__pycache__/decorator.cpython-310.pyc,, +pydantic/__pycache__/env_settings.cpython-310.pyc,, +pydantic/__pycache__/error_wrappers.cpython-310.pyc,, +pydantic/__pycache__/errors.cpython-310.pyc,, +pydantic/__pycache__/fields.cpython-310.pyc,, +pydantic/__pycache__/functional_serializers.cpython-310.pyc,, +pydantic/__pycache__/functional_validators.cpython-310.pyc,, +pydantic/__pycache__/generics.cpython-310.pyc,, 
+pydantic/__pycache__/json.cpython-310.pyc,, +pydantic/__pycache__/json_schema.cpython-310.pyc,, +pydantic/__pycache__/main.cpython-310.pyc,, +pydantic/__pycache__/mypy.cpython-310.pyc,, +pydantic/__pycache__/networks.cpython-310.pyc,, +pydantic/__pycache__/parse.cpython-310.pyc,, +pydantic/__pycache__/root_model.cpython-310.pyc,, +pydantic/__pycache__/schema.cpython-310.pyc,, +pydantic/__pycache__/tools.cpython-310.pyc,, +pydantic/__pycache__/type_adapter.cpython-310.pyc,, +pydantic/__pycache__/types.cpython-310.pyc,, +pydantic/__pycache__/typing.cpython-310.pyc,, +pydantic/__pycache__/utils.cpython-310.pyc,, +pydantic/__pycache__/validate_call_decorator.cpython-310.pyc,, +pydantic/__pycache__/validators.cpython-310.pyc,, +pydantic/__pycache__/version.cpython-310.pyc,, +pydantic/__pycache__/warnings.cpython-310.pyc,, +pydantic/_internal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pydantic/_internal/__pycache__/__init__.cpython-310.pyc,, +pydantic/_internal/__pycache__/_config.cpython-310.pyc,, +pydantic/_internal/__pycache__/_core_metadata.cpython-310.pyc,, +pydantic/_internal/__pycache__/_core_utils.cpython-310.pyc,, +pydantic/_internal/__pycache__/_dataclasses.cpython-310.pyc,, +pydantic/_internal/__pycache__/_decorators.cpython-310.pyc,, +pydantic/_internal/__pycache__/_decorators_v1.cpython-310.pyc,, +pydantic/_internal/__pycache__/_discriminated_union.cpython-310.pyc,, +pydantic/_internal/__pycache__/_docs_extraction.cpython-310.pyc,, +pydantic/_internal/__pycache__/_fields.cpython-310.pyc,, +pydantic/_internal/__pycache__/_forward_ref.cpython-310.pyc,, +pydantic/_internal/__pycache__/_generate_schema.cpython-310.pyc,, +pydantic/_internal/__pycache__/_generics.cpython-310.pyc,, +pydantic/_internal/__pycache__/_git.cpython-310.pyc,, +pydantic/_internal/__pycache__/_import_utils.cpython-310.pyc,, +pydantic/_internal/__pycache__/_internal_dataclass.cpython-310.pyc,, +pydantic/_internal/__pycache__/_known_annotated_metadata.cpython-310.pyc,, 
+pydantic/_internal/__pycache__/_mock_val_ser.cpython-310.pyc,, +pydantic/_internal/__pycache__/_model_construction.cpython-310.pyc,, +pydantic/_internal/__pycache__/_namespace_utils.cpython-310.pyc,, +pydantic/_internal/__pycache__/_repr.cpython-310.pyc,, +pydantic/_internal/__pycache__/_schema_generation_shared.cpython-310.pyc,, +pydantic/_internal/__pycache__/_serializers.cpython-310.pyc,, +pydantic/_internal/__pycache__/_signature.cpython-310.pyc,, +pydantic/_internal/__pycache__/_std_types_schema.cpython-310.pyc,, +pydantic/_internal/__pycache__/_typing_extra.cpython-310.pyc,, +pydantic/_internal/__pycache__/_utils.cpython-310.pyc,, +pydantic/_internal/__pycache__/_validate_call.cpython-310.pyc,, +pydantic/_internal/__pycache__/_validators.cpython-310.pyc,, +pydantic/_internal/_config.py,sha256=HsKc9guTGHyRdseuTA1gIBSfwAV_psyhEh5M_vzCPP0,12548 +pydantic/_internal/_core_metadata.py,sha256=-gcTwNkQP-hJl93mZIswbG3a1MRnPi5yup1ut3uyzLU,4840 +pydantic/_internal/_core_utils.py,sha256=rsC70zT-W_KALZ9f1xBJrnKE-wlMTOPYSdDjiE9Xb4o,25168 +pydantic/_internal/_dataclasses.py,sha256=x6sVAFvWfTpyePPd5gflPsedM0kIB-r4TX7M_UEBh3A,9486 +pydantic/_internal/_decorators.py,sha256=xWTj0zYYpk2KTuWeWjTIGDEwDqSS7rYDoU9k8dQ4lis,32304 +pydantic/_internal/_decorators_v1.py,sha256=bkjIhVdah-M9V3Y4laICdD1B4cFwS7f03ewvvf5JnFY,6198 +pydantic/_internal/_discriminated_union.py,sha256=eUu06ze9wOIe3WQmilssyoOGsdJ_L8I3E1owpeod5ls,26446 +pydantic/_internal/_docs_extraction.py,sha256=bIWhw7nFKFt-qD-txtKRAb5VqGlAD0H9YzEEXE3PHj4,3791 +pydantic/_internal/_fields.py,sha256=xgz-p1rNwxhjbblXNZmMCXH_OsbKKbcmaurDdVIOHN8,17060 +pydantic/_internal/_forward_ref.py,sha256=5n3Y7-3AKLn8_FS3Yc7KutLiPUhyXmAtkEZOaFnonwM,611 +pydantic/_internal/_generate_schema.py,sha256=IAP2dk0xXAbk5b70kSfvR8yFOJ-PJg38tcE0Ayon69k,114493 +pydantic/_internal/_generics.py,sha256=l-t1Ul-txCyJFkjk26xfXC9MCUQrJz_dU2-CcowKcbo,22696 +pydantic/_internal/_git.py,sha256=lN6QlZm8RNSuNsUdHHC1F5a4VToe8vu5tlUxAfaJgGE,784 
+pydantic/_internal/_import_utils.py,sha256=eLe9Aa1GMQB4TD9sKz42yyJBTiTAjBfCuua_rYFTrQo,448 +pydantic/_internal/_internal_dataclass.py,sha256=_bedc1XbuuygRGiLZqkUkwwFpQaoR1hKLlR501nyySY,144 +pydantic/_internal/_known_annotated_metadata.py,sha256=EhAj2V2SMkOvTRII01dJDeSasUY2JTaEELJIoxG23YM,16186 +pydantic/_internal/_mock_val_ser.py,sha256=Z0ipRGcU_EhVyiJjkAhX2YS6ibRNbuIf2Ng-Y8BpIT0,9166 +pydantic/_internal/_model_construction.py,sha256=SFykQmWuNIjcEfojDMMazQHaGguRZl2Xf2qse6h6RDQ,36807 +pydantic/_internal/_namespace_utils.py,sha256=Mb7GRVZ0dEya72VuPuMnZPOxsxab5OG1vDLbD7xRyos,11818 +pydantic/_internal/_repr.py,sha256=AP0I2216KzlbKR47WxjTarBitIWWvbj4hN6yfXYznkU,4960 +pydantic/_internal/_schema_generation_shared.py,sha256=fgjsYtE0mxRN6Ip7wt02V7mMSHcmD9zNHPoplGRtUrk,4897 +pydantic/_internal/_serializers.py,sha256=s0peFJTaHTfdmr4V-l6n3Dh-57ZHeUo8oJcq3NOjJNI,1356 +pydantic/_internal/_signature.py,sha256=6kwSolb53Y6NMq2RPRWMLwa1eBpnza6duJQqGC0Sea4,6779 +pydantic/_internal/_std_types_schema.py,sha256=XrPtBN0H9iBfkRDDallczOyrvBnM0pOa2CxEkt6KnWw,16163 +pydantic/_internal/_typing_extra.py,sha256=lBl9wTpPdgn-xfTH7y0rX-E9sLPDnp0CHN6rUo-Bk10,33396 +pydantic/_internal/_utils.py,sha256=BAFf8ql0hnTeVhH1L87amvS6L-DjsjXWfiCqfuD4uAI,13537 +pydantic/_internal/_validate_call.py,sha256=6EuHEQeYzr7A1oH_DYl68RpkkhQ9Rz0PpgsgF-HbH_0,4536 +pydantic/_internal/_validators.py,sha256=YpveqmG9FkuRedOaVhUaEzXsJsf39GHCeNhMNDdsZC4,15933 +pydantic/_migration.py,sha256=j6TbRpJofjAX8lr-k2nVnQcBR9RD2B91I7Ulcw_ZzEo,11913 +pydantic/alias_generators.py,sha256=KM1n3u4JfLSBl1UuYg3hoYHzXJD-yvgrnq8u1ccwh_A,2124 +pydantic/aliases.py,sha256=lzfmL-dZUh9b9kGXqk0ai3yviUZzY61rs2sgDOeWrnQ,4822 +pydantic/annotated_handlers.py,sha256=WfyFSqwoEIFXBh7T73PycKloI1DiX45GWi0-JOsCR4Y,4407 +pydantic/class_validators.py,sha256=i_V3j-PYdGLSLmj_IJZekTRjunO8SIVz8LMlquPyP7E,148 +pydantic/color.py,sha256=4GrtPvFCBKdM-1NpLVFOC7KkLejyZd1BiELfCKvT2yw,21494 +pydantic/config.py,sha256=iI9sEiVQPwW6LshLUzbxTcNyaDyeOBm7KsmgNhyll7U,35611 
+pydantic/dataclasses.py,sha256=S7MOr6CzM_yhx9tlK8rsuGzTIy2OcFdJSPe5c6g0vEg,15900 +pydantic/datetime_parse.py,sha256=QC-WgMxMr_wQ_mNXUS7AVf-2hLEhvvsPY1PQyhSGOdk,150 +pydantic/decorator.py,sha256=YX-jUApu5AKaVWKPoaV-n-4l7UbS69GEt9Ra3hszmKI,145 +pydantic/deprecated/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pydantic/deprecated/__pycache__/__init__.cpython-310.pyc,, +pydantic/deprecated/__pycache__/class_validators.cpython-310.pyc,, +pydantic/deprecated/__pycache__/config.cpython-310.pyc,, +pydantic/deprecated/__pycache__/copy_internals.cpython-310.pyc,, +pydantic/deprecated/__pycache__/decorator.cpython-310.pyc,, +pydantic/deprecated/__pycache__/json.cpython-310.pyc,, +pydantic/deprecated/__pycache__/parse.cpython-310.pyc,, +pydantic/deprecated/__pycache__/tools.cpython-310.pyc,, +pydantic/deprecated/class_validators.py,sha256=IARV4v0NuHCZVVc_ItfaVNJOk1Vful7iv8bawj9o4Uw,10245 +pydantic/deprecated/config.py,sha256=eKhnG--ZQtJ4A7KA3xeF76E15-4pVau3B5T8D39ptFs,2663 +pydantic/deprecated/copy_internals.py,sha256=2y3G0pAJMuahxIvN4IJzaE9hzN1C81h2KQspcWB4RuU,7630 +pydantic/deprecated/decorator.py,sha256=Fa9ou1uUmR3NvyJsPSFGfQLI4n27y4Bs2LhlkDPmNmA,10843 +pydantic/deprecated/json.py,sha256=PIzc-zAZxL-7O2gW7tY986XASvgsgD-J1D5pmhd-JLM,4669 +pydantic/deprecated/parse.py,sha256=Gzd6b_g8zJXcuE7QRq5adhx_EMJahXfcpXCF0RgrqqI,2511 +pydantic/deprecated/tools.py,sha256=XUoIW9W4sgOUWQ6Xzf-Z_NukUC1l_yUwz2_n0fE3MEI,3336 +pydantic/env_settings.py,sha256=6IHeeWEqlUPRUv3V-AXiF_W91fg2Jw_M3O0l34J_eyA,148 +pydantic/error_wrappers.py,sha256=RK6mqATc9yMD-KBD9IJS9HpKCprWHd8wo84Bnm-3fR8,150 +pydantic/errors.py,sha256=axy4Uzfdqme7zjLXmqUM-Q3Z8lpit7MNn7PsZ7WLu80,5009 +pydantic/experimental/__init__.py,sha256=j08eROfz-xW4k_X9W4m2AW26IVdyF3Eg1OzlIGA11vk,328 +pydantic/experimental/__pycache__/__init__.cpython-310.pyc,, +pydantic/experimental/__pycache__/pipeline.cpython-310.pyc,, +pydantic/experimental/pipeline.py,sha256=PjiTnmaN5XwLUk-Nlzfr2C2R-xrMz7Nz-XVC8kiFGyU,23979 
+pydantic/fields.py,sha256=8HbAEdQv6DMV7DH6oj5erOLpLPORf0rTbKLWd83FLQI,62631 +pydantic/functional_serializers.py,sha256=gqOVOETSVZfQqZg6jrkFSA1yMXCqVwpkaVy-oDNsKJA,17005 +pydantic/functional_validators.py,sha256=_QsZEL92th_3RjaJj1ZNvrmsCCFLcAkZjnm-nGz3ihA,30476 +pydantic/generics.py,sha256=0ZqZ9O9annIj_3mGBRqps4htey3b5lV1-d2tUxPMMnA,144 +pydantic/json.py,sha256=ZH8RkI7h4Bz-zp8OdTAxbJUoVvcoU-jhMdRZ0B-k0xc,140 +pydantic/json_schema.py,sha256=uAXBbjCYRVQMcwaxIzVwEZ5rGk0ONzX_z9VAn0_XTlg,112810 +pydantic/main.py,sha256=lkTnG0_YPSyoW-Z0BkS7XbdMKBBshaR3xkk4JfhmKmk,76287 +pydantic/mypy.py,sha256=70iw65os6WJuuVQvNMqzZDehRAuVRL-79NVr0B8NuEc,56366 +pydantic/networks.py,sha256=UsQqJGVRIk5CgpC5xFqgzwVjYiON0vzXFlXfQE7tP9s,39156 +pydantic/parse.py,sha256=wkd82dgtvWtD895U_I6E1htqMlGhBSYEV39cuBSeo3A,141 +pydantic/plugin/__init__.py,sha256=63AkDEwbqLQKtN6Pj9BSTnRp1iOyTNv0KrXP3N0dHZ0,6117 +pydantic/plugin/__pycache__/__init__.cpython-310.pyc,, +pydantic/plugin/__pycache__/_loader.cpython-310.pyc,, +pydantic/plugin/__pycache__/_schema_validator.cpython-310.pyc,, +pydantic/plugin/_loader.py,sha256=rmLbIwThDmVR1JwFVi_XvrLH7b1A5teMED-O3pr6Gk4,2140 +pydantic/plugin/_schema_validator.py,sha256=VFaNQpVNSuI2ymRDkTwBGaMKeKmySk1TbW-3rQeozxk,5240 +pydantic/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pydantic/root_model.py,sha256=g4SGnHIlXlQZAJG-udwt7BFjq82_D_2U3W_LlkCcCCI,6231 +pydantic/schema.py,sha256=Vqqjvq_LnapVknebUd3Bp_J1p2gXZZnZRgL48bVEG7o,142 +pydantic/tools.py,sha256=iHQpd8SJ5DCTtPV5atAV06T89bjSaMFeZZ2LX9lasZY,141 +pydantic/type_adapter.py,sha256=j7-FPYjWJXFVYozJ5qwOmmBGO349gVbnQ8aIdvf_8sU,28730 +pydantic/types.py,sha256=UMUaWU3O_bdrH57JDSN1G34BvqVpAudW6Y7GoN1u7P8,103958 +pydantic/typing.py,sha256=P7feA35MwTcLsR1uL7db0S-oydBxobmXa55YDoBgajQ,138 +pydantic/utils.py,sha256=15nR2QpqTBFlQV4TNtTItMyTJx_fbyV-gPmIEY1Gooc,141 +pydantic/v1/__init__.py,sha256=SxQPklgBs4XHJwE6BZ9qoewYoGiNyYUnmHzEFCZbfnI,2946 +pydantic/v1/__pycache__/__init__.cpython-310.pyc,, 
+pydantic/v1/__pycache__/_hypothesis_plugin.cpython-310.pyc,, +pydantic/v1/__pycache__/annotated_types.cpython-310.pyc,, +pydantic/v1/__pycache__/class_validators.cpython-310.pyc,, +pydantic/v1/__pycache__/color.cpython-310.pyc,, +pydantic/v1/__pycache__/config.cpython-310.pyc,, +pydantic/v1/__pycache__/dataclasses.cpython-310.pyc,, +pydantic/v1/__pycache__/datetime_parse.cpython-310.pyc,, +pydantic/v1/__pycache__/decorator.cpython-310.pyc,, +pydantic/v1/__pycache__/env_settings.cpython-310.pyc,, +pydantic/v1/__pycache__/error_wrappers.cpython-310.pyc,, +pydantic/v1/__pycache__/errors.cpython-310.pyc,, +pydantic/v1/__pycache__/fields.cpython-310.pyc,, +pydantic/v1/__pycache__/generics.cpython-310.pyc,, +pydantic/v1/__pycache__/json.cpython-310.pyc,, +pydantic/v1/__pycache__/main.cpython-310.pyc,, +pydantic/v1/__pycache__/mypy.cpython-310.pyc,, +pydantic/v1/__pycache__/networks.cpython-310.pyc,, +pydantic/v1/__pycache__/parse.cpython-310.pyc,, +pydantic/v1/__pycache__/schema.cpython-310.pyc,, +pydantic/v1/__pycache__/tools.cpython-310.pyc,, +pydantic/v1/__pycache__/types.cpython-310.pyc,, +pydantic/v1/__pycache__/typing.cpython-310.pyc,, +pydantic/v1/__pycache__/utils.cpython-310.pyc,, +pydantic/v1/__pycache__/validators.cpython-310.pyc,, +pydantic/v1/__pycache__/version.cpython-310.pyc,, +pydantic/v1/_hypothesis_plugin.py,sha256=5ES5xWuw1FQAsymLezy8QgnVz0ZpVfU3jkmT74H27VQ,14847 +pydantic/v1/annotated_types.py,sha256=uk2NAAxqiNELKjiHhyhxKaIOh8F1lYW_LzrW3X7oZBc,3157 +pydantic/v1/class_validators.py,sha256=ULOaIUgYUDBsHL7EEVEarcM-UubKUggoN8hSbDonsFE,14672 +pydantic/v1/color.py,sha256=iZABLYp6OVoo2AFkP9Ipri_wSc6-Kklu8YuhSartd5g,16844 +pydantic/v1/config.py,sha256=a6P0Wer9x4cbwKW7Xv8poSUqM4WP-RLWwX6YMpYq9AA,6532 +pydantic/v1/dataclasses.py,sha256=784cqvInbwIPWr9usfpX3ch7z4t3J2tTK6N067_wk1o,18172 +pydantic/v1/datetime_parse.py,sha256=4Qy1kQpq3rNVZJeIHeSPDpuS2Bvhp1KPtzJG1xu-H00,7724 +pydantic/v1/decorator.py,sha256=zaaxxxoWPCm818D1bs0yhapRjXm32V8G0ZHWCdM1uXA,10339 
+pydantic/v1/env_settings.py,sha256=A9VXwtRl02AY-jH0C0ouy5VNw3fi6F_pkzuHDjgAAOM,14105 +pydantic/v1/error_wrappers.py,sha256=6625Mfw9qkC2NwitB_JFAWe8B-Xv6zBU7rL9k28tfyo,5196 +pydantic/v1/errors.py,sha256=mIwPED5vGM5Q5v4C4Z1JPldTRH-omvEylH6ksMhOmPw,17726 +pydantic/v1/fields.py,sha256=VqWJCriUNiEyptXroDVJ501JpVA0en2VANcksqXL2b8,50649 +pydantic/v1/generics.py,sha256=VzC9YUV-EbPpQ3aAfk1cNFej79_IzznkQ7WrmTTZS9E,17871 +pydantic/v1/json.py,sha256=WQ5Hy_hIpfdR3YS8k6N2E6KMJzsdbBi_ldWOPJaV81M,3390 +pydantic/v1/main.py,sha256=nFvpoUH2UKAe2vzw7wVUNkXBeIUe1Xs4EIv-JBqSo2k,44555 +pydantic/v1/mypy.py,sha256=AiZYkv127-WsgL9vwvLqj0dS8dz-HUMbH9Yvvlq4bfE,38949 +pydantic/v1/networks.py,sha256=HYNtKAfOmOnKJpsDg1g6SIkj9WPhU_-i8l5e2JKBpG4,22124 +pydantic/v1/parse.py,sha256=BJtdqiZRtav9VRFCmOxoY-KImQmjPy-A_NoojiFUZxY,1821 +pydantic/v1/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pydantic/v1/schema.py,sha256=aqBuA--cq8gAVkim5BJPFASHzOZ8dFtmFX_fNGr6ip4,47801 +pydantic/v1/tools.py,sha256=1lDdXHk0jL5uP3u5RCYAvUAlGClgAO-45lkq9j7fyBA,2881 +pydantic/v1/types.py,sha256=Fltx5GoP_qaUmAktlGz7nFeJa13yNy3FY1-RcMzEVt8,35455 +pydantic/v1/typing.py,sha256=GjThObaqHMhLaECzYUrDk0X-RHjo7x6vsv4Z4qUYV8I,19387 +pydantic/v1/utils.py,sha256=fvjXCNeaU1jB-_mi0SUk4fzN4OpvX-SKTu4yJMPRIlQ,25941 +pydantic/v1/validators.py,sha256=lyUkn1MWhHxlCX5ZfEgFj_CAHojoiPcaQeMdEM9XviU,22187 +pydantic/v1/version.py,sha256=IHqnBuD5DuqB3Pxzw1L8EJsYheQBzxaIZeCZkPvI7Rk,1039 +pydantic/validate_call_decorator.py,sha256=B41EPSwgiZev4kK0xPsCOizs04P4GKUQGq1g7XwdDww,4151 +pydantic/validators.py,sha256=pwbIJXVb1CV2mAE4w_EGfNj7DwzsKaWw_tTL6cviTus,146 +pydantic/version.py,sha256=HwQykJSextYMfoYcuo95agKTKW6c839cpIy2vIRZlWY,2498 +pydantic/warnings.py,sha256=5Ok3GxyP1LkcEvONxqjQkuzFaOuly7QdCUhmEpFM5Lk,3350 diff --git a/vllm/lib/python3.10/site-packages/pydantic-2.10.3.dist-info/REQUESTED b/vllm/lib/python3.10/site-packages/pydantic-2.10.3.dist-info/REQUESTED new file mode 100644 index 
0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/vllm/lib/python3.10/site-packages/pydantic-2.10.3.dist-info/WHEEL b/vllm/lib/python3.10/site-packages/pydantic-2.10.3.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..21aaa72961a8af71c17d2cb3b76d5f7f567100e4 --- /dev/null +++ b/vllm/lib/python3.10/site-packages/pydantic-2.10.3.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.26.3 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/vllm/lib/python3.10/site-packages/pydantic-2.10.3.dist-info/licenses/LICENSE b/vllm/lib/python3.10/site-packages/pydantic-2.10.3.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..488c6260c10f2e88fa1fae58a63fccec8d600cd1 --- /dev/null +++ b/vllm/lib/python3.10/site-packages/pydantic-2.10.3.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2017 to present Pydantic Services Inc. and individual contributors. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vllm/lib/python3.10/site-packages/xxhash/__init__.py b/vllm/lib/python3.10/site-packages/xxhash/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..043c486ddf9d2770d7fbee4bff1c26985ac6cd7e --- /dev/null +++ b/vllm/lib/python3.10/site-packages/xxhash/__init__.py @@ -0,0 +1,63 @@ +from ._xxhash import ( + xxh32, + xxh32_digest, + xxh32_intdigest, + xxh32_hexdigest, + xxh64, + xxh64_digest, + xxh64_intdigest, + xxh64_hexdigest, + xxh3_64, + xxh3_64_digest, + xxh3_64_intdigest, + xxh3_64_hexdigest, + xxh3_128, + xxh3_128_digest, + xxh3_128_intdigest, + xxh3_128_hexdigest, + XXHASH_VERSION, +) + +from .version import VERSION, VERSION_TUPLE + + +xxh128 = xxh3_128 +xxh128_hexdigest = xxh3_128_hexdigest +xxh128_intdigest = xxh3_128_intdigest +xxh128_digest = xxh3_128_digest + +algorithms_available = set([ + "xxh32", + "xxh64", + "xxh3_64", + "xxh128", + "xxh3_128", +]) + + +__all__ = [ + "xxh32", + "xxh32_digest", + "xxh32_intdigest", + "xxh32_hexdigest", + "xxh64", + "xxh64_digest", + "xxh64_intdigest", + "xxh64_hexdigest", + "xxh3_64", + "xxh3_64_digest", + "xxh3_64_intdigest", + "xxh3_64_hexdigest", + "xxh3_128", + "xxh3_128_digest", + "xxh3_128_intdigest", + "xxh3_128_hexdigest", + "xxh128", + "xxh128_digest", + "xxh128_intdigest", + "xxh128_hexdigest", + "VERSION", + "VERSION_TUPLE", + "XXHASH_VERSION", + "algorithms_available", +] diff --git a/vllm/lib/python3.10/site-packages/xxhash/__init__.pyi b/vllm/lib/python3.10/site-packages/xxhash/__init__.pyi new file mode 100644 index 0000000000000000000000000000000000000000..03c62497a3aba0fc5695ee033c0a402e41ae8c44 --- /dev/null +++ b/vllm/lib/python3.10/site-packages/xxhash/__init__.pyi @@ -0,0 +1,62 @@ +import array +from 
typing import Union +from typing_extensions import final + +_InputType = Union[str, bytes, bytearray, memoryview, array.ArrayType[int]] + +VERSION: str +XXHASH_VERSION: str +VERSION_TUPLE: tuple[int, ...] + +algorithms_available: set[str] + +class _Hasher: + def __init__(self, input: _InputType = ..., seed: int = ...) -> None: ... + def update(self, input: _InputType) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def intdigest(self) -> int: ... + def copy(self) -> _Hasher: ... + def reset(self) -> None: ... + @property + def digestsize(self) -> int: ... + @property + def digest_size(self) -> int: ... + @property + def block_size(self) -> int: ... + @property + def name(self) -> str: ... + @property + def seed(self) -> int: ... + +@final +class xxh32(_Hasher): ... + +@final +class xxh3_64(_Hasher): ... + +@final +class xxh3_128(_Hasher): ... + +xxh64 = xxh3_64 +xxh128 = xxh3_128 + +def xxh32_digest(args: _InputType, seed: int = ...) -> bytes: ... +def xxh32_hexdigest(args: _InputType, seed: int = ...) -> str: ... +def xxh32_intdigest(args: _InputType, seed: int = ...) -> int: ... + +def xxh3_64_digest(args: _InputType, seed: int = ...) -> bytes: ... +def xxh3_64_hexdigest(args: _InputType, seed: int = ...) -> str: ... +def xxh3_64_intdigest(args: _InputType, seed: int = ...) -> int: ... + +def xxh3_128_digest(args: _InputType, seed: int = ...) -> bytes: ... +def xxh3_128_hexdigest(args: _InputType, seed: int = ...) -> str: ... +def xxh3_128_intdigest(args: _InputType, seed: int = ...) -> int: ... 
+ +xxh64_digest = xxh3_64_digest +xxh64_hexdigest = xxh3_64_hexdigest +xxh64_intdigest = xxh3_64_intdigest + +xxh128_digest = xxh3_128_digest +xxh128_hexdigest = xxh3_128_hexdigest +xxh128_intdigest = xxh3_128_intdigest diff --git a/vllm/lib/python3.10/site-packages/xxhash/__pycache__/__init__.cpython-310.pyc b/vllm/lib/python3.10/site-packages/xxhash/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..503259e3a8441b36859830b4338097c499b8c574 Binary files /dev/null and b/vllm/lib/python3.10/site-packages/xxhash/__pycache__/__init__.cpython-310.pyc differ diff --git a/vllm/lib/python3.10/site-packages/xxhash/__pycache__/version.cpython-310.pyc b/vllm/lib/python3.10/site-packages/xxhash/__pycache__/version.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7ece39fe6baa73c61ac9f33f9447558788d85bfd Binary files /dev/null and b/vllm/lib/python3.10/site-packages/xxhash/__pycache__/version.cpython-310.pyc differ diff --git a/vllm/lib/python3.10/site-packages/xxhash/py.typed b/vllm/lib/python3.10/site-packages/xxhash/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/vllm/lib/python3.10/site-packages/xxhash/version.py b/vllm/lib/python3.10/site-packages/xxhash/version.py new file mode 100644 index 0000000000000000000000000000000000000000..91fde107bab8c18449d2fe179b3ceee4d96bdbf6 --- /dev/null +++ b/vllm/lib/python3.10/site-packages/xxhash/version.py @@ -0,0 +1,2 @@ +VERSION = "3.5.0" +VERSION_TUPLE = (3, 5, 0)