ZTWHHH commited on
Commit
b09c2d6
·
verified ·
1 Parent(s): 00b87c5

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +1 -0
  2. videollama2/lib/python3.10/site-packages/torch/include/ATen/ExpandBase.h +30 -0
  3. videollama2/lib/python3.10/site-packages/torch/include/ATen/Layout.h +2 -0
  4. videollama2/lib/python3.10/site-packages/torch/include/ATen/NumericUtils.h +194 -0
  5. videollama2/lib/python3.10/site-packages/torch/include/ATen/TracerMode.h +132 -0
  6. vllm/lib/python3.10/site-packages/_multiprocess/__init__.py +8 -0
  7. vllm/lib/python3.10/site-packages/_multiprocess/__pycache__/__init__.cpython-310.pyc +0 -0
  8. vllm/lib/python3.10/site-packages/blake3-1.0.4.dist-info/INSTALLER +1 -0
  9. vllm/lib/python3.10/site-packages/blake3-1.0.4.dist-info/METADATA +108 -0
  10. vllm/lib/python3.10/site-packages/blake3-1.0.4.dist-info/RECORD +10 -0
  11. vllm/lib/python3.10/site-packages/blake3-1.0.4.dist-info/REQUESTED +0 -0
  12. vllm/lib/python3.10/site-packages/blake3-1.0.4.dist-info/WHEEL +4 -0
  13. vllm/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/LICENSE +21 -0
  14. vllm/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/METADATA +18 -0
  15. vllm/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/RECORD +13 -0
  16. vllm/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/REQUESTED +0 -0
  17. vllm/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/WHEEL +5 -0
  18. vllm/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/top_level.txt +1 -0
  19. vllm/lib/python3.10/site-packages/grpc/__pycache__/__init__.cpython-310.pyc +0 -0
  20. vllm/lib/python3.10/site-packages/grpc/__pycache__/_auth.cpython-310.pyc +0 -0
  21. vllm/lib/python3.10/site-packages/grpc/__pycache__/_channel.cpython-310.pyc +0 -0
  22. vllm/lib/python3.10/site-packages/grpc/__pycache__/_common.cpython-310.pyc +0 -0
  23. vllm/lib/python3.10/site-packages/grpc/__pycache__/_compression.cpython-310.pyc +0 -0
  24. vllm/lib/python3.10/site-packages/grpc/__pycache__/_grpcio_metadata.cpython-310.pyc +0 -0
  25. vllm/lib/python3.10/site-packages/grpc/__pycache__/_interceptor.cpython-310.pyc +0 -0
  26. vllm/lib/python3.10/site-packages/grpc/__pycache__/_observability.cpython-310.pyc +0 -0
  27. vllm/lib/python3.10/site-packages/grpc/__pycache__/_plugin_wrapping.cpython-310.pyc +0 -0
  28. vllm/lib/python3.10/site-packages/grpc/__pycache__/_runtime_protos.cpython-310.pyc +0 -0
  29. vllm/lib/python3.10/site-packages/grpc/__pycache__/_server.cpython-310.pyc +0 -0
  30. vllm/lib/python3.10/site-packages/grpc/__pycache__/_simple_stubs.cpython-310.pyc +0 -0
  31. vllm/lib/python3.10/site-packages/grpc/__pycache__/_typing.cpython-310.pyc +0 -0
  32. vllm/lib/python3.10/site-packages/grpc/__pycache__/_utilities.cpython-310.pyc +0 -0
  33. vllm/lib/python3.10/site-packages/grpc/_cython/__init__.py +13 -0
  34. vllm/lib/python3.10/site-packages/grpc/_cython/__pycache__/__init__.cpython-310.pyc +0 -0
  35. vllm/lib/python3.10/site-packages/grpc/_cython/_credentials/roots.pem +0 -0
  36. vllm/lib/python3.10/site-packages/grpc/_cython/_cygrpc/__init__.py +13 -0
  37. vllm/lib/python3.10/site-packages/grpc/_cython/_cygrpc/__pycache__/__init__.cpython-310.pyc +0 -0
  38. vllm/lib/python3.10/site-packages/grpc/aio/__init__.py +95 -0
  39. vllm/lib/python3.10/site-packages/grpc/aio/__pycache__/_call.cpython-310.pyc +0 -0
  40. vllm/lib/python3.10/site-packages/grpc/aio/_base_server.py +385 -0
  41. vllm/lib/python3.10/site-packages/grpc/aio/_channel.py +627 -0
  42. vllm/lib/python3.10/site-packages/grpc/aio/_interceptor.py +1178 -0
  43. vllm/lib/python3.10/site-packages/grpc/aio/_metadata.py +137 -0
  44. vllm/lib/python3.10/site-packages/grpc/aio/_server.py +239 -0
  45. vllm/lib/python3.10/site-packages/grpc/aio/_utils.py +22 -0
  46. vllm/lib/python3.10/site-packages/grpc/beta/__init__.py +13 -0
  47. vllm/lib/python3.10/site-packages/grpc/beta/__pycache__/__init__.cpython-310.pyc +0 -0
  48. vllm/lib/python3.10/site-packages/grpc/beta/__pycache__/_client_adaptations.cpython-310.pyc +0 -0
  49. vllm/lib/python3.10/site-packages/grpc/beta/__pycache__/_metadata.cpython-310.pyc +0 -0
  50. vllm/lib/python3.10/site-packages/grpc/beta/__pycache__/_server_adaptations.cpython-310.pyc +0 -0
.gitattributes CHANGED
@@ -1419,3 +1419,4 @@ vllm/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_array.cpython-3
1419
  vllm/lib/python3.10/site-packages/yaml/_yaml.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
1420
  vllm/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_compute.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
1421
  vllm/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_pandas.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
 
 
1419
  vllm/lib/python3.10/site-packages/yaml/_yaml.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
1420
  vllm/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_compute.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
1421
  vllm/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_pandas.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
1422
+ vllm/lib/python3.10/site-packages/shapely.libs/libgeos-e4f0e0c1.so.3.11.4 filter=lfs diff=lfs merge=lfs -text
videollama2/lib/python3.10/site-packages/torch/include/ATen/ExpandBase.h ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #include <ATen/core/TensorBase.h>
2
+
3
+ // Broadcasting utilities for working with TensorBase
4
+ namespace at {
5
+ namespace internal {
6
+ TORCH_API TensorBase expand_slow_path(const TensorBase& self, IntArrayRef size);
7
+ } // namespace internal
8
+
9
+ inline c10::MaybeOwned<TensorBase> expand_size(
10
+ const TensorBase& self,
11
+ IntArrayRef size) {
12
+ if (size.equals(self.sizes())) {
13
+ return c10::MaybeOwned<TensorBase>::borrowed(self);
14
+ }
15
+ return c10::MaybeOwned<TensorBase>::owned(
16
+ at::internal::expand_slow_path(self, size));
17
+ }
18
+ c10::MaybeOwned<TensorBase> expand_size(TensorBase&& self, IntArrayRef size) =
19
+ delete;
20
+
21
+ inline c10::MaybeOwned<TensorBase> expand_inplace(
22
+ const TensorBase& tensor,
23
+ const TensorBase& to_expand) {
24
+ return expand_size(to_expand, tensor.sizes());
25
+ }
26
+ c10::MaybeOwned<TensorBase> expand_inplace(
27
+ const TensorBase& tensor,
28
+ TensorBase&& to_expand) = delete;
29
+
30
+ } // namespace at
videollama2/lib/python3.10/site-packages/torch/include/ATen/Layout.h ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ #pragma once
2
+ #include <c10/core/Layout.h>
videollama2/lib/python3.10/site-packages/torch/include/ATen/NumericUtils.h ADDED
@@ -0,0 +1,194 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #pragma once
2
+
3
+ #ifdef __HIPCC__
4
+ #include <hip/hip_runtime.h>
5
+ #endif
6
+
7
+ #include <c10/macros/Macros.h>
8
+ #include <c10/util/BFloat16.h>
9
+ #include <c10/util/Float8_e4m3fn.h>
10
+ #include <c10/util/Float8_e5m2.h>
11
+ #include <c10/util/Half.h>
12
+ #include <c10/util/complex.h>
13
+
14
+ #include <cmath>
15
+ #include <type_traits>
16
+
17
+ namespace at {
18
+
19
+ // std::isnan isn't performant to use on integral types; it will
20
+ // (uselessly) convert to floating point and then do the test.
21
+ // This function is.
22
+
23
+ template <
24
+ typename T,
25
+ typename std::enable_if<std::is_integral<T>::value, int>::type = 0>
26
+ inline C10_HOST_DEVICE bool _isnan(T /*val*/) {
27
+ return false;
28
+ }
29
+
30
+ template <
31
+ typename T,
32
+ typename std::enable_if<std::is_floating_point<T>::value, int>::type = 0>
33
+ inline C10_HOST_DEVICE bool _isnan(T val) {
34
+ #if defined(__CUDACC__) || defined(__HIPCC__)
35
+ return ::isnan(val);
36
+ #else
37
+ return std::isnan(val);
38
+ #endif
39
+ }
40
+
41
+ template <
42
+ typename T,
43
+ typename std::enable_if<c10::is_complex<T>::value, int>::type = 0>
44
+ inline C10_HOST_DEVICE bool _isnan(T val) {
45
+ return std::isnan(val.real()) || std::isnan(val.imag());
46
+ }
47
+
48
+ template <
49
+ typename T,
50
+ typename std::enable_if<std::is_same<T, at::Half>::value, int>::type = 0>
51
+ inline C10_HOST_DEVICE bool _isnan(T val) {
52
+ return at::_isnan(static_cast<float>(val));
53
+ }
54
+
55
+ template <
56
+ typename T,
57
+ typename std::enable_if<std::is_same<T, at::BFloat16>::value, int>::type =
58
+ 0>
59
+ inline C10_HOST_DEVICE bool _isnan(at::BFloat16 val) {
60
+ return at::_isnan(static_cast<float>(val));
61
+ }
62
+
63
+ inline C10_HOST_DEVICE bool _isnan(at::BFloat16 val) {
64
+ return at::_isnan(static_cast<float>(val));
65
+ }
66
+
67
+ template <
68
+ typename T,
69
+ typename std::enable_if<std::is_same<T, at::Float8_e5m2>::value, int>::
70
+ type = 0>
71
+ inline C10_HOST_DEVICE bool _isnan(T val) {
72
+ return val.isnan();
73
+ }
74
+
75
+ template <
76
+ typename T,
77
+ typename std::enable_if<std::is_same<T, at::Float8_e4m3fn>::value, int>::
78
+ type = 0>
79
+ inline C10_HOST_DEVICE bool _isnan(T val) {
80
+ return val.isnan();
81
+ }
82
+
83
+ // std::isinf isn't performant to use on integral types; it will
84
+ // (uselessly) convert to floating point and then do the test.
85
+ // This function is.
86
+
87
+ template <
88
+ typename T,
89
+ typename std::enable_if<std::is_integral<T>::value, int>::type = 0>
90
+ inline C10_HOST_DEVICE bool _isinf(T /*val*/) {
91
+ return false;
92
+ }
93
+
94
+ template <
95
+ typename T,
96
+ typename std::enable_if<std::is_floating_point<T>::value, int>::type = 0>
97
+ inline C10_HOST_DEVICE bool _isinf(T val) {
98
+ #if defined(__CUDACC__) || defined(__HIPCC__)
99
+ return ::isinf(val);
100
+ #else
101
+ return std::isinf(val);
102
+ #endif
103
+ }
104
+
105
+ inline C10_HOST_DEVICE bool _isinf(at::Half val) {
106
+ return at::_isinf(static_cast<float>(val));
107
+ }
108
+
109
+ inline C10_HOST_DEVICE bool _isinf(at::BFloat16 val) {
110
+ return at::_isinf(static_cast<float>(val));
111
+ }
112
+
113
+ inline C10_HOST_DEVICE bool _isinf(at::Float8_e5m2 val) {
114
+ return val.isinf();
115
+ }
116
+
117
+ inline C10_HOST_DEVICE bool _isinf(at::Float8_e4m3fn val) {
118
+ return false;
119
+ }
120
+
121
+ template <typename T>
122
+ C10_HOST_DEVICE inline T exp(T x) {
123
+ static_assert(
124
+ !std::is_same<T, double>::value,
125
+ "this template must be used with float or less precise type");
126
+ #if defined(__CUDA_ARCH__) || defined(__HIP_ARCH__)
127
+ // use __expf fast approximation for peak bandwidth
128
+ return __expf(x);
129
+ #else
130
+ return ::exp(x);
131
+ #endif
132
+ }
133
+
134
+ template <>
135
+ C10_HOST_DEVICE inline double exp<double>(double x) {
136
+ return ::exp(x);
137
+ }
138
+
139
+ template <typename T>
140
+ C10_HOST_DEVICE inline T log(T x) {
141
+ static_assert(
142
+ !std::is_same<T, double>::value,
143
+ "this template must be used with float or less precise type");
144
+ #if defined(__CUDA_ARCH__) || defined(__HIP_ARCH__)
145
+ // use __logf fast approximation for peak bandwidth
146
+ return __logf(x);
147
+ #else
148
+ return ::log(x);
149
+ #endif
150
+ }
151
+
152
+ template <>
153
+ C10_HOST_DEVICE inline double log<double>(double x) {
154
+ return ::log(x);
155
+ }
156
+
157
+ template <typename T>
158
+ C10_HOST_DEVICE inline T log1p(T x) {
159
+ static_assert(
160
+ !std::is_same<T, double>::value,
161
+ "this template must be used with float or less precise type");
162
+ #if defined(__CUDA_ARCH__) || defined(__HIP_ARCH__)
163
+ // use __logf fast approximation for peak bandwidth
164
+ // NOTE: There is no __log1pf so unfortunately we lose precision.
165
+ return __logf(1.0f + x);
166
+ #else
167
+ return ::log1p(x);
168
+ #endif
169
+ }
170
+
171
+ template <>
172
+ C10_HOST_DEVICE inline double log1p<double>(double x) {
173
+ return ::log1p(x);
174
+ }
175
+
176
+ template <typename T>
177
+ C10_HOST_DEVICE inline T tan(T x) {
178
+ static_assert(
179
+ !std::is_same<T, double>::value,
180
+ "this template must be used with float or less precise type");
181
+ #if defined(__CUDA_ARCH__) || defined(__HIP_ARCH__)
182
+ // use __tanf fast approximation for peak bandwidth
183
+ return __tanf(x);
184
+ #else
185
+ return ::tan(x);
186
+ #endif
187
+ }
188
+
189
+ template <>
190
+ C10_HOST_DEVICE inline double tan<double>(double x) {
191
+ return ::tan(x);
192
+ }
193
+
194
+ } // namespace at
videollama2/lib/python3.10/site-packages/torch/include/ATen/TracerMode.h ADDED
@@ -0,0 +1,132 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #pragma once
2
+
3
+ #include <c10/core/impl/LocalDispatchKeySet.h>
4
+ #include <c10/macros/Export.h>
5
+ #include <c10/macros/Macros.h>
6
+
7
+ // NOTE [Tracing Mode Switches]
8
+ //
9
+ // Historically, tracing function was controlled by two switches:
10
+ //
11
+ // - `AutoDispatchBelowADInplaceOrView` guard
12
+ //
13
+ // Tracing function used to be script-generated inside `VariableType_*.cpp`
14
+ // kernels, sharing the same `Autograd` dispatch key with autograd function.
15
+ // Therefore, before tracing function was moved out of VariableType,
16
+ // `AutoDispatchBelowADInplaceOrView` guard can also disable tracing as a
17
+ // side effect of disabling `Autograd` dispatching.
18
+ //
19
+ // - `setTracingState()` API in `torch/csrc/jit/frontend/tracer.h`
20
+ //
21
+ // It stores tracing data in a `TracingState` object in TLS. If the
22
+ // `TracingState` object in TLS is `null`, then tracing is paused.
23
+ //
24
+ // The `TracingState` object is created in `tracer::trace()` - the main
25
+ // entrance of tracing function. It's temporarily set to `null` inside
26
+ // generated VariableType (now TraceType) to bypass tracing for intermediate
27
+ // ops (ops being called by other ops). After the intermediate op call
28
+ // finishes it's set back to the original `TracingState` object.
29
+ //
30
+ // The `TracingState` obect in TLS can also be read/written via its Python
31
+ // binding in `python_tracer.cpp`, and `get/setTracingState()` C++ APIs,
32
+ // which are also exposed as `TORCH_API`.
33
+ //
34
+ // Two new switches were introduced since tracing function was moved out of
35
+ // VariableType:
36
+ //
37
+ // - `tracer::impl::set_dispatch_enabled()` API
38
+ //
39
+ // Unlike the special `Autograd` dispatch key which is included in dispatch
40
+ // key set by default, `Tracer` dispatch key is off by default. The
41
+ // dispatching switch can be toggled via this new API.
42
+ //
43
+ // - `tracer::impl::NoTracerDispatchMode` guard
44
+ //
45
+ // It's used to cover the old semantics of `AutoDispatchBelowADInplaceOrView`
46
+ // after tracing was moved out of VariableType.
47
+ //
48
+ // Before tracing function was moved out of VariableType, tracing was enabled
49
+ // when the following conditions are satisfied:
50
+ //
51
+ // 1) `TracingState` object in TLS != null;
52
+ // - Either inside the execution scope of `tracer::trace()`, or
53
+ // - Eagerly called `setTracingState()` with non-null object.
54
+ // 2) Not inside `AutoDispatchBelowADInplaceOrView` scope;
55
+ //
56
+ // After:
57
+ //
58
+ // 1) `TracingState` object in TLS != null;
59
+ // 2) Has called `tracer::impl::set_dispatch_enabled(true)`;
60
+ // 3) Not inside `tracer::impl::NonDispatchGuard` scope;
61
+ //
62
+ // [TODOs]
63
+ //
64
+ // - `setTracingState()` v.s. `tracer::impl::set_dispatch_enabled()`
65
+ //
66
+ // Currently `set_dispatch_enabled()` is set/unset inside `setTracingState()`
67
+ // to keep the semantics exactly the same as before - it's confusing to keep
68
+ // both switches, though. We should consider simplifying/limiting the exposed
69
+ // `setTracingState()` Python/C++ APIs (and other APIs calling it) so that
70
+ // these two can be unified.
71
+ //
72
+ // - `AutoDispatchBelowADInplaceOrView` v.s.
73
+ // `tracer::impl::NoTracerDispatchMode`
74
+ //
75
+ // We don't need to always set both guards together to keep semantics
76
+ // unchanged. For the follow use cases of `AutoDispatchBelowADInplaceOrView`
77
+ // we don't need set the new tracer guard:
78
+ //
79
+ // * Script-generated VariableType kernels. The guard is not necessary as
80
+ // tracing is already disabled explicitly by `setTracingState(null)` in
81
+ // generated TraceType kernels - we could keep it as is or use the new guard
82
+ // instead.
83
+ //
84
+ // * Custom ops. Will be handled by fallback kernel for `Tracer`.
85
+ //
86
+ // * Functions that are not likely to be called in tracing context (no python
87
+ // binding / not an operator), e.g.: all mobile forward() wrappers, test
88
+ // binaries, and etc.
89
+ //
90
+ // * Where new threads are spawned, e.g.: ATen/native/ConvolutionMM2d.cpp.
91
+ // It's not necessary as tracing is off by default.
92
+ //
93
+ // For the rest of cases we might need have both:
94
+ //
95
+ // * Functions that might be reachable from eager mode python (especially
96
+ // factory methods), e.g.:
97
+ // `internal_new_from_data()` in `torch/csrc/utils/tensor_new.cpp`.
98
+ // Without the new guard it will add `aten::empty` to the traced graph.
99
+ //
100
+ // * Some manually maintained functions, e.g.:
101
+ // `torch/csrc/autograd/VariableTypeManual.cpp`.
102
+ // Set the new guard if it's not obvious whether `setTracingState(null)`
103
+ // has been called before it reaches the `AutoDispatchBelowADInplaceOrView`
104
+ // guard.
105
+ //
106
+ // We might need tweak the usage of the new guard to optimize/fix things.
107
+ // It should only affect the correctness of tracing function, because the
108
+ // guard is essentially no-op when the master `setTracingState()` switch is
109
+ // off.
110
+
111
+ // TODO: move this from `at::` to `jit::torch::` after
112
+ // `aten/src/ATen/cpp_custom_type_hack.h` is removed.
113
+
114
+ namespace at::tracer::impl {
115
+
116
+ static inline bool is_dispatch_enabled() {
117
+ return c10::impl::tls_is_dispatch_key_included(at::DispatchKey::Tracer) &&
118
+ !c10::impl::tls_is_dispatch_key_excluded(at::DispatchKey::Tracer);
119
+ }
120
+
121
+ static inline void set_dispatch_enabled(bool enabled) {
122
+ TORCH_INTERNAL_ASSERT(
123
+ !c10::impl::tls_is_dispatch_key_excluded(at::DispatchKey::Tracer),
124
+ "Cannot enable tracing within the scope of NoTracerDispatchMode!");
125
+ c10::impl::tls_set_dispatch_key_included(at::DispatchKey::Tracer, enabled);
126
+ }
127
+
128
+ struct NoTracerDispatchMode {
129
+ c10::impl::ExcludeDispatchKeyGuard guard_{at::DispatchKey::Tracer};
130
+ };
131
+
132
+ } // namespace at::tracer::impl
vllm/lib/python3.10/site-packages/_multiprocess/__init__.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2022-2024 The Uncertainty Quantification Foundation.
5
+ # License: 3-clause BSD. The full license text is available at:
6
+ # - https://github.com/uqfoundation/multiprocess/blob/master/LICENSE
7
+
8
+ from _multiprocessing import *
vllm/lib/python3.10/site-packages/_multiprocess/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (196 Bytes). View file
 
vllm/lib/python3.10/site-packages/blake3-1.0.4.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
 
 
1
+ pip
vllm/lib/python3.10/site-packages/blake3-1.0.4.dist-info/METADATA ADDED
@@ -0,0 +1,108 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.4
2
+ Name: blake3
3
+ Version: 1.0.4
4
+ Summary: Python bindings for the Rust blake3 crate
5
+ Home-Page: https://github.com/oconnor663/blake3-py
6
+ Author: Jack O'Connor <oconnor663@gmail.com>
7
+ Author-email: Jack O'Connor <oconnor663@gmail.com>
8
+ License: CC0-1.0 OR Apache-2.0
9
+ Description-Content-Type: text/markdown; charset=UTF-8; variant=GFM
10
+ Project-URL: Source Code, https://github.com/oconnor663/blake3-py
11
+
12
+ # blake3-py [![tests](https://github.com/oconnor663/blake3-py/actions/workflows/tests.yml/badge.svg?branch=master&event=push)](https://github.com/oconnor663/blake3-py/actions/workflows/tests.yml) [![PyPI version](https://badge.fury.io/py/blake3.svg)](https://pypi.python.org/pypi/blake3)
13
+
14
+ Python bindings for the [official Rust implementation of
15
+ BLAKE3](https://github.com/BLAKE3-team/BLAKE3), based on
16
+ [PyO3](https://github.com/PyO3/pyo3). These bindings expose all the features of
17
+ BLAKE3, including extendable output, keying, and multithreading. The basic API
18
+ matches that of Python's standard
19
+ [`hashlib`](https://docs.python.org/3/library/hashlib.html) module.
20
+
21
+ ## Examples
22
+
23
+ ```python
24
+ from blake3 import blake3
25
+
26
+ # Hash some input all at once. The input can be bytes, a bytearray, or a memoryview.
27
+ hash1 = blake3(b"foobarbaz").digest()
28
+
29
+ # Hash the same input incrementally.
30
+ hasher = blake3()
31
+ hasher.update(b"foo")
32
+ hasher.update(b"bar")
33
+ hasher.update(b"baz")
34
+ hash2 = hasher.digest()
35
+ assert hash1 == hash2
36
+
37
+ # Hash the same input fluently.
38
+ assert hash1 == blake3(b"foo").update(b"bar").update(b"baz").digest()
39
+
40
+ # Hexadecimal output.
41
+ print("The hash of 'hello world' is", blake3(b"hello world").hexdigest())
42
+
43
+ # Use the keyed hashing mode, which takes a 32-byte key.
44
+ import secrets
45
+ random_key = secrets.token_bytes(32)
46
+ message = b"a message to authenticate"
47
+ mac = blake3(message, key=random_key).digest()
48
+
49
+ # Use the key derivation mode, which takes a context string. Context strings
50
+ # should be hardcoded, globally unique, and application-specific.
51
+ context = "blake3-py 2020-03-04 11:13:10 example context"
52
+ key_material = b"usually at least 32 random bytes, not a password"
53
+ derived_key = blake3(key_material, derive_key_context=context).digest()
54
+
55
+ # Extendable output. The default digest size is 32 bytes.
56
+ extended = blake3(b"foo").digest(length=100)
57
+ assert extended[:32] == blake3(b"foo").digest()
58
+ assert extended[75:100] == blake3(b"foo").digest(length=25, seek=75)
59
+
60
+ # Hash a large input using multiple threads. Note that this can be slower for
61
+ # inputs shorter than ~1 MB, and it's a good idea to benchmark it for your use
62
+ # case on your platform.
63
+ large_input = bytearray(1_000_000)
64
+ hash_single = blake3(large_input).digest()
65
+ hash_two = blake3(large_input, max_threads=2).digest()
66
+ hash_many = blake3(large_input, max_threads=blake3.AUTO).digest()
67
+ assert hash_single == hash_two == hash_many
68
+
69
+ # Hash a file with multiple threads using memory mapping. This is what b3sum
70
+ # does by default.
71
+ file_hasher = blake3(max_threads=blake3.AUTO)
72
+ file_hasher.update_mmap("/big/file.txt")
73
+ file_hash = file_hasher.digest()
74
+
75
+ # Copy a hasher that's already accepted some input.
76
+ hasher1 = blake3(b"foo")
77
+ hasher2 = hasher1.copy()
78
+ hasher1.update(b"bar")
79
+ hasher2.update(b"baz")
80
+ assert hasher1.digest() == blake3(b"foobar").digest()
81
+ assert hasher2.digest() == blake3(b"foobaz").digest()
82
+ ```
83
+
84
+ ## Installation
85
+
86
+ ```
87
+ pip install blake3
88
+ ```
89
+
90
+ As usual with Pip, you might need to use `sudo` or the `--user` flag
91
+ with the command above, depending on how you installed Python on your
92
+ system.
93
+
94
+ There are binary wheels [available on
95
+ PyPI](https://pypi.org/project/blake3/#files) for most environments. But
96
+ if you're building the source distribution, or if a binary wheel isn't
97
+ available for your environment, you'll need to [install the Rust
98
+ toolchain](https://rustup.rs).
99
+
100
+ ## C Bindings
101
+
102
+ Experimental bindings for the official BLAKE3 C implementation are available in
103
+ the [`c_impl`](c_impl) directory. These will probably not be published on PyPI,
104
+ and most applications should prefer the Rust-based bindings. But if you can't
105
+ depend on the Rust toolchain, and you're on some platform that this project
106
+ doesn't provide binary wheels for, the C-based bindings might be an
107
+ alternative.
108
+
vllm/lib/python3.10/site-packages/blake3-1.0.4.dist-info/RECORD ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ blake3-1.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
2
+ blake3-1.0.4.dist-info/METADATA,sha256=t8zfV6RI_1YtG_N6JgFUG1Z73sK6n31N9bv0rejh4eY,4166
3
+ blake3-1.0.4.dist-info/RECORD,,
4
+ blake3-1.0.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
5
+ blake3-1.0.4.dist-info/WHEEL,sha256=irisT0qT7YShBVNvrzR_NcetcJbAQuklGcGkS9f7r1w,129
6
+ blake3/__init__.py,sha256=i5GXKa35g4Dt_hOK8OmCFGY-6xDtzmTAGlepSFv_0ns,107
7
+ blake3/__init__.pyi,sha256=Ngl-UCmwX3q3E9IWmQGCqbEQhfdkaoSVd1G1QaHtQNg,750
8
+ blake3/__pycache__/__init__.cpython-310.pyc,,
9
+ blake3/blake3.cpython-310-x86_64-linux-gnu.so,sha256=APPv5fSVQsJZc2WSbFdUqusI9vZ2C5NBntOzbtBczRM,965432
10
+ blake3/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
vllm/lib/python3.10/site-packages/blake3-1.0.4.dist-info/REQUESTED ADDED
File without changes
vllm/lib/python3.10/site-packages/blake3-1.0.4.dist-info/WHEEL ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: maturin (1.8.1)
3
+ Root-Is-Purelib: false
4
+ Tag: cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64
vllm/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/LICENSE ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2022 Phil Wang
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
vllm/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/METADATA ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.1
2
+ Name: einops-exts
3
+ Version: 0.0.4
4
+ Summary: Einops Extensions
5
+ Home-page: https://github.com/lucidrains/einops-exts
6
+ Author: Phil Wang
7
+ Author-email: lucidrains@gmail.com
8
+ License: MIT
9
+ Keywords: artificial intelligence,deep learning,tensor manipulation
10
+ Classifier: Development Status :: 4 - Beta
11
+ Classifier: Intended Audience :: Developers
12
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
13
+ Classifier: License :: OSI Approved :: MIT License
14
+ Classifier: Programming Language :: Python :: 3.6
15
+ Description-Content-Type: text/markdown
16
+ License-File: LICENSE
17
+ Requires-Dist: einops (>=0.4)
18
+
vllm/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/RECORD ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ einops_exts-0.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
2
+ einops_exts-0.0.4.dist-info/LICENSE,sha256=xZDkKtpHE2TPCAeqKe1fjdpKernl1YW-d01j_1ltkAU,1066
3
+ einops_exts-0.0.4.dist-info/METADATA,sha256=yIU5EfeQdzzh8Dc-Feg8_a6p4LVHj8J1OrDFWHbvOdw,621
4
+ einops_exts-0.0.4.dist-info/RECORD,,
5
+ einops_exts-0.0.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
+ einops_exts-0.0.4.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
7
+ einops_exts-0.0.4.dist-info/top_level.txt,sha256=SckcduaUqHpfn7q_H49iPlKEPbKXTwKcCczc806qzes,12
8
+ einops_exts/__init__.py,sha256=FT0AocRvAC7bgRVinoglTY4uNjWZwfSfu9xZYHEwV4k,232
9
+ einops_exts/__pycache__/__init__.cpython-310.pyc,,
10
+ einops_exts/__pycache__/einops_exts.cpython-310.pyc,,
11
+ einops_exts/__pycache__/torch.cpython-310.pyc,,
12
+ einops_exts/einops_exts.py,sha256=HHeQbJgZcuA_04R9NpRRlBFASCb9xBNtgwlmsABcU7U,2131
13
+ einops_exts/torch.py,sha256=A0orev4xcv41qp3EmXiDjWFcXclNCs9kHkadqYiOpv8,1045
vllm/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/REQUESTED ADDED
File without changes
vllm/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: bdist_wheel (0.38.4)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
vllm/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ einops_exts
vllm/lib/python3.10/site-packages/grpc/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (80.4 kB). View file
 
vllm/lib/python3.10/site-packages/grpc/__pycache__/_auth.cpython-310.pyc ADDED
Binary file (2.27 kB). View file
 
vllm/lib/python3.10/site-packages/grpc/__pycache__/_channel.cpython-310.pyc ADDED
Binary file (54.9 kB). View file
 
vllm/lib/python3.10/site-packages/grpc/__pycache__/_common.cpython-310.pyc ADDED
Binary file (5.84 kB). View file
 
vllm/lib/python3.10/site-packages/grpc/__pycache__/_compression.cpython-310.pyc ADDED
Binary file (1.5 kB). View file
 
vllm/lib/python3.10/site-packages/grpc/__pycache__/_grpcio_metadata.cpython-310.pyc ADDED
Binary file (184 Bytes). View file
 
vllm/lib/python3.10/site-packages/grpc/__pycache__/_interceptor.cpython-310.pyc ADDED
Binary file (18.7 kB). View file
 
vllm/lib/python3.10/site-packages/grpc/__pycache__/_observability.cpython-310.pyc ADDED
Binary file (11 kB). View file
 
vllm/lib/python3.10/site-packages/grpc/__pycache__/_plugin_wrapping.cpython-310.pyc ADDED
Binary file (3.6 kB). View file
 
vllm/lib/python3.10/site-packages/grpc/__pycache__/_runtime_protos.cpython-310.pyc ADDED
Binary file (5.14 kB). View file
 
vllm/lib/python3.10/site-packages/grpc/__pycache__/_server.cpython-310.pyc ADDED
Binary file (38.9 kB). View file
 
vllm/lib/python3.10/site-packages/grpc/__pycache__/_simple_stubs.cpython-310.pyc ADDED
Binary file (18.7 kB). View file
 
vllm/lib/python3.10/site-packages/grpc/__pycache__/_typing.cpython-310.pyc ADDED
Binary file (1.5 kB). View file
 
vllm/lib/python3.10/site-packages/grpc/__pycache__/_utilities.cpython-310.pyc ADDED
Binary file (6.87 kB). View file
 
vllm/lib/python3.10/site-packages/grpc/_cython/__init__.py ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2015 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
vllm/lib/python3.10/site-packages/grpc/_cython/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (162 Bytes). View file
 
vllm/lib/python3.10/site-packages/grpc/_cython/_credentials/roots.pem ADDED
The diff for this file is too large to render. See raw diff
 
vllm/lib/python3.10/site-packages/grpc/_cython/_cygrpc/__init__.py ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2015 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
vllm/lib/python3.10/site-packages/grpc/_cython/_cygrpc/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (170 Bytes). View file
 
vllm/lib/python3.10/site-packages/grpc/aio/__init__.py ADDED
@@ -0,0 +1,95 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """gRPC's Asynchronous Python API.
15
+
16
+ gRPC Async API objects may only be used on the thread on which they were
17
+ created. AsyncIO doesn't provide thread safety for most of its APIs.
18
+ """
19
+
20
+ from typing import Any, Optional, Sequence, Tuple
21
+
22
+ import grpc
23
+ from grpc._cython.cygrpc import AbortError
24
+ from grpc._cython.cygrpc import BaseError
25
+ from grpc._cython.cygrpc import EOF
26
+ from grpc._cython.cygrpc import InternalError
27
+ from grpc._cython.cygrpc import UsageError
28
+ from grpc._cython.cygrpc import init_grpc_aio
29
+ from grpc._cython.cygrpc import shutdown_grpc_aio
30
+
31
+ from ._base_call import Call
32
+ from ._base_call import RpcContext
33
+ from ._base_call import StreamStreamCall
34
+ from ._base_call import StreamUnaryCall
35
+ from ._base_call import UnaryStreamCall
36
+ from ._base_call import UnaryUnaryCall
37
+ from ._base_channel import Channel
38
+ from ._base_channel import StreamStreamMultiCallable
39
+ from ._base_channel import StreamUnaryMultiCallable
40
+ from ._base_channel import UnaryStreamMultiCallable
41
+ from ._base_channel import UnaryUnaryMultiCallable
42
+ from ._base_server import Server
43
+ from ._base_server import ServicerContext
44
+ from ._call import AioRpcError
45
+ from ._channel import insecure_channel
46
+ from ._channel import secure_channel
47
+ from ._interceptor import ClientCallDetails
48
+ from ._interceptor import ClientInterceptor
49
+ from ._interceptor import InterceptedUnaryUnaryCall
50
+ from ._interceptor import ServerInterceptor
51
+ from ._interceptor import StreamStreamClientInterceptor
52
+ from ._interceptor import StreamUnaryClientInterceptor
53
+ from ._interceptor import UnaryStreamClientInterceptor
54
+ from ._interceptor import UnaryUnaryClientInterceptor
55
+ from ._metadata import Metadata
56
+ from ._server import server
57
+ from ._typing import ChannelArgumentType
58
+
59
+ ################################### __all__ #################################
60
+
61
+ __all__ = (
62
+ "init_grpc_aio",
63
+ "shutdown_grpc_aio",
64
+ "AioRpcError",
65
+ "RpcContext",
66
+ "Call",
67
+ "UnaryUnaryCall",
68
+ "UnaryStreamCall",
69
+ "StreamUnaryCall",
70
+ "StreamStreamCall",
71
+ "Channel",
72
+ "UnaryUnaryMultiCallable",
73
+ "UnaryStreamMultiCallable",
74
+ "StreamUnaryMultiCallable",
75
+ "StreamStreamMultiCallable",
76
+ "ClientCallDetails",
77
+ "ClientInterceptor",
78
+ "UnaryStreamClientInterceptor",
79
+ "UnaryUnaryClientInterceptor",
80
+ "StreamUnaryClientInterceptor",
81
+ "StreamStreamClientInterceptor",
82
+ "InterceptedUnaryUnaryCall",
83
+ "ServerInterceptor",
84
+ "insecure_channel",
85
+ "server",
86
+ "Server",
87
+ "ServicerContext",
88
+ "EOF",
89
+ "secure_channel",
90
+ "AbortError",
91
+ "BaseError",
92
+ "UsageError",
93
+ "InternalError",
94
+ "Metadata",
95
+ )
vllm/lib/python3.10/site-packages/grpc/aio/__pycache__/_call.cpython-310.pyc ADDED
Binary file (20.9 kB). View file
 
vllm/lib/python3.10/site-packages/grpc/aio/_base_server.py ADDED
@@ -0,0 +1,385 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 The gRPC Authors
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Abstract base classes for server-side classes."""
15
+
16
+ import abc
17
+ from typing import Generic, Iterable, Mapping, NoReturn, Optional, Sequence
18
+
19
+ import grpc
20
+
21
+ from ._metadata import Metadata # pylint: disable=unused-import
22
+ from ._typing import DoneCallbackType
23
+ from ._typing import MetadataType
24
+ from ._typing import RequestType
25
+ from ._typing import ResponseType
26
+
27
+
28
+ class Server(abc.ABC):
29
+ """Serves RPCs."""
30
+
31
+ @abc.abstractmethod
32
+ def add_generic_rpc_handlers(
33
+ self, generic_rpc_handlers: Sequence[grpc.GenericRpcHandler]
34
+ ) -> None:
35
+ """Registers GenericRpcHandlers with this Server.
36
+
37
+ This method is only safe to call before the server is started.
38
+
39
+ Args:
40
+ generic_rpc_handlers: A sequence of GenericRpcHandlers that will be
41
+ used to service RPCs.
42
+ """
43
+
44
+ @abc.abstractmethod
45
+ def add_insecure_port(self, address: str) -> int:
46
+ """Opens an insecure port for accepting RPCs.
47
+
48
+ A port is a communication endpoint that used by networking protocols,
49
+ like TCP and UDP. To date, we only support TCP.
50
+
51
+ This method may only be called before starting the server.
52
+
53
+ Args:
54
+ address: The address for which to open a port. If the port is 0,
55
+ or not specified in the address, then the gRPC runtime will choose a port.
56
+
57
+ Returns:
58
+ An integer port on which the server will accept RPC requests.
59
+ """
60
+
61
+ @abc.abstractmethod
62
+ def add_secure_port(
63
+ self, address: str, server_credentials: grpc.ServerCredentials
64
+ ) -> int:
65
+ """Opens a secure port for accepting RPCs.
66
+
67
+ A port is a communication endpoint that used by networking protocols,
68
+ like TCP and UDP. To date, we only support TCP.
69
+
70
+ This method may only be called before starting the server.
71
+
72
+ Args:
73
+ address: The address for which to open a port.
74
+ if the port is 0, or not specified in the address, then the gRPC
75
+ runtime will choose a port.
76
+ server_credentials: A ServerCredentials object.
77
+
78
+ Returns:
79
+ An integer port on which the server will accept RPC requests.
80
+ """
81
+
82
+ @abc.abstractmethod
83
+ async def start(self) -> None:
84
+ """Starts this Server.
85
+
86
+ This method may only be called once. (i.e. it is not idempotent).
87
+ """
88
+
89
+ @abc.abstractmethod
90
+ async def stop(self, grace: Optional[float]) -> None:
91
+ """Stops this Server.
92
+
93
+ This method immediately stops the server from servicing new RPCs in
94
+ all cases.
95
+
96
+ If a grace period is specified, this method waits until all active
97
+ RPCs are finished or until the grace period is reached. RPCs that haven't
98
+ been terminated within the grace period are aborted.
99
+ If a grace period is not specified (by passing None for grace), all
100
+ existing RPCs are aborted immediately and this method blocks until
101
+ the last RPC handler terminates.
102
+
103
+ This method is idempotent and may be called at any time. Passing a
104
+ smaller grace value in a subsequent call will have the effect of
105
+ stopping the Server sooner (passing None will have the effect of
106
+ stopping the server immediately). Passing a larger grace value in a
107
+ subsequent call will not have the effect of stopping the server later
108
+ (i.e. the most restrictive grace value is used).
109
+
110
+ Args:
111
+ grace: A duration of time in seconds or None.
112
+ """
113
+
114
+ @abc.abstractmethod
115
+ async def wait_for_termination(
116
+ self, timeout: Optional[float] = None
117
+ ) -> bool:
118
+ """Continues current coroutine once the server stops.
119
+
120
+ This is an EXPERIMENTAL API.
121
+
122
+ The wait will not consume computational resources during blocking, and
123
+ it will block until one of the two following conditions are met:
124
+
125
+ 1) The server is stopped or terminated;
126
+ 2) A timeout occurs if timeout is not `None`.
127
+
128
+ The timeout argument works in the same way as `threading.Event.wait()`.
129
+ https://docs.python.org/3/library/threading.html#threading.Event.wait
130
+
131
+ Args:
132
+ timeout: A floating point number specifying a timeout for the
133
+ operation in seconds.
134
+
135
+ Returns:
136
+ A bool indicates if the operation times out.
137
+ """
138
+
139
+ def add_registered_method_handlers(self, service_name, method_handlers):
140
+ """Registers GenericRpcHandlers with this Server.
141
+
142
+ This method is only safe to call before the server is started.
143
+
144
+ Args:
145
+ service_name: The service name.
146
+ method_handlers: A dictionary that maps method names to corresponding
147
+ RpcMethodHandler.
148
+ """
149
+
150
+
151
+ # pylint: disable=too-many-public-methods
152
+ class ServicerContext(Generic[RequestType, ResponseType], abc.ABC):
153
+ """A context object passed to method implementations."""
154
+
155
+ @abc.abstractmethod
156
+ async def read(self) -> RequestType:
157
+ """Reads one message from the RPC.
158
+
159
+ Only one read operation is allowed simultaneously.
160
+
161
+ Returns:
162
+ A response message of the RPC.
163
+
164
+ Raises:
165
+ An RpcError exception if the read failed.
166
+ """
167
+
168
+ @abc.abstractmethod
169
+ async def write(self, message: ResponseType) -> None:
170
+ """Writes one message to the RPC.
171
+
172
+ Only one write operation is allowed simultaneously.
173
+
174
+ Raises:
175
+ An RpcError exception if the write failed.
176
+ """
177
+
178
+ @abc.abstractmethod
179
+ async def send_initial_metadata(
180
+ self, initial_metadata: MetadataType
181
+ ) -> None:
182
+ """Sends the initial metadata value to the client.
183
+
184
+ This method need not be called by implementations if they have no
185
+ metadata to add to what the gRPC runtime will transmit.
186
+
187
+ Args:
188
+ initial_metadata: The initial :term:`metadata`.
189
+ """
190
+
191
+ @abc.abstractmethod
192
+ async def abort(
193
+ self,
194
+ code: grpc.StatusCode,
195
+ details: str = "",
196
+ trailing_metadata: MetadataType = tuple(),
197
+ ) -> NoReturn:
198
+ """Raises an exception to terminate the RPC with a non-OK status.
199
+
200
+ The code and details passed as arguments will supersede any existing
201
+ ones.
202
+
203
+ Args:
204
+ code: A StatusCode object to be sent to the client.
205
+ It must not be StatusCode.OK.
206
+ details: A UTF-8-encodable string to be sent to the client upon
207
+ termination of the RPC.
208
+ trailing_metadata: A sequence of tuple represents the trailing
209
+ :term:`metadata`.
210
+
211
+ Raises:
212
+ Exception: An exception is always raised to signal the abortion the
213
+ RPC to the gRPC runtime.
214
+ """
215
+
216
+ @abc.abstractmethod
217
+ def set_trailing_metadata(self, trailing_metadata: MetadataType) -> None:
218
+ """Sends the trailing metadata for the RPC.
219
+
220
+ This method need not be called by implementations if they have no
221
+ metadata to add to what the gRPC runtime will transmit.
222
+
223
+ Args:
224
+ trailing_metadata: The trailing :term:`metadata`.
225
+ """
226
+
227
+ @abc.abstractmethod
228
+ def invocation_metadata(self) -> Optional[MetadataType]:
229
+ """Accesses the metadata sent by the client.
230
+
231
+ Returns:
232
+ The invocation :term:`metadata`.
233
+ """
234
+
235
+ @abc.abstractmethod
236
+ def set_code(self, code: grpc.StatusCode) -> None:
237
+ """Sets the value to be used as status code upon RPC completion.
238
+
239
+ This method need not be called by method implementations if they wish
240
+ the gRPC runtime to determine the status code of the RPC.
241
+
242
+ Args:
243
+ code: A StatusCode object to be sent to the client.
244
+ """
245
+
246
+ @abc.abstractmethod
247
+ def set_details(self, details: str) -> None:
248
+ """Sets the value to be used the as detail string upon RPC completion.
249
+
250
+ This method need not be called by method implementations if they have
251
+ no details to transmit.
252
+
253
+ Args:
254
+ details: A UTF-8-encodable string to be sent to the client upon
255
+ termination of the RPC.
256
+ """
257
+
258
+ @abc.abstractmethod
259
+ def set_compression(self, compression: grpc.Compression) -> None:
260
+ """Set the compression algorithm to be used for the entire call.
261
+
262
+ Args:
263
+ compression: An element of grpc.compression, e.g.
264
+ grpc.compression.Gzip.
265
+ """
266
+
267
+ @abc.abstractmethod
268
+ def disable_next_message_compression(self) -> None:
269
+ """Disables compression for the next response message.
270
+
271
+ This method will override any compression configuration set during
272
+ server creation or set on the call.
273
+ """
274
+
275
+ @abc.abstractmethod
276
+ def peer(self) -> str:
277
+ """Identifies the peer that invoked the RPC being serviced.
278
+
279
+ Returns:
280
+ A string identifying the peer that invoked the RPC being serviced.
281
+ The string format is determined by gRPC runtime.
282
+ """
283
+
284
+ @abc.abstractmethod
285
+ def peer_identities(self) -> Optional[Iterable[bytes]]:
286
+ """Gets one or more peer identity(s).
287
+
288
+ Equivalent to
289
+ servicer_context.auth_context().get(servicer_context.peer_identity_key())
290
+
291
+ Returns:
292
+ An iterable of the identities, or None if the call is not
293
+ authenticated. Each identity is returned as a raw bytes type.
294
+ """
295
+
296
+ @abc.abstractmethod
297
+ def peer_identity_key(self) -> Optional[str]:
298
+ """The auth property used to identify the peer.
299
+
300
+ For example, "x509_common_name" or "x509_subject_alternative_name" are
301
+ used to identify an SSL peer.
302
+
303
+ Returns:
304
+ The auth property (string) that indicates the
305
+ peer identity, or None if the call is not authenticated.
306
+ """
307
+
308
+ @abc.abstractmethod
309
+ def auth_context(self) -> Mapping[str, Iterable[bytes]]:
310
+ """Gets the auth context for the call.
311
+
312
+ Returns:
313
+ A map of strings to an iterable of bytes for each auth property.
314
+ """
315
+
316
+ def time_remaining(self) -> float:
317
+ """Describes the length of allowed time remaining for the RPC.
318
+
319
+ Returns:
320
+ A nonnegative float indicating the length of allowed time in seconds
321
+ remaining for the RPC to complete before it is considered to have
322
+ timed out, or None if no deadline was specified for the RPC.
323
+ """
324
+
325
+ def trailing_metadata(self):
326
+ """Access value to be used as trailing metadata upon RPC completion.
327
+
328
+ This is an EXPERIMENTAL API.
329
+
330
+ Returns:
331
+ The trailing :term:`metadata` for the RPC.
332
+ """
333
+ raise NotImplementedError()
334
+
335
+ def code(self):
336
+ """Accesses the value to be used as status code upon RPC completion.
337
+
338
+ This is an EXPERIMENTAL API.
339
+
340
+ Returns:
341
+ The StatusCode value for the RPC.
342
+ """
343
+ raise NotImplementedError()
344
+
345
+ def details(self):
346
+ """Accesses the value to be used as detail string upon RPC completion.
347
+
348
+ This is an EXPERIMENTAL API.
349
+
350
+ Returns:
351
+ The details string of the RPC.
352
+ """
353
+ raise NotImplementedError()
354
+
355
+ def add_done_callback(self, callback: DoneCallbackType) -> None:
356
+ """Registers a callback to be called on RPC termination.
357
+
358
+ This is an EXPERIMENTAL API.
359
+
360
+ Args:
361
+ callback: A callable object will be called with the servicer context
362
+ object as its only argument.
363
+ """
364
+
365
+ def cancelled(self) -> bool:
366
+ """Return True if the RPC is cancelled.
367
+
368
+ The RPC is cancelled when the cancellation was requested with cancel().
369
+
370
+ This is an EXPERIMENTAL API.
371
+
372
+ Returns:
373
+ A bool indicates whether the RPC is cancelled or not.
374
+ """
375
+
376
+ def done(self) -> bool:
377
+ """Return True if the RPC is done.
378
+
379
+ An RPC is done if the RPC is completed, cancelled or aborted.
380
+
381
+ This is an EXPERIMENTAL API.
382
+
383
+ Returns:
384
+ A bool indicates if the RPC is done.
385
+ """
vllm/lib/python3.10/site-packages/grpc/aio/_channel.py ADDED
@@ -0,0 +1,627 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Invocation-side implementation of gRPC Asyncio Python."""
15
+
16
+ import asyncio
17
+ import sys
18
+ from typing import Any, Iterable, List, Optional, Sequence
19
+
20
+ import grpc
21
+ from grpc import _common
22
+ from grpc import _compression
23
+ from grpc import _grpcio_metadata
24
+ from grpc._cython import cygrpc
25
+
26
+ from . import _base_call
27
+ from . import _base_channel
28
+ from ._call import StreamStreamCall
29
+ from ._call import StreamUnaryCall
30
+ from ._call import UnaryStreamCall
31
+ from ._call import UnaryUnaryCall
32
+ from ._interceptor import ClientInterceptor
33
+ from ._interceptor import InterceptedStreamStreamCall
34
+ from ._interceptor import InterceptedStreamUnaryCall
35
+ from ._interceptor import InterceptedUnaryStreamCall
36
+ from ._interceptor import InterceptedUnaryUnaryCall
37
+ from ._interceptor import StreamStreamClientInterceptor
38
+ from ._interceptor import StreamUnaryClientInterceptor
39
+ from ._interceptor import UnaryStreamClientInterceptor
40
+ from ._interceptor import UnaryUnaryClientInterceptor
41
+ from ._metadata import Metadata
42
+ from ._typing import ChannelArgumentType
43
+ from ._typing import DeserializingFunction
44
+ from ._typing import MetadataType
45
+ from ._typing import RequestIterableType
46
+ from ._typing import RequestType
47
+ from ._typing import ResponseType
48
+ from ._typing import SerializingFunction
49
+ from ._utils import _timeout_to_deadline
50
+
51
+ _USER_AGENT = "grpc-python-asyncio/{}".format(_grpcio_metadata.__version__)
52
+
53
+ if sys.version_info[1] < 7:
54
+
55
+ def _all_tasks() -> Iterable[asyncio.Task]:
56
+ return asyncio.Task.all_tasks() # pylint: disable=no-member
57
+
58
+ else:
59
+
60
+ def _all_tasks() -> Iterable[asyncio.Task]:
61
+ return asyncio.all_tasks()
62
+
63
+
64
+ def _augment_channel_arguments(
65
+ base_options: ChannelArgumentType, compression: Optional[grpc.Compression]
66
+ ):
67
+ compression_channel_argument = _compression.create_channel_option(
68
+ compression
69
+ )
70
+ user_agent_channel_argument = (
71
+ (
72
+ cygrpc.ChannelArgKey.primary_user_agent_string,
73
+ _USER_AGENT,
74
+ ),
75
+ )
76
+ return (
77
+ tuple(base_options)
78
+ + compression_channel_argument
79
+ + user_agent_channel_argument
80
+ )
81
+
82
+
83
+ class _BaseMultiCallable:
84
+ """Base class of all multi callable objects.
85
+
86
+ Handles the initialization logic and stores common attributes.
87
+ """
88
+
89
+ _loop: asyncio.AbstractEventLoop
90
+ _channel: cygrpc.AioChannel
91
+ _method: bytes
92
+ _request_serializer: SerializingFunction
93
+ _response_deserializer: DeserializingFunction
94
+ _interceptors: Optional[Sequence[ClientInterceptor]]
95
+ _references: List[Any]
96
+ _loop: asyncio.AbstractEventLoop
97
+
98
+ # pylint: disable=too-many-arguments
99
+ def __init__(
100
+ self,
101
+ channel: cygrpc.AioChannel,
102
+ method: bytes,
103
+ request_serializer: SerializingFunction,
104
+ response_deserializer: DeserializingFunction,
105
+ interceptors: Optional[Sequence[ClientInterceptor]],
106
+ references: List[Any],
107
+ loop: asyncio.AbstractEventLoop,
108
+ ) -> None:
109
+ self._loop = loop
110
+ self._channel = channel
111
+ self._method = method
112
+ self._request_serializer = request_serializer
113
+ self._response_deserializer = response_deserializer
114
+ self._interceptors = interceptors
115
+ self._references = references
116
+
117
+ @staticmethod
118
+ def _init_metadata(
119
+ metadata: Optional[MetadataType] = None,
120
+ compression: Optional[grpc.Compression] = None,
121
+ ) -> Metadata:
122
+ """Based on the provided values for <metadata> or <compression> initialise the final
123
+ metadata, as it should be used for the current call.
124
+ """
125
+ metadata = metadata or Metadata()
126
+ if not isinstance(metadata, Metadata) and isinstance(metadata, tuple):
127
+ metadata = Metadata.from_tuple(metadata)
128
+ if compression:
129
+ metadata = Metadata(
130
+ *_compression.augment_metadata(metadata, compression)
131
+ )
132
+ return metadata
133
+
134
+
135
+ class UnaryUnaryMultiCallable(
136
+ _BaseMultiCallable, _base_channel.UnaryUnaryMultiCallable
137
+ ):
138
+ def __call__(
139
+ self,
140
+ request: RequestType,
141
+ *,
142
+ timeout: Optional[float] = None,
143
+ metadata: Optional[MetadataType] = None,
144
+ credentials: Optional[grpc.CallCredentials] = None,
145
+ wait_for_ready: Optional[bool] = None,
146
+ compression: Optional[grpc.Compression] = None,
147
+ ) -> _base_call.UnaryUnaryCall[RequestType, ResponseType]:
148
+ metadata = self._init_metadata(metadata, compression)
149
+ if not self._interceptors:
150
+ call = UnaryUnaryCall(
151
+ request,
152
+ _timeout_to_deadline(timeout),
153
+ metadata,
154
+ credentials,
155
+ wait_for_ready,
156
+ self._channel,
157
+ self._method,
158
+ self._request_serializer,
159
+ self._response_deserializer,
160
+ self._loop,
161
+ )
162
+ else:
163
+ call = InterceptedUnaryUnaryCall(
164
+ self._interceptors,
165
+ request,
166
+ timeout,
167
+ metadata,
168
+ credentials,
169
+ wait_for_ready,
170
+ self._channel,
171
+ self._method,
172
+ self._request_serializer,
173
+ self._response_deserializer,
174
+ self._loop,
175
+ )
176
+
177
+ return call
178
+
179
+
180
+ class UnaryStreamMultiCallable(
181
+ _BaseMultiCallable, _base_channel.UnaryStreamMultiCallable
182
+ ):
183
+ def __call__(
184
+ self,
185
+ request: RequestType,
186
+ *,
187
+ timeout: Optional[float] = None,
188
+ metadata: Optional[MetadataType] = None,
189
+ credentials: Optional[grpc.CallCredentials] = None,
190
+ wait_for_ready: Optional[bool] = None,
191
+ compression: Optional[grpc.Compression] = None,
192
+ ) -> _base_call.UnaryStreamCall[RequestType, ResponseType]:
193
+ metadata = self._init_metadata(metadata, compression)
194
+
195
+ if not self._interceptors:
196
+ call = UnaryStreamCall(
197
+ request,
198
+ _timeout_to_deadline(timeout),
199
+ metadata,
200
+ credentials,
201
+ wait_for_ready,
202
+ self._channel,
203
+ self._method,
204
+ self._request_serializer,
205
+ self._response_deserializer,
206
+ self._loop,
207
+ )
208
+ else:
209
+ call = InterceptedUnaryStreamCall(
210
+ self._interceptors,
211
+ request,
212
+ timeout,
213
+ metadata,
214
+ credentials,
215
+ wait_for_ready,
216
+ self._channel,
217
+ self._method,
218
+ self._request_serializer,
219
+ self._response_deserializer,
220
+ self._loop,
221
+ )
222
+
223
+ return call
224
+
225
+
226
+ class StreamUnaryMultiCallable(
227
+ _BaseMultiCallable, _base_channel.StreamUnaryMultiCallable
228
+ ):
229
+ def __call__(
230
+ self,
231
+ request_iterator: Optional[RequestIterableType] = None,
232
+ timeout: Optional[float] = None,
233
+ metadata: Optional[MetadataType] = None,
234
+ credentials: Optional[grpc.CallCredentials] = None,
235
+ wait_for_ready: Optional[bool] = None,
236
+ compression: Optional[grpc.Compression] = None,
237
+ ) -> _base_call.StreamUnaryCall:
238
+ metadata = self._init_metadata(metadata, compression)
239
+
240
+ if not self._interceptors:
241
+ call = StreamUnaryCall(
242
+ request_iterator,
243
+ _timeout_to_deadline(timeout),
244
+ metadata,
245
+ credentials,
246
+ wait_for_ready,
247
+ self._channel,
248
+ self._method,
249
+ self._request_serializer,
250
+ self._response_deserializer,
251
+ self._loop,
252
+ )
253
+ else:
254
+ call = InterceptedStreamUnaryCall(
255
+ self._interceptors,
256
+ request_iterator,
257
+ timeout,
258
+ metadata,
259
+ credentials,
260
+ wait_for_ready,
261
+ self._channel,
262
+ self._method,
263
+ self._request_serializer,
264
+ self._response_deserializer,
265
+ self._loop,
266
+ )
267
+
268
+ return call
269
+
270
+
271
+ class StreamStreamMultiCallable(
272
+ _BaseMultiCallable, _base_channel.StreamStreamMultiCallable
273
+ ):
274
+ def __call__(
275
+ self,
276
+ request_iterator: Optional[RequestIterableType] = None,
277
+ timeout: Optional[float] = None,
278
+ metadata: Optional[MetadataType] = None,
279
+ credentials: Optional[grpc.CallCredentials] = None,
280
+ wait_for_ready: Optional[bool] = None,
281
+ compression: Optional[grpc.Compression] = None,
282
+ ) -> _base_call.StreamStreamCall:
283
+ metadata = self._init_metadata(metadata, compression)
284
+
285
+ if not self._interceptors:
286
+ call = StreamStreamCall(
287
+ request_iterator,
288
+ _timeout_to_deadline(timeout),
289
+ metadata,
290
+ credentials,
291
+ wait_for_ready,
292
+ self._channel,
293
+ self._method,
294
+ self._request_serializer,
295
+ self._response_deserializer,
296
+ self._loop,
297
+ )
298
+ else:
299
+ call = InterceptedStreamStreamCall(
300
+ self._interceptors,
301
+ request_iterator,
302
+ timeout,
303
+ metadata,
304
+ credentials,
305
+ wait_for_ready,
306
+ self._channel,
307
+ self._method,
308
+ self._request_serializer,
309
+ self._response_deserializer,
310
+ self._loop,
311
+ )
312
+
313
+ return call
314
+
315
+
316
+ class Channel(_base_channel.Channel):
317
+ _loop: asyncio.AbstractEventLoop
318
+ _channel: cygrpc.AioChannel
319
+ _unary_unary_interceptors: List[UnaryUnaryClientInterceptor]
320
+ _unary_stream_interceptors: List[UnaryStreamClientInterceptor]
321
+ _stream_unary_interceptors: List[StreamUnaryClientInterceptor]
322
+ _stream_stream_interceptors: List[StreamStreamClientInterceptor]
323
+
324
+ def __init__(
325
+ self,
326
+ target: str,
327
+ options: ChannelArgumentType,
328
+ credentials: Optional[grpc.ChannelCredentials],
329
+ compression: Optional[grpc.Compression],
330
+ interceptors: Optional[Sequence[ClientInterceptor]],
331
+ ):
332
+ """Constructor.
333
+
334
+ Args:
335
+ target: The target to which to connect.
336
+ options: Configuration options for the channel.
337
+ credentials: A cygrpc.ChannelCredentials or None.
338
+ compression: An optional value indicating the compression method to be
339
+ used over the lifetime of the channel.
340
+ interceptors: An optional list of interceptors that would be used for
341
+ intercepting any RPC executed with that channel.
342
+ """
343
+ self._unary_unary_interceptors = []
344
+ self._unary_stream_interceptors = []
345
+ self._stream_unary_interceptors = []
346
+ self._stream_stream_interceptors = []
347
+
348
+ if interceptors is not None:
349
+ for interceptor in interceptors:
350
+ if isinstance(interceptor, UnaryUnaryClientInterceptor):
351
+ self._unary_unary_interceptors.append(interceptor)
352
+ elif isinstance(interceptor, UnaryStreamClientInterceptor):
353
+ self._unary_stream_interceptors.append(interceptor)
354
+ elif isinstance(interceptor, StreamUnaryClientInterceptor):
355
+ self._stream_unary_interceptors.append(interceptor)
356
+ elif isinstance(interceptor, StreamStreamClientInterceptor):
357
+ self._stream_stream_interceptors.append(interceptor)
358
+ else:
359
+ raise ValueError(
360
+ "Interceptor {} must be ".format(interceptor)
361
+ + "{} or ".format(UnaryUnaryClientInterceptor.__name__)
362
+ + "{} or ".format(UnaryStreamClientInterceptor.__name__)
363
+ + "{} or ".format(StreamUnaryClientInterceptor.__name__)
364
+ + "{}. ".format(StreamStreamClientInterceptor.__name__)
365
+ )
366
+
367
+ self._loop = cygrpc.get_working_loop()
368
+ self._channel = cygrpc.AioChannel(
369
+ _common.encode(target),
370
+ _augment_channel_arguments(options, compression),
371
+ credentials,
372
+ self._loop,
373
+ )
374
+
375
+ async def __aenter__(self):
376
+ return self
377
+
378
+ async def __aexit__(self, exc_type, exc_val, exc_tb):
379
+ await self._close(None)
380
+
381
    async def _close(self, grace):  # pylint: disable=too-many-branches
        """Closes the channel, optionally waiting `grace` seconds for in-flight calls.

        Scans all running asyncio tasks to locate Call objects bound to this
        channel, waits up to `grace` seconds for them (when `grace` is truthy),
        then cancels whatever remains and destroys the Cython channel.
        Idempotent: returns immediately if the channel is already closed.
        """
        if self._channel.closed():
            return

        # No new calls will be accepted by the Cython channel.
        self._channel.closing()

        # Iterate through running tasks
        tasks = _all_tasks()
        calls = []
        call_tasks = []
        for task in tasks:
            try:
                stack = task.get_stack(limit=1)
            except AttributeError as attribute_error:
                # NOTE(lidiz) tl;dr: If the Task is created with a CPython
                # object, it will trigger AttributeError.
                #
                # In the global finalizer, the event loop schedules
                # a CPython PyAsyncGenAThrow object.
                # https://github.com/python/cpython/blob/00e45877e33d32bb61aa13a2033e3bba370bda4d/Lib/asyncio/base_events.py#L484
                #
                # However, the PyAsyncGenAThrow object is written in C and
                # failed to include the normal Python frame objects. Hence,
                # this exception is a false negative, and it is safe to ignore
                # the failure. It is fixed by https://github.com/python/cpython/pull/18669,
                # but not available until 3.9 or 3.8.3. So, we have to keep it
                # for a while.
                # TODO(lidiz) drop this hack after 3.8 deprecation
                if "frame" in str(attribute_error):
                    continue
                else:
                    raise

            # If the Task is created by a C-extension, the stack will be empty.
            if not stack:
                continue

            # Locate ones created by `aio.Call`.
            frame = stack[0]
            candidate = frame.f_locals.get("self")
            # Explicitly check for a non-null candidate instead of the more pythonic 'if candidate:'
            # because doing 'if candidate:' assumes that the coroutine implements '__bool__' which
            # might not always be the case.
            if candidate is not None:
                if isinstance(candidate, _base_call.Call):
                    if hasattr(candidate, "_channel"):
                        # For intercepted Call object
                        if candidate._channel is not self._channel:
                            continue
                    elif hasattr(candidate, "_cython_call"):
                        # For normal Call object
                        if candidate._cython_call._channel is not self._channel:
                            continue
                    else:
                        # Unidentified Call object
                        raise cygrpc.InternalError(
                            f"Unrecognized call object: {candidate}"
                        )

                    calls.append(candidate)
                    call_tasks.append(task)

        # If needed, try to wait for them to finish.
        # Call objects are not always awaitables.
        if grace and call_tasks:
            await asyncio.wait(call_tasks, timeout=grace)

        # Time to cancel existing calls.
        for call in calls:
            call.cancel()

        # Destroy the channel
        self._channel.close()
455
+
456
    async def close(self, grace: Optional[float] = None):
        """Closes this Channel.

        Args:
          grace: Optional number of seconds to wait for in-flight calls to
            finish before cancelling them. With None, calls are cancelled
            immediately.
        """
        await self._close(grace)
458
+
459
    def __del__(self):
        # Best-effort cleanup on garbage collection. The hasattr guard covers
        # the case where __init__ failed before `_channel` was assigned.
        if hasattr(self, "_channel"):
            if not self._channel.closed():
                self._channel.close()
463
+
464
    def get_state(
        self, try_to_connect: bool = False
    ) -> grpc.ChannelConnectivity:
        """Returns the current connectivity state of the channel.

        Args:
          try_to_connect: When True, also kicks off a connection attempt if
            the channel is idle.
        """
        result = self._channel.check_connectivity_state(try_to_connect)
        # Translate the Cython-level state enum into the public grpc enum.
        return _common.CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[result]
469
+
470
    async def wait_for_state_change(
        self,
        last_observed_state: grpc.ChannelConnectivity,
    ) -> None:
        """Waits until the connectivity state differs from `last_observed_state`.

        The deadline passed to the watcher is None (wait indefinitely), so the
        watch is expected to always report a change — hence the assert.
        """
        assert await self._channel.watch_connectivity_state(
            last_observed_state.value[0], None
        )
477
+
478
+ async def channel_ready(self) -> None:
479
+ state = self.get_state(try_to_connect=True)
480
+ while state != grpc.ChannelConnectivity.READY:
481
+ await self.wait_for_state_change(state)
482
+ state = self.get_state(try_to_connect=True)
483
+
484
+ # TODO(xuanwn): Implement this method after we have
485
+ # observability for Asyncio.
486
    def _get_registered_call_handle(self, method: str) -> int:
        """Stub for registered-method call handles.

        Not implemented yet; currently always returns None despite the
        declared int return type. Planned for Asyncio observability support.
        """
        pass
488
+
489
+ # TODO(xuanwn): Implement _registered_method after we have
490
+ # observability for Asyncio.
491
+ # pylint: disable=arguments-differ,unused-argument
492
    def unary_unary(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> UnaryUnaryMultiCallable:
        """Creates a callable for invoking a unary-unary RPC on this channel.

        Args:
          method: The RPC method name.
          request_serializer: Optional serializer for request messages.
          response_deserializer: Optional deserializer for response messages.
          _registered_method: Currently ignored (reserved for observability).

        Returns:
          A UnaryUnaryMultiCallable bound to this channel, its unary-unary
          interceptors and its event loop.
        """
        return UnaryUnaryMultiCallable(
            self._channel,
            _common.encode(method),
            request_serializer,
            response_deserializer,
            self._unary_unary_interceptors,
            [self],
            self._loop,
        )
508
+
509
+ # TODO(xuanwn): Implement _registered_method after we have
510
+ # observability for Asyncio.
511
+ # pylint: disable=arguments-differ,unused-argument
512
    def unary_stream(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> UnaryStreamMultiCallable:
        """Creates a callable for invoking a unary-stream RPC on this channel.

        Args:
          method: The RPC method name.
          request_serializer: Optional serializer for request messages.
          response_deserializer: Optional deserializer for response messages.
          _registered_method: Currently ignored (reserved for observability).

        Returns:
          A UnaryStreamMultiCallable bound to this channel, its unary-stream
          interceptors and its event loop.
        """
        return UnaryStreamMultiCallable(
            self._channel,
            _common.encode(method),
            request_serializer,
            response_deserializer,
            self._unary_stream_interceptors,
            [self],
            self._loop,
        )
528
+
529
+ # TODO(xuanwn): Implement _registered_method after we have
530
+ # observability for Asyncio.
531
+ # pylint: disable=arguments-differ,unused-argument
532
    def stream_unary(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> StreamUnaryMultiCallable:
        """Creates a callable for invoking a stream-unary RPC on this channel.

        Args:
          method: The RPC method name.
          request_serializer: Optional serializer for request messages.
          response_deserializer: Optional deserializer for response messages.
          _registered_method: Currently ignored (reserved for observability).

        Returns:
          A StreamUnaryMultiCallable bound to this channel, its stream-unary
          interceptors and its event loop.
        """
        return StreamUnaryMultiCallable(
            self._channel,
            _common.encode(method),
            request_serializer,
            response_deserializer,
            self._stream_unary_interceptors,
            [self],
            self._loop,
        )
548
+
549
+ # TODO(xuanwn): Implement _registered_method after we have
550
+ # observability for Asyncio.
551
+ # pylint: disable=arguments-differ,unused-argument
552
    def stream_stream(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> StreamStreamMultiCallable:
        """Creates a callable for invoking a stream-stream RPC on this channel.

        Args:
          method: The RPC method name.
          request_serializer: Optional serializer for request messages.
          response_deserializer: Optional deserializer for response messages.
          _registered_method: Currently ignored (reserved for observability).

        Returns:
          A StreamStreamMultiCallable bound to this channel, its stream-stream
          interceptors and its event loop.
        """
        return StreamStreamMultiCallable(
            self._channel,
            _common.encode(method),
            request_serializer,
            response_deserializer,
            self._stream_stream_interceptors,
            [self],
            self._loop,
        )
568
+
569
+
570
def insecure_channel(
    target: str,
    options: Optional[ChannelArgumentType] = None,
    compression: Optional[grpc.Compression] = None,
    interceptors: Optional[Sequence[ClientInterceptor]] = None,
):
    """Creates an insecure asynchronous Channel to a server.

    Args:
      target: The server address.
      options: An optional list of key-value pairs (:term:`channel_arguments`
        in gRPC Core runtime) to configure the channel.
      compression: An optional value indicating the compression method to be
        used over the lifetime of the channel.
      interceptors: An optional sequence of interceptors that will be executed
        for any call executed with this channel.

    Returns:
      A Channel.
    """
    channel_options = options if options is not None else ()
    # An insecure channel carries no credentials object.
    return Channel(
        target,
        channel_options,
        None,
        compression,
        interceptors,
    )
597
+
598
+
599
def secure_channel(
    target: str,
    credentials: grpc.ChannelCredentials,
    options: Optional[ChannelArgumentType] = None,
    compression: Optional[grpc.Compression] = None,
    interceptors: Optional[Sequence[ClientInterceptor]] = None,
):
    """Creates a secure asynchronous Channel to a server.

    Args:
      target: The server address.
      credentials: A ChannelCredentials instance.
      options: An optional list of key-value pairs (:term:`channel_arguments`
        in gRPC Core runtime) to configure the channel.
      compression: An optional value indicating the compression method to be
        used over the lifetime of the channel.
      interceptors: An optional sequence of interceptors that will be executed
        for any call executed with this channel.

    Returns:
      An aio.Channel.
    """
    channel_options = options if options is not None else ()
    # Unwrap the Cython-level credentials held by the public wrapper.
    return Channel(
        target,
        channel_options,
        credentials._credentials,
        compression,
        interceptors,
    )
vllm/lib/python3.10/site-packages/grpc/aio/_interceptor.py ADDED
@@ -0,0 +1,1178 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Interceptors implementation of gRPC Asyncio Python."""
15
+ from abc import ABCMeta
16
+ from abc import abstractmethod
17
+ import asyncio
18
+ import collections
19
+ import functools
20
+ from typing import (
21
+ AsyncIterable,
22
+ Awaitable,
23
+ Callable,
24
+ Iterator,
25
+ List,
26
+ Optional,
27
+ Sequence,
28
+ Union,
29
+ )
30
+
31
+ import grpc
32
+ from grpc._cython import cygrpc
33
+
34
+ from . import _base_call
35
+ from ._call import AioRpcError
36
+ from ._call import StreamStreamCall
37
+ from ._call import StreamUnaryCall
38
+ from ._call import UnaryStreamCall
39
+ from ._call import UnaryUnaryCall
40
+ from ._call import _API_STYLE_ERROR
41
+ from ._call import _RPC_ALREADY_FINISHED_DETAILS
42
+ from ._call import _RPC_HALF_CLOSED_DETAILS
43
+ from ._metadata import Metadata
44
+ from ._typing import DeserializingFunction
45
+ from ._typing import DoneCallbackType
46
+ from ._typing import EOFType
47
+ from ._typing import RequestIterableType
48
+ from ._typing import RequestType
49
+ from ._typing import ResponseIterableType
50
+ from ._typing import ResponseType
51
+ from ._typing import SerializingFunction
52
+ from ._utils import _timeout_to_deadline
53
+
54
+ _LOCAL_CANCELLATION_DETAILS = "Locally cancelled by application!"
55
+
56
+
57
# Abstract base: subclasses must override intercept_service.
class ServerInterceptor(metaclass=ABCMeta):
    """Affords intercepting incoming RPCs on the service-side.

    This is an EXPERIMENTAL API.
    """

    @abstractmethod
    async def intercept_service(
        self,
        continuation: Callable[
            [grpc.HandlerCallDetails], Awaitable[grpc.RpcMethodHandler]
        ],
        handler_call_details: grpc.HandlerCallDetails,
    ) -> grpc.RpcMethodHandler:
        """Intercepts incoming RPCs before handing them over to a handler.

        State can be passed from an interceptor to downstream interceptors
        via contextvars. The first interceptor is called from an empty
        contextvars.Context, and the same Context is used for downstream
        interceptors and for the final handler call. Note that there are no
        guarantees that interceptors and handlers will be called from the
        same thread.

        Args:
          continuation: A function that takes a HandlerCallDetails and
            proceeds to invoke the next interceptor in the chain, if any,
            or the RPC handler lookup logic, with the call details passed
            as an argument, and returns an RpcMethodHandler instance if
            the RPC is considered serviced, or None otherwise.
          handler_call_details: A HandlerCallDetails describing the RPC.

        Returns:
          An RpcMethodHandler with which the RPC may be serviced if the
          interceptor chooses to service this RPC, or None otherwise.
        """
92
+
93
+
94
# Inherits from both a namedtuple (which provides the fields and value
# semantics) and grpc.ClientCallDetails (the public interface type).
class ClientCallDetails(
    collections.namedtuple(
        "ClientCallDetails",
        ("method", "timeout", "metadata", "credentials", "wait_for_ready"),
    ),
    grpc.ClientCallDetails,
):
    """Describes an RPC to be invoked.

    This is an EXPERIMENTAL API.

    Args:
      method: The method name of the RPC.
      timeout: An optional duration of time in seconds to allow for the RPC.
      metadata: Optional metadata to be transmitted to the service-side of
        the RPC.
      credentials: An optional CallCredentials for the RPC.
      wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
    """

    # Field annotations re-declare the namedtuple fields for type checkers;
    # they do not create new attributes at runtime.
    method: str
    timeout: Optional[float]
    metadata: Optional[Metadata]
    credentials: Optional[grpc.CallCredentials]
    wait_for_ready: Optional[bool]
119
+
120
+
121
# Marker base class; concrete interceptors derive from one of the four
# arity-specific subclasses below.
class ClientInterceptor(metaclass=ABCMeta):
    """Base class used for all Aio Client Interceptor classes"""
123
+
124
+
125
class UnaryUnaryClientInterceptor(ClientInterceptor, metaclass=ABCMeta):
    """Affords intercepting unary-unary invocations."""

    @abstractmethod
    async def intercept_unary_unary(
        self,
        continuation: Callable[
            [ClientCallDetails, RequestType], UnaryUnaryCall
        ],
        client_call_details: ClientCallDetails,
        request: RequestType,
    ) -> Union[UnaryUnaryCall, ResponseType]:
        """Intercepts a unary-unary invocation asynchronously.

        Args:
          continuation: A coroutine that proceeds with the invocation by
            executing the next interceptor in the chain or invoking the
            actual RPC on the underlying Channel. It is the interceptor's
            responsibility to call it if it decides to move the RPC forward.
            The interceptor can use
            `call = await continuation(client_call_details, request)`
            to continue with the RPC. `continuation` returns the call to the
            RPC.
          client_call_details: A ClientCallDetails object describing the
            outgoing RPC.
          request: The request value for the RPC.

        Returns:
          An object with the RPC response.

        Raises:
          AioRpcError: Indicating that the RPC terminated with non-OK status.
          asyncio.CancelledError: Indicating that the RPC was canceled.
        """
159
+
160
+
161
class UnaryStreamClientInterceptor(ClientInterceptor, metaclass=ABCMeta):
    """Affords intercepting unary-stream invocations."""

    @abstractmethod
    async def intercept_unary_stream(
        self,
        continuation: Callable[
            [ClientCallDetails, RequestType], UnaryStreamCall
        ],
        client_call_details: ClientCallDetails,
        request: RequestType,
    ) -> Union[ResponseIterableType, UnaryStreamCall]:
        """Intercepts a unary-stream invocation asynchronously.

        The function could return the call object or an asynchronous
        iterator; in case of being an asynchronous iterator this will
        become the source of the reads done by the caller.

        Args:
          continuation: A coroutine that proceeds with the invocation by
            executing the next interceptor in the chain or invoking the
            actual RPC on the underlying Channel. It is the interceptor's
            responsibility to call it if it decides to move the RPC forward.
            The interceptor can use
            `call = await continuation(client_call_details, request)`
            to continue with the RPC. `continuation` returns the call to the
            RPC.
          client_call_details: A ClientCallDetails object describing the
            outgoing RPC.
          request: The request value for the RPC.

        Returns:
          The RPC Call or an asynchronous iterator.

        Raises:
          AioRpcError: Indicating that the RPC terminated with non-OK status.
          asyncio.CancelledError: Indicating that the RPC was canceled.
        """
199
+
200
+
201
class StreamUnaryClientInterceptor(ClientInterceptor, metaclass=ABCMeta):
    """Affords intercepting stream-unary invocations."""

    @abstractmethod
    async def intercept_stream_unary(
        self,
        continuation: Callable[
            [ClientCallDetails, RequestType], StreamUnaryCall
        ],
        client_call_details: ClientCallDetails,
        request_iterator: RequestIterableType,
    ) -> StreamUnaryCall:
        """Intercepts a stream-unary invocation asynchronously.

        Within the interceptor the usage of the call methods like `write` or
        even awaiting the call should be done carefully, since the caller
        could be expecting an untouched call, for example to start writing
        messages to it.

        Args:
          continuation: A coroutine that proceeds with the invocation by
            executing the next interceptor in the chain or invoking the
            actual RPC on the underlying Channel. It is the interceptor's
            responsibility to call it if it decides to move the RPC forward.
            The interceptor can use
            `call = await continuation(client_call_details, request_iterator)`
            to continue with the RPC. `continuation` returns the call to the
            RPC.
          client_call_details: A ClientCallDetails object describing the
            outgoing RPC.
          request_iterator: The request iterator that will produce requests
            for the RPC.

        Returns:
          The RPC Call.

        Raises:
          AioRpcError: Indicating that the RPC terminated with non-OK status.
          asyncio.CancelledError: Indicating that the RPC was canceled.
        """
241
+
242
+
243
class StreamStreamClientInterceptor(ClientInterceptor, metaclass=ABCMeta):
    """Affords intercepting stream-stream invocations."""

    @abstractmethod
    async def intercept_stream_stream(
        self,
        continuation: Callable[
            [ClientCallDetails, RequestType], StreamStreamCall
        ],
        client_call_details: ClientCallDetails,
        request_iterator: RequestIterableType,
    ) -> Union[ResponseIterableType, StreamStreamCall]:
        """Intercepts a stream-stream invocation asynchronously.

        Within the interceptor the usage of the call methods like `write` or
        even awaiting the call should be done carefully, since the caller
        could be expecting an untouched call, for example to start writing
        messages to it.

        The function could return the call object or an asynchronous
        iterator; in case of being an asynchronous iterator this will
        become the source of the reads done by the caller.

        Args:
          continuation: A coroutine that proceeds with the invocation by
            executing the next interceptor in the chain or invoking the
            actual RPC on the underlying Channel. It is the interceptor's
            responsibility to call it if it decides to move the RPC forward.
            The interceptor can use
            `call = await continuation(client_call_details, request_iterator)`
            to continue with the RPC. `continuation` returns the call to the
            RPC.
          client_call_details: A ClientCallDetails object describing the
            outgoing RPC.
          request_iterator: The request iterator that will produce requests
            for the RPC.

        Returns:
          The RPC Call or an asynchronous iterator.

        Raises:
          AioRpcError: Indicating that the RPC terminated with non-OK status.
          asyncio.CancelledError: Indicating that the RPC was canceled.
        """
287
+
288
+
289
class InterceptedCall:
    """Base implementation for all intercepted call arities.

    Interceptors might have some work to do before the RPC invocation with
    the capacity of changing the invocation parameters, and some work to do
    after the RPC invocation with the capacity for accessing to the wrapped
    `UnaryUnaryCall`.

    It handles also early and later cancellations, when the RPC has not even
    started and the execution is still held by the interceptors or when the
    RPC has finished but again the execution is still held by the interceptors.

    Once the RPC is finally executed, all methods are finally done against the
    intercepted call, being at the same time the same call returned to the
    interceptors.

    As a base class for all of the interceptors implements the logic around
    final status, metadata and cancellation.
    """

    # Task running the interceptor chain; its result (when successful) is the
    # underlying call object that most methods delegate to.
    _interceptors_task: asyncio.Task
    # Done-callbacks registered before the interceptors task finished; they
    # are flushed or re-registered on the real call once the task completes.
    # (Annotated as List rather than Sequence: it is mutated via append.)
    _pending_add_done_callbacks: List[DoneCallbackType]

    def __init__(self, interceptors_task: asyncio.Task) -> None:
        self._interceptors_task = interceptors_task
        self._pending_add_done_callbacks = []
        self._interceptors_task.add_done_callback(
            self._fire_or_add_pending_done_callbacks
        )

    def __del__(self):
        # Best-effort cancellation if the intercepted call is garbage
        # collected while still in flight.
        self.cancel()

    def _fire_or_add_pending_done_callbacks(
        self, interceptors_task: asyncio.Task
    ) -> None:
        # Runs when the interceptors task finishes: fire pending callbacks
        # now if the RPC already terminated, otherwise transfer them to the
        # real call object.
        if not self._pending_add_done_callbacks:
            return

        call_completed = False

        try:
            call = interceptors_task.result()
            if call.done():
                call_completed = True
        except (AioRpcError, asyncio.CancelledError):
            # The RPC terminated (error or cancellation) before the
            # interceptors produced a call object.
            call_completed = True

        if call_completed:
            for callback in self._pending_add_done_callbacks:
                callback(self)
        else:
            for callback in self._pending_add_done_callbacks:
                # Wrap so the callback receives this InterceptedCall rather
                # than the inner call object.
                callback = functools.partial(
                    self._wrap_add_done_callback, callback
                )
                call.add_done_callback(callback)

        self._pending_add_done_callbacks = []

    def _wrap_add_done_callback(
        self, callback: DoneCallbackType, unused_call: _base_call.Call
    ) -> None:
        # Adapter: invoked by the inner call, forwards `self` to the
        # user-supplied callback.
        callback(self)

    def cancel(self) -> bool:
        """Attempts cancellation; returns True if it took effect."""
        if not self._interceptors_task.done():
            # There is no yet the intercepted call available,
            # Trying to cancel it by using the generic Asyncio
            # cancellation method.
            return self._interceptors_task.cancel()

        try:
            call = self._interceptors_task.result()
        except AioRpcError:
            # RPC already failed; nothing to cancel.
            return False
        except asyncio.CancelledError:
            # Already cancelled; cancel() reports no new effect.
            return False

        return call.cancel()

    def cancelled(self) -> bool:
        """True if the RPC ended due to cancellation."""
        if not self._interceptors_task.done():
            return False

        try:
            call = self._interceptors_task.result()
        except AioRpcError as err:
            return err.code() == grpc.StatusCode.CANCELLED
        except asyncio.CancelledError:
            return True

        return call.cancelled()

    def done(self) -> bool:
        """True if the RPC reached a terminal state."""
        if not self._interceptors_task.done():
            return False

        try:
            call = self._interceptors_task.result()
        except (AioRpcError, asyncio.CancelledError):
            return True

        return call.done()

    def add_done_callback(self, callback: DoneCallbackType) -> None:
        """Registers `callback(self)` to run when the RPC terminates."""
        if not self._interceptors_task.done():
            # Defer until the interceptors task resolves; see
            # _fire_or_add_pending_done_callbacks.
            self._pending_add_done_callbacks.append(callback)
            return

        try:
            call = self._interceptors_task.result()
        except (AioRpcError, asyncio.CancelledError):
            callback(self)
            return

        if call.done():
            callback(self)
        else:
            callback = functools.partial(self._wrap_add_done_callback, callback)
            call.add_done_callback(callback)

    def time_remaining(self) -> Optional[float]:
        # Not supported for intercepted calls.
        raise NotImplementedError()

    async def initial_metadata(self) -> Optional[Metadata]:
        try:
            call = await self._interceptors_task
        except AioRpcError as err:
            # RPC failed before a call object existed; surface the error's
            # metadata instead.
            return err.initial_metadata()
        except asyncio.CancelledError:
            return None

        return await call.initial_metadata()

    async def trailing_metadata(self) -> Optional[Metadata]:
        try:
            call = await self._interceptors_task
        except AioRpcError as err:
            return err.trailing_metadata()
        except asyncio.CancelledError:
            return None

        return await call.trailing_metadata()

    async def code(self) -> grpc.StatusCode:
        try:
            call = await self._interceptors_task
        except AioRpcError as err:
            return err.code()
        except asyncio.CancelledError:
            return grpc.StatusCode.CANCELLED

        return await call.code()

    async def details(self) -> str:
        try:
            call = await self._interceptors_task
        except AioRpcError as err:
            return err.details()
        except asyncio.CancelledError:
            return _LOCAL_CANCELLATION_DETAILS

        return await call.details()

    async def debug_error_string(self) -> Optional[str]:
        try:
            call = await self._interceptors_task
        except AioRpcError as err:
            return err.debug_error_string()
        except asyncio.CancelledError:
            return ""

        return await call.debug_error_string()

    async def wait_for_connection(self) -> None:
        # Intentionally not exception-shielded: connection failures from the
        # interceptors task propagate to the caller.
        call = await self._interceptors_task
        return await call.wait_for_connection()
467
+
468
+
469
class _InterceptedUnaryResponseMixin:
    # Makes the intercepted call awaitable: first waits for the interceptor
    # chain to produce the inner call, then awaits that call for its response.
    def __await__(self):
        call = yield from self._interceptors_task.__await__()
        response = yield from call.__await__()
        return response
474
+
475
+
476
class _InterceptedStreamResponseMixin:
    """Provides async-iteration and read() over an intercepted response stream."""

    # Lazily-created async generator that proxies responses from the inner call.
    _response_aiter: Optional[AsyncIterable[ResponseType]]

    def _init_stream_response_mixin(self) -> None:
        # Is initialized later, otherwise if the iterator is not finally
        # consumed a logging warning is emitted by Asyncio.
        self._response_aiter = None

    async def _wait_for_interceptor_task_response_iterator(
        self,
    ) -> ResponseType:
        # Wait for the interceptor chain to yield the inner call, then relay
        # its responses one by one.
        call = await self._interceptors_task
        async for response in call:
            yield response

    def __aiter__(self) -> AsyncIterable[ResponseType]:
        if self._response_aiter is None:
            self._response_aiter = (
                self._wait_for_interceptor_task_response_iterator()
            )
        return self._response_aiter

    async def read(self) -> Union[EOFType, ResponseType]:
        # Pulls a single response; returns cygrpc.EOF once the stream ends.
        if self._response_aiter is None:
            self._response_aiter = (
                self._wait_for_interceptor_task_response_iterator()
            )
        try:
            return await self._response_aiter.asend(None)
        except StopAsyncIteration:
            return cygrpc.EOF
507
+
508
+
509
class _InterceptedStreamRequestMixin:
    """Provides write()/done_writing() over an intercepted request stream.

    Two API styles are supported: either the caller supplies a request
    iterator up front, or the caller uses write()/done_writing() and this
    mixin proxies those writes through an internal queue-backed iterator.
    """

    _write_to_iterator_async_gen: Optional[AsyncIterable[RequestType]]
    # Queue backing write(); None when the caller supplied its own iterator
    # (in which case write()/done_writing() are usage errors).
    _write_to_iterator_queue: Optional[asyncio.Queue]
    # Task awaiting the call's final status; used to abort blocked writes.
    _status_code_task: Optional[asyncio.Task]

    # Sentinel placed on the queue to terminate the proxy iterator.
    _FINISH_ITERATOR_SENTINEL = object()

    def _init_stream_request_mixin(
        self, request_iterator: Optional[RequestIterableType]
    ) -> RequestIterableType:
        if request_iterator is None:
            # We provide our own request iterator which is a proxy
            # of the futures writes that will be done by the caller.
            self._write_to_iterator_queue = asyncio.Queue(maxsize=1)
            self._write_to_iterator_async_gen = (
                self._proxy_writes_as_request_iterator()
            )
            self._status_code_task = None
            request_iterator = self._write_to_iterator_async_gen
        else:
            self._write_to_iterator_queue = None

        return request_iterator

    async def _proxy_writes_as_request_iterator(self):
        # Hold requests until the interceptor chain has produced the call.
        await self._interceptors_task

        while True:
            value = await self._write_to_iterator_queue.get()
            if (
                value
                is _InterceptedStreamRequestMixin._FINISH_ITERATOR_SENTINEL
            ):
                break
            yield value

    async def _write_to_iterator_queue_interruptible(
        self, request: RequestType, call: InterceptedCall
    ):
        # Write the specified 'request' to the request iterator queue using the
        # specified 'call' to allow for interruption of the write in the case
        # of abrupt termination of the call.
        if self._status_code_task is None:
            self._status_code_task = self._loop.create_task(call.code())

        await asyncio.wait(
            (
                self._loop.create_task(
                    self._write_to_iterator_queue.put(request)
                ),
                self._status_code_task,
            ),
            return_when=asyncio.FIRST_COMPLETED,
        )

    async def write(self, request: RequestType) -> None:
        """Enqueues a single request message for the RPC.

        Raises:
          cygrpc.UsageError: If the caller provided its own request iterator.
          asyncio.InvalidStateError: If the RPC already finished or the
            write-side was already half-closed.
        """
        # If no queue was created it means that requests
        # should be expected through an iterators provided
        # by the caller.
        if self._write_to_iterator_queue is None:
            raise cygrpc.UsageError(_API_STYLE_ERROR)

        try:
            call = await self._interceptors_task
        except (asyncio.CancelledError, AioRpcError):
            raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS)

        if call.done():
            raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS)
        elif call._done_writing_flag:
            raise asyncio.InvalidStateError(_RPC_HALF_CLOSED_DETAILS)

        await self._write_to_iterator_queue_interruptible(request, call)

        # Re-check after the (possibly interrupted) queue write.
        if call.done():
            raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS)

    async def done_writing(self) -> None:
        """Signal peer that client is done writing.

        This method is idempotent.
        """
        # If no queue was created it means that requests
        # should be expected through an iterators provided
        # by the caller.
        if self._write_to_iterator_queue is None:
            raise cygrpc.UsageError(_API_STYLE_ERROR)

        try:
            call = await self._interceptors_task
        except asyncio.CancelledError:
            raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS)

        # Enqueue the sentinel so the proxy iterator terminates.
        await self._write_to_iterator_queue_interruptible(
            _InterceptedStreamRequestMixin._FINISH_ITERATOR_SENTINEL, call
        )
605
+
606
+
607
+ class InterceptedUnaryUnaryCall(
608
+ _InterceptedUnaryResponseMixin, InterceptedCall, _base_call.UnaryUnaryCall
609
+ ):
610
+ """Used for running a `UnaryUnaryCall` wrapped by interceptors.
611
+
612
+ For the `__await__` method is it is proxied to the intercepted call only when
613
+ the interceptor task is finished.
614
+ """
615
+
616
+ _loop: asyncio.AbstractEventLoop
617
+ _channel: cygrpc.AioChannel
618
+
619
    # pylint: disable=too-many-arguments
    def __init__(
        self,
        interceptors: Sequence[UnaryUnaryClientInterceptor],
        request: RequestType,
        timeout: Optional[float],
        metadata: Metadata,
        credentials: Optional[grpc.CallCredentials],
        wait_for_ready: Optional[bool],
        channel: cygrpc.AioChannel,
        method: bytes,
        request_serializer: SerializingFunction,
        response_deserializer: DeserializingFunction,
        loop: asyncio.AbstractEventLoop,
    ) -> None:
        """Schedules the interceptor chain and wires it into the base class.

        The RPC itself is started asynchronously: `_invoke` runs as a task on
        `loop`, and the InterceptedCall base class tracks that task.
        """
        self._loop = loop
        self._channel = channel
        interceptors_task = loop.create_task(
            self._invoke(
                interceptors,
                method,
                timeout,
                metadata,
                credentials,
                wait_for_ready,
                request,
                request_serializer,
                response_deserializer,
            )
        )
        super().__init__(interceptors_task)
650
+
651
+ # pylint: disable=too-many-arguments
652
+ async def _invoke(
653
+ self,
654
+ interceptors: Sequence[UnaryUnaryClientInterceptor],
655
+ method: bytes,
656
+ timeout: Optional[float],
657
+ metadata: Optional[Metadata],
658
+ credentials: Optional[grpc.CallCredentials],
659
+ wait_for_ready: Optional[bool],
660
+ request: RequestType,
661
+ request_serializer: SerializingFunction,
662
+ response_deserializer: DeserializingFunction,
663
+ ) -> UnaryUnaryCall:
664
+ """Run the RPC call wrapped in interceptors"""
665
+
666
+ async def _run_interceptor(
667
+ interceptors: List[UnaryUnaryClientInterceptor],
668
+ client_call_details: ClientCallDetails,
669
+ request: RequestType,
670
+ ) -> _base_call.UnaryUnaryCall:
671
+ if interceptors:
672
+ continuation = functools.partial(
673
+ _run_interceptor, interceptors[1:]
674
+ )
675
+ call_or_response = await interceptors[0].intercept_unary_unary(
676
+ continuation, client_call_details, request
677
+ )
678
+
679
+ if isinstance(call_or_response, _base_call.UnaryUnaryCall):
680
+ return call_or_response
681
+ else:
682
+ return UnaryUnaryCallResponse(call_or_response)
683
+
684
+ else:
685
+ return UnaryUnaryCall(
686
+ request,
687
+ _timeout_to_deadline(client_call_details.timeout),
688
+ client_call_details.metadata,
689
+ client_call_details.credentials,
690
+ client_call_details.wait_for_ready,
691
+ self._channel,
692
+ client_call_details.method,
693
+ request_serializer,
694
+ response_deserializer,
695
+ self._loop,
696
+ )
697
+
698
+ client_call_details = ClientCallDetails(
699
+ method, timeout, metadata, credentials, wait_for_ready
700
+ )
701
+ return await _run_interceptor(
702
+ list(interceptors), client_call_details, request
703
+ )
704
+
705
+ def time_remaining(self) -> Optional[float]:
706
+ raise NotImplementedError()
707
+
708
+
709
+ class InterceptedUnaryStreamCall(
710
+ _InterceptedStreamResponseMixin, InterceptedCall, _base_call.UnaryStreamCall
711
+ ):
712
+ """Used for running a `UnaryStreamCall` wrapped by interceptors."""
713
+
714
+ _loop: asyncio.AbstractEventLoop
715
+ _channel: cygrpc.AioChannel
716
+ _last_returned_call_from_interceptors = Optional[_base_call.UnaryStreamCall]
717
+
718
+ # pylint: disable=too-many-arguments
719
+ def __init__(
720
+ self,
721
+ interceptors: Sequence[UnaryStreamClientInterceptor],
722
+ request: RequestType,
723
+ timeout: Optional[float],
724
+ metadata: Metadata,
725
+ credentials: Optional[grpc.CallCredentials],
726
+ wait_for_ready: Optional[bool],
727
+ channel: cygrpc.AioChannel,
728
+ method: bytes,
729
+ request_serializer: SerializingFunction,
730
+ response_deserializer: DeserializingFunction,
731
+ loop: asyncio.AbstractEventLoop,
732
+ ) -> None:
733
+ self._loop = loop
734
+ self._channel = channel
735
+ self._init_stream_response_mixin()
736
+ self._last_returned_call_from_interceptors = None
737
+ interceptors_task = loop.create_task(
738
+ self._invoke(
739
+ interceptors,
740
+ method,
741
+ timeout,
742
+ metadata,
743
+ credentials,
744
+ wait_for_ready,
745
+ request,
746
+ request_serializer,
747
+ response_deserializer,
748
+ )
749
+ )
750
+ super().__init__(interceptors_task)
751
+
752
+ # pylint: disable=too-many-arguments
753
+ async def _invoke(
754
+ self,
755
+ interceptors: Sequence[UnaryStreamClientInterceptor],
756
+ method: bytes,
757
+ timeout: Optional[float],
758
+ metadata: Optional[Metadata],
759
+ credentials: Optional[grpc.CallCredentials],
760
+ wait_for_ready: Optional[bool],
761
+ request: RequestType,
762
+ request_serializer: SerializingFunction,
763
+ response_deserializer: DeserializingFunction,
764
+ ) -> UnaryStreamCall:
765
+ """Run the RPC call wrapped in interceptors"""
766
+
767
+ async def _run_interceptor(
768
+ interceptors: List[UnaryStreamClientInterceptor],
769
+ client_call_details: ClientCallDetails,
770
+ request: RequestType,
771
+ ) -> _base_call.UnaryStreamCall:
772
+ if interceptors:
773
+ continuation = functools.partial(
774
+ _run_interceptor, interceptors[1:]
775
+ )
776
+
777
+ call_or_response_iterator = await interceptors[
778
+ 0
779
+ ].intercept_unary_stream(
780
+ continuation, client_call_details, request
781
+ )
782
+
783
+ if isinstance(
784
+ call_or_response_iterator, _base_call.UnaryStreamCall
785
+ ):
786
+ self._last_returned_call_from_interceptors = (
787
+ call_or_response_iterator
788
+ )
789
+ else:
790
+ self._last_returned_call_from_interceptors = (
791
+ UnaryStreamCallResponseIterator(
792
+ self._last_returned_call_from_interceptors,
793
+ call_or_response_iterator,
794
+ )
795
+ )
796
+ return self._last_returned_call_from_interceptors
797
+ else:
798
+ self._last_returned_call_from_interceptors = UnaryStreamCall(
799
+ request,
800
+ _timeout_to_deadline(client_call_details.timeout),
801
+ client_call_details.metadata,
802
+ client_call_details.credentials,
803
+ client_call_details.wait_for_ready,
804
+ self._channel,
805
+ client_call_details.method,
806
+ request_serializer,
807
+ response_deserializer,
808
+ self._loop,
809
+ )
810
+
811
+ return self._last_returned_call_from_interceptors
812
+
813
+ client_call_details = ClientCallDetails(
814
+ method, timeout, metadata, credentials, wait_for_ready
815
+ )
816
+ return await _run_interceptor(
817
+ list(interceptors), client_call_details, request
818
+ )
819
+
820
+ def time_remaining(self) -> Optional[float]:
821
+ raise NotImplementedError()
822
+
823
+
824
+ class InterceptedStreamUnaryCall(
825
+ _InterceptedUnaryResponseMixin,
826
+ _InterceptedStreamRequestMixin,
827
+ InterceptedCall,
828
+ _base_call.StreamUnaryCall,
829
+ ):
830
+ """Used for running a `StreamUnaryCall` wrapped by interceptors.
831
+
832
+ For the `__await__` method is it is proxied to the intercepted call only when
833
+ the interceptor task is finished.
834
+ """
835
+
836
+ _loop: asyncio.AbstractEventLoop
837
+ _channel: cygrpc.AioChannel
838
+
839
+ # pylint: disable=too-many-arguments
840
+ def __init__(
841
+ self,
842
+ interceptors: Sequence[StreamUnaryClientInterceptor],
843
+ request_iterator: Optional[RequestIterableType],
844
+ timeout: Optional[float],
845
+ metadata: Metadata,
846
+ credentials: Optional[grpc.CallCredentials],
847
+ wait_for_ready: Optional[bool],
848
+ channel: cygrpc.AioChannel,
849
+ method: bytes,
850
+ request_serializer: SerializingFunction,
851
+ response_deserializer: DeserializingFunction,
852
+ loop: asyncio.AbstractEventLoop,
853
+ ) -> None:
854
+ self._loop = loop
855
+ self._channel = channel
856
+ request_iterator = self._init_stream_request_mixin(request_iterator)
857
+ interceptors_task = loop.create_task(
858
+ self._invoke(
859
+ interceptors,
860
+ method,
861
+ timeout,
862
+ metadata,
863
+ credentials,
864
+ wait_for_ready,
865
+ request_iterator,
866
+ request_serializer,
867
+ response_deserializer,
868
+ )
869
+ )
870
+ super().__init__(interceptors_task)
871
+
872
+ # pylint: disable=too-many-arguments
873
+ async def _invoke(
874
+ self,
875
+ interceptors: Sequence[StreamUnaryClientInterceptor],
876
+ method: bytes,
877
+ timeout: Optional[float],
878
+ metadata: Optional[Metadata],
879
+ credentials: Optional[grpc.CallCredentials],
880
+ wait_for_ready: Optional[bool],
881
+ request_iterator: RequestIterableType,
882
+ request_serializer: SerializingFunction,
883
+ response_deserializer: DeserializingFunction,
884
+ ) -> StreamUnaryCall:
885
+ """Run the RPC call wrapped in interceptors"""
886
+
887
+ async def _run_interceptor(
888
+ interceptors: Iterator[StreamUnaryClientInterceptor],
889
+ client_call_details: ClientCallDetails,
890
+ request_iterator: RequestIterableType,
891
+ ) -> _base_call.StreamUnaryCall:
892
+ if interceptors:
893
+ continuation = functools.partial(
894
+ _run_interceptor, interceptors[1:]
895
+ )
896
+
897
+ return await interceptors[0].intercept_stream_unary(
898
+ continuation, client_call_details, request_iterator
899
+ )
900
+ else:
901
+ return StreamUnaryCall(
902
+ request_iterator,
903
+ _timeout_to_deadline(client_call_details.timeout),
904
+ client_call_details.metadata,
905
+ client_call_details.credentials,
906
+ client_call_details.wait_for_ready,
907
+ self._channel,
908
+ client_call_details.method,
909
+ request_serializer,
910
+ response_deserializer,
911
+ self._loop,
912
+ )
913
+
914
+ client_call_details = ClientCallDetails(
915
+ method, timeout, metadata, credentials, wait_for_ready
916
+ )
917
+ return await _run_interceptor(
918
+ list(interceptors), client_call_details, request_iterator
919
+ )
920
+
921
+ def time_remaining(self) -> Optional[float]:
922
+ raise NotImplementedError()
923
+
924
+
925
+ class InterceptedStreamStreamCall(
926
+ _InterceptedStreamResponseMixin,
927
+ _InterceptedStreamRequestMixin,
928
+ InterceptedCall,
929
+ _base_call.StreamStreamCall,
930
+ ):
931
+ """Used for running a `StreamStreamCall` wrapped by interceptors."""
932
+
933
+ _loop: asyncio.AbstractEventLoop
934
+ _channel: cygrpc.AioChannel
935
+ _last_returned_call_from_interceptors = Optional[
936
+ _base_call.StreamStreamCall
937
+ ]
938
+
939
+ # pylint: disable=too-many-arguments
940
+ def __init__(
941
+ self,
942
+ interceptors: Sequence[StreamStreamClientInterceptor],
943
+ request_iterator: Optional[RequestIterableType],
944
+ timeout: Optional[float],
945
+ metadata: Metadata,
946
+ credentials: Optional[grpc.CallCredentials],
947
+ wait_for_ready: Optional[bool],
948
+ channel: cygrpc.AioChannel,
949
+ method: bytes,
950
+ request_serializer: SerializingFunction,
951
+ response_deserializer: DeserializingFunction,
952
+ loop: asyncio.AbstractEventLoop,
953
+ ) -> None:
954
+ self._loop = loop
955
+ self._channel = channel
956
+ self._init_stream_response_mixin()
957
+ request_iterator = self._init_stream_request_mixin(request_iterator)
958
+ self._last_returned_call_from_interceptors = None
959
+ interceptors_task = loop.create_task(
960
+ self._invoke(
961
+ interceptors,
962
+ method,
963
+ timeout,
964
+ metadata,
965
+ credentials,
966
+ wait_for_ready,
967
+ request_iterator,
968
+ request_serializer,
969
+ response_deserializer,
970
+ )
971
+ )
972
+ super().__init__(interceptors_task)
973
+
974
+ # pylint: disable=too-many-arguments
975
+ async def _invoke(
976
+ self,
977
+ interceptors: Sequence[StreamStreamClientInterceptor],
978
+ method: bytes,
979
+ timeout: Optional[float],
980
+ metadata: Optional[Metadata],
981
+ credentials: Optional[grpc.CallCredentials],
982
+ wait_for_ready: Optional[bool],
983
+ request_iterator: RequestIterableType,
984
+ request_serializer: SerializingFunction,
985
+ response_deserializer: DeserializingFunction,
986
+ ) -> StreamStreamCall:
987
+ """Run the RPC call wrapped in interceptors"""
988
+
989
+ async def _run_interceptor(
990
+ interceptors: List[StreamStreamClientInterceptor],
991
+ client_call_details: ClientCallDetails,
992
+ request_iterator: RequestIterableType,
993
+ ) -> _base_call.StreamStreamCall:
994
+ if interceptors:
995
+ continuation = functools.partial(
996
+ _run_interceptor, interceptors[1:]
997
+ )
998
+
999
+ call_or_response_iterator = await interceptors[
1000
+ 0
1001
+ ].intercept_stream_stream(
1002
+ continuation, client_call_details, request_iterator
1003
+ )
1004
+
1005
+ if isinstance(
1006
+ call_or_response_iterator, _base_call.StreamStreamCall
1007
+ ):
1008
+ self._last_returned_call_from_interceptors = (
1009
+ call_or_response_iterator
1010
+ )
1011
+ else:
1012
+ self._last_returned_call_from_interceptors = (
1013
+ StreamStreamCallResponseIterator(
1014
+ self._last_returned_call_from_interceptors,
1015
+ call_or_response_iterator,
1016
+ )
1017
+ )
1018
+ return self._last_returned_call_from_interceptors
1019
+ else:
1020
+ self._last_returned_call_from_interceptors = StreamStreamCall(
1021
+ request_iterator,
1022
+ _timeout_to_deadline(client_call_details.timeout),
1023
+ client_call_details.metadata,
1024
+ client_call_details.credentials,
1025
+ client_call_details.wait_for_ready,
1026
+ self._channel,
1027
+ client_call_details.method,
1028
+ request_serializer,
1029
+ response_deserializer,
1030
+ self._loop,
1031
+ )
1032
+ return self._last_returned_call_from_interceptors
1033
+
1034
+ client_call_details = ClientCallDetails(
1035
+ method, timeout, metadata, credentials, wait_for_ready
1036
+ )
1037
+ return await _run_interceptor(
1038
+ list(interceptors), client_call_details, request_iterator
1039
+ )
1040
+
1041
+ def time_remaining(self) -> Optional[float]:
1042
+ raise NotImplementedError()
1043
+
1044
+
1045
+ class UnaryUnaryCallResponse(_base_call.UnaryUnaryCall):
1046
+ """Final UnaryUnaryCall class finished with a response."""
1047
+
1048
+ _response: ResponseType
1049
+
1050
+ def __init__(self, response: ResponseType) -> None:
1051
+ self._response = response
1052
+
1053
+ def cancel(self) -> bool:
1054
+ return False
1055
+
1056
+ def cancelled(self) -> bool:
1057
+ return False
1058
+
1059
+ def done(self) -> bool:
1060
+ return True
1061
+
1062
+ def add_done_callback(self, unused_callback) -> None:
1063
+ raise NotImplementedError()
1064
+
1065
+ def time_remaining(self) -> Optional[float]:
1066
+ raise NotImplementedError()
1067
+
1068
+ async def initial_metadata(self) -> Optional[Metadata]:
1069
+ return None
1070
+
1071
+ async def trailing_metadata(self) -> Optional[Metadata]:
1072
+ return None
1073
+
1074
+ async def code(self) -> grpc.StatusCode:
1075
+ return grpc.StatusCode.OK
1076
+
1077
+ async def details(self) -> str:
1078
+ return ""
1079
+
1080
+ async def debug_error_string(self) -> Optional[str]:
1081
+ return None
1082
+
1083
+ def __await__(self):
1084
+ if False: # pylint: disable=using-constant-test
1085
+ # This code path is never used, but a yield statement is needed
1086
+ # for telling the interpreter that __await__ is a generator.
1087
+ yield None
1088
+ return self._response
1089
+
1090
+ async def wait_for_connection(self) -> None:
1091
+ pass
1092
+
1093
+
1094
+ class _StreamCallResponseIterator:
1095
+ _call: Union[_base_call.UnaryStreamCall, _base_call.StreamStreamCall]
1096
+ _response_iterator: AsyncIterable[ResponseType]
1097
+
1098
+ def __init__(
1099
+ self,
1100
+ call: Union[_base_call.UnaryStreamCall, _base_call.StreamStreamCall],
1101
+ response_iterator: AsyncIterable[ResponseType],
1102
+ ) -> None:
1103
+ self._response_iterator = response_iterator
1104
+ self._call = call
1105
+
1106
+ def cancel(self) -> bool:
1107
+ return self._call.cancel()
1108
+
1109
+ def cancelled(self) -> bool:
1110
+ return self._call.cancelled()
1111
+
1112
+ def done(self) -> bool:
1113
+ return self._call.done()
1114
+
1115
+ def add_done_callback(self, callback) -> None:
1116
+ self._call.add_done_callback(callback)
1117
+
1118
+ def time_remaining(self) -> Optional[float]:
1119
+ return self._call.time_remaining()
1120
+
1121
+ async def initial_metadata(self) -> Optional[Metadata]:
1122
+ return await self._call.initial_metadata()
1123
+
1124
+ async def trailing_metadata(self) -> Optional[Metadata]:
1125
+ return await self._call.trailing_metadata()
1126
+
1127
+ async def code(self) -> grpc.StatusCode:
1128
+ return await self._call.code()
1129
+
1130
+ async def details(self) -> str:
1131
+ return await self._call.details()
1132
+
1133
+ async def debug_error_string(self) -> Optional[str]:
1134
+ return await self._call.debug_error_string()
1135
+
1136
+ def __aiter__(self):
1137
+ return self._response_iterator.__aiter__()
1138
+
1139
+ async def wait_for_connection(self) -> None:
1140
+ return await self._call.wait_for_connection()
1141
+
1142
+
1143
+ class UnaryStreamCallResponseIterator(
1144
+ _StreamCallResponseIterator, _base_call.UnaryStreamCall
1145
+ ):
1146
+ """UnaryStreamCall class which uses an alternative response iterator."""
1147
+
1148
+ async def read(self) -> Union[EOFType, ResponseType]:
1149
+ # Behind the scenes everything goes through the
1150
+ # async iterator. So this path should not be reached.
1151
+ raise NotImplementedError()
1152
+
1153
+
1154
+ class StreamStreamCallResponseIterator(
1155
+ _StreamCallResponseIterator, _base_call.StreamStreamCall
1156
+ ):
1157
+ """StreamStreamCall class which uses an alternative response iterator."""
1158
+
1159
+ async def read(self) -> Union[EOFType, ResponseType]:
1160
+ # Behind the scenes everything goes through the
1161
+ # async iterator. So this path should not be reached.
1162
+ raise NotImplementedError()
1163
+
1164
+ async def write(self, request: RequestType) -> None:
1165
+ # Behind the scenes everything goes through the
1166
+ # async iterator provided by the InterceptedStreamStreamCall.
1167
+ # So this path should not be reached.
1168
+ raise NotImplementedError()
1169
+
1170
+ async def done_writing(self) -> None:
1171
+ # Behind the scenes everything goes through the
1172
+ # async iterator provided by the InterceptedStreamStreamCall.
1173
+ # So this path should not be reached.
1174
+ raise NotImplementedError()
1175
+
1176
+ @property
1177
+ def _done_writing_flag(self) -> bool:
1178
+ return self._call._done_writing_flag
vllm/lib/python3.10/site-packages/grpc/aio/_metadata.py ADDED
@@ -0,0 +1,137 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Implementation of the metadata abstraction for gRPC Asyncio Python."""
15
+ from collections import OrderedDict
16
+ from collections import abc
17
+ from typing import Any, Iterator, List, Optional, Tuple, Union
18
+
19
+ MetadataKey = str
20
+ MetadataValue = Union[str, bytes]
21
+
22
+
23
+ class Metadata(abc.Collection):
24
+ """Metadata abstraction for the asynchronous calls and interceptors.
25
+
26
+ The metadata is a mapping from str -> List[str]
27
+
28
+ Traits
29
+ * Multiple entries are allowed for the same key
30
+ * The order of the values by key is preserved
31
+ * Getting by an element by key, retrieves the first mapped value
32
+ * Supports an immutable view of the data
33
+ * Allows partial mutation on the data without recreating the new object from scratch.
34
+ """
35
+
36
+ def __init__(self, *args: Tuple[MetadataKey, MetadataValue]) -> None:
37
+ self._metadata = OrderedDict()
38
+ for md_key, md_value in args:
39
+ self.add(md_key, md_value)
40
+
41
+ @classmethod
42
+ def from_tuple(cls, raw_metadata: tuple):
43
+ if raw_metadata:
44
+ return cls(*raw_metadata)
45
+ return cls()
46
+
47
+ def add(self, key: MetadataKey, value: MetadataValue) -> None:
48
+ self._metadata.setdefault(key, [])
49
+ self._metadata[key].append(value)
50
+
51
+ def __len__(self) -> int:
52
+ """Return the total number of elements that there are in the metadata,
53
+ including multiple values for the same key.
54
+ """
55
+ return sum(map(len, self._metadata.values()))
56
+
57
+ def __getitem__(self, key: MetadataKey) -> MetadataValue:
58
+ """When calling <metadata>[<key>], the first element of all those
59
+ mapped for <key> is returned.
60
+ """
61
+ try:
62
+ return self._metadata[key][0]
63
+ except (ValueError, IndexError) as e:
64
+ raise KeyError("{0!r}".format(key)) from e
65
+
66
+ def __setitem__(self, key: MetadataKey, value: MetadataValue) -> None:
67
+ """Calling metadata[<key>] = <value>
68
+ Maps <value> to the first instance of <key>.
69
+ """
70
+ if key not in self:
71
+ self._metadata[key] = [value]
72
+ else:
73
+ current_values = self.get_all(key)
74
+ self._metadata[key] = [value, *current_values[1:]]
75
+
76
+ def __delitem__(self, key: MetadataKey) -> None:
77
+ """``del metadata[<key>]`` deletes the first mapping for <key>."""
78
+ current_values = self.get_all(key)
79
+ if not current_values:
80
+ raise KeyError(repr(key))
81
+ self._metadata[key] = current_values[1:]
82
+
83
+ def delete_all(self, key: MetadataKey) -> None:
84
+ """Delete all mappings for <key>."""
85
+ del self._metadata[key]
86
+
87
+ def __iter__(self) -> Iterator[Tuple[MetadataKey, MetadataValue]]:
88
+ for key, values in self._metadata.items():
89
+ for value in values:
90
+ yield (key, value)
91
+
92
+ def keys(self) -> abc.KeysView:
93
+ return abc.KeysView(self)
94
+
95
+ def values(self) -> abc.ValuesView:
96
+ return abc.ValuesView(self)
97
+
98
+ def items(self) -> abc.ItemsView:
99
+ return abc.ItemsView(self)
100
+
101
+ def get(
102
+ self, key: MetadataKey, default: MetadataValue = None
103
+ ) -> Optional[MetadataValue]:
104
+ try:
105
+ return self[key]
106
+ except KeyError:
107
+ return default
108
+
109
+ def get_all(self, key: MetadataKey) -> List[MetadataValue]:
110
+ """For compatibility with other Metadata abstraction objects (like in Java),
111
+ this would return all items under the desired <key>.
112
+ """
113
+ return self._metadata.get(key, [])
114
+
115
+ def set_all(self, key: MetadataKey, values: List[MetadataValue]) -> None:
116
+ self._metadata[key] = values
117
+
118
+ def __contains__(self, key: MetadataKey) -> bool:
119
+ return key in self._metadata
120
+
121
+ def __eq__(self, other: Any) -> bool:
122
+ if isinstance(other, self.__class__):
123
+ return self._metadata == other._metadata
124
+ if isinstance(other, tuple):
125
+ return tuple(self) == other
126
+ return NotImplemented # pytype: disable=bad-return-type
127
+
128
+ def __add__(self, other: Any) -> "Metadata":
129
+ if isinstance(other, self.__class__):
130
+ return Metadata(*(tuple(self) + tuple(other)))
131
+ if isinstance(other, tuple):
132
+ return Metadata(*(tuple(self) + other))
133
+ return NotImplemented # pytype: disable=bad-return-type
134
+
135
+ def __repr__(self) -> str:
136
+ view = tuple(self)
137
+ return "{0}({1!r})".format(self.__class__.__name__, view)
vllm/lib/python3.10/site-packages/grpc/aio/_server.py ADDED
@@ -0,0 +1,239 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 The gRPC Authors
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Server-side implementation of gRPC Asyncio Python."""
15
+
16
+ from concurrent.futures import Executor
17
+ from typing import Any, Dict, Optional, Sequence
18
+
19
+ import grpc
20
+ from grpc import _common
21
+ from grpc import _compression
22
+ from grpc._cython import cygrpc
23
+
24
+ from . import _base_server
25
+ from ._interceptor import ServerInterceptor
26
+ from ._typing import ChannelArgumentType
27
+
28
+
29
+ def _augment_channel_arguments(
30
+ base_options: ChannelArgumentType, compression: Optional[grpc.Compression]
31
+ ):
32
+ compression_option = _compression.create_channel_option(compression)
33
+ return tuple(base_options) + compression_option
34
+
35
+
36
+ class Server(_base_server.Server):
37
+ """Serves RPCs."""
38
+
39
+ def __init__(
40
+ self,
41
+ thread_pool: Optional[Executor],
42
+ generic_handlers: Optional[Sequence[grpc.GenericRpcHandler]],
43
+ interceptors: Optional[Sequence[Any]],
44
+ options: ChannelArgumentType,
45
+ maximum_concurrent_rpcs: Optional[int],
46
+ compression: Optional[grpc.Compression],
47
+ ):
48
+ self._loop = cygrpc.get_working_loop()
49
+ if interceptors:
50
+ invalid_interceptors = [
51
+ interceptor
52
+ for interceptor in interceptors
53
+ if not isinstance(interceptor, ServerInterceptor)
54
+ ]
55
+ if invalid_interceptors:
56
+ raise ValueError(
57
+ "Interceptor must be ServerInterceptor, the "
58
+ f"following are invalid: {invalid_interceptors}"
59
+ )
60
+ self._server = cygrpc.AioServer(
61
+ self._loop,
62
+ thread_pool,
63
+ generic_handlers,
64
+ interceptors,
65
+ _augment_channel_arguments(options, compression),
66
+ maximum_concurrent_rpcs,
67
+ )
68
+
69
+ def add_generic_rpc_handlers(
70
+ self, generic_rpc_handlers: Sequence[grpc.GenericRpcHandler]
71
+ ) -> None:
72
+ """Registers GenericRpcHandlers with this Server.
73
+
74
+ This method is only safe to call before the server is started.
75
+
76
+ Args:
77
+ generic_rpc_handlers: A sequence of GenericRpcHandlers that will be
78
+ used to service RPCs.
79
+ """
80
+ self._server.add_generic_rpc_handlers(generic_rpc_handlers)
81
+
82
+ def add_registered_method_handlers(
83
+ self,
84
+ service_name: str,
85
+ method_handlers: Dict[str, grpc.RpcMethodHandler],
86
+ ) -> None:
87
+ # TODO(xuanwn): Implement this for AsyncIO.
88
+ pass
89
+
90
+ def add_insecure_port(self, address: str) -> int:
91
+ """Opens an insecure port for accepting RPCs.
92
+
93
+ This method may only be called before starting the server.
94
+
95
+ Args:
96
+ address: The address for which to open a port. If the port is 0,
97
+ or not specified in the address, then the gRPC runtime will choose a port.
98
+
99
+ Returns:
100
+ An integer port on which the server will accept RPC requests.
101
+ """
102
+ return _common.validate_port_binding_result(
103
+ address, self._server.add_insecure_port(_common.encode(address))
104
+ )
105
+
106
+ def add_secure_port(
107
+ self, address: str, server_credentials: grpc.ServerCredentials
108
+ ) -> int:
109
+ """Opens a secure port for accepting RPCs.
110
+
111
+ This method may only be called before starting the server.
112
+
113
+ Args:
114
+ address: The address for which to open a port.
115
+ if the port is 0, or not specified in the address, then the gRPC
116
+ runtime will choose a port.
117
+ server_credentials: A ServerCredentials object.
118
+
119
+ Returns:
120
+ An integer port on which the server will accept RPC requests.
121
+ """
122
+ return _common.validate_port_binding_result(
123
+ address,
124
+ self._server.add_secure_port(
125
+ _common.encode(address), server_credentials
126
+ ),
127
+ )
128
+
129
+ async def start(self) -> None:
130
+ """Starts this Server.
131
+
132
+ This method may only be called once. (i.e. it is not idempotent).
133
+ """
134
+ await self._server.start()
135
+
136
+ async def stop(self, grace: Optional[float]) -> None:
137
+ """Stops this Server.
138
+
139
+ This method immediately stops the server from servicing new RPCs in
140
+ all cases.
141
+
142
+ If a grace period is specified, this method waits until all active
143
+ RPCs are finished or until the grace period is reached. RPCs that haven't
144
+ been terminated within the grace period are aborted.
145
+ If a grace period is not specified (by passing None for grace), all
146
+ existing RPCs are aborted immediately and this method blocks until
147
+ the last RPC handler terminates.
148
+
149
+ This method is idempotent and may be called at any time. Passing a
150
+ smaller grace value in a subsequent call will have the effect of
151
+ stopping the Server sooner (passing None will have the effect of
152
+ stopping the server immediately). Passing a larger grace value in a
153
+ subsequent call will not have the effect of stopping the server later
154
+ (i.e. the most restrictive grace value is used).
155
+
156
+ Args:
157
+ grace: A duration of time in seconds or None.
158
+ """
159
+ await self._server.shutdown(grace)
160
+
161
+ async def wait_for_termination(
162
+ self, timeout: Optional[float] = None
163
+ ) -> bool:
164
+ """Block current coroutine until the server stops.
165
+
166
+ This is an EXPERIMENTAL API.
167
+
168
+ The wait will not consume computational resources during blocking, and
169
+ it will block until one of the two following conditions are met:
170
+
171
+ 1) The server is stopped or terminated;
172
+ 2) A timeout occurs if timeout is not `None`.
173
+
174
+ The timeout argument works in the same way as `threading.Event.wait()`.
175
+ https://docs.python.org/3/library/threading.html#threading.Event.wait
176
+
177
+ Args:
178
+ timeout: A floating point number specifying a timeout for the
179
+ operation in seconds.
180
+
181
+ Returns:
182
+ A bool indicates if the operation times out.
183
+ """
184
+ return await self._server.wait_for_termination(timeout)
185
+
186
+ def __del__(self):
187
+ """Schedules a graceful shutdown in current event loop.
188
+
189
+ The Cython AioServer doesn't hold a ref-count to this class. It should
190
+ be safe to slightly extend the underlying Cython object's life span.
191
+ """
192
+ if hasattr(self, "_server"):
193
+ if self._server.is_running():
194
+ cygrpc.schedule_coro_threadsafe(
195
+ self._server.shutdown(None),
196
+ self._loop,
197
+ )
198
+
199
+
200
+ def server(
201
+ migration_thread_pool: Optional[Executor] = None,
202
+ handlers: Optional[Sequence[grpc.GenericRpcHandler]] = None,
203
+ interceptors: Optional[Sequence[Any]] = None,
204
+ options: Optional[ChannelArgumentType] = None,
205
+ maximum_concurrent_rpcs: Optional[int] = None,
206
+ compression: Optional[grpc.Compression] = None,
207
+ ):
208
+ """Creates a Server with which RPCs can be serviced.
209
+
210
+ Args:
211
+ migration_thread_pool: A futures.ThreadPoolExecutor to be used by the
212
+ Server to execute non-AsyncIO RPC handlers for migration purpose.
213
+ handlers: An optional list of GenericRpcHandlers used for executing RPCs.
214
+ More handlers may be added by calling add_generic_rpc_handlers any time
215
+ before the server is started.
216
+ interceptors: An optional list of ServerInterceptor objects that observe
217
+ and optionally manipulate the incoming RPCs before handing them over to
218
+ handlers. The interceptors are given control in the order they are
219
+ specified. This is an EXPERIMENTAL API.
220
+ options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC runtime)
221
+ to configure the channel.
222
+ maximum_concurrent_rpcs: The maximum number of concurrent RPCs this server
223
+ will service before returning RESOURCE_EXHAUSTED status, or None to
224
+ indicate no limit.
225
+ compression: An element of grpc.compression, e.g.
226
+ grpc.compression.Gzip. This compression algorithm will be used for the
227
+ lifetime of the server unless overridden by set_compression.
228
+
229
+ Returns:
230
+ A Server object.
231
+ """
232
+ return Server(
233
+ migration_thread_pool,
234
+ () if handlers is None else handlers,
235
+ () if interceptors is None else interceptors,
236
+ () if options is None else options,
237
+ maximum_concurrent_rpcs,
238
+ compression,
239
+ )
vllm/lib/python3.10/site-packages/grpc/aio/_utils.py ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Internal utilities used by the gRPC Aio module."""
15
+ import time
16
+ from typing import Optional
17
+
18
+
19
+ def _timeout_to_deadline(timeout: Optional[float]) -> Optional[float]:
20
+ if timeout is None:
21
+ return None
22
+ return time.time() + timeout
vllm/lib/python3.10/site-packages/grpc/beta/__init__.py ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2015 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
vllm/lib/python3.10/site-packages/grpc/beta/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (159 Bytes). View file
 
vllm/lib/python3.10/site-packages/grpc/beta/__pycache__/_client_adaptations.cpython-310.pyc ADDED
Binary file (16.3 kB). View file
 
vllm/lib/python3.10/site-packages/grpc/beta/__pycache__/_metadata.cpython-310.pyc ADDED
Binary file (1.32 kB). View file
 
vllm/lib/python3.10/site-packages/grpc/beta/__pycache__/_server_adaptations.cpython-310.pyc ADDED
Binary file (11.9 kB). View file