repo
stringlengths 2
99
| file
stringlengths 13
225
| code
stringlengths 0
18.3M
| file_length
int64 0
18.3M
| avg_line_length
float64 0
1.36M
| max_line_length
int64 0
4.26M
| extension_type
stringclasses 1
value |
|---|---|---|---|---|---|---|
grpc
|
grpc-master/src/python/grpcio_reflection/grpc_version.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_reflection/grpc_version.py.template`!!!
VERSION = '1.57.0.dev0'
| 705
| 38.222222
| 101
|
py
|
grpc
|
grpc-master/src/python/grpcio_reflection/grpc_reflection/__init__.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_reflection/grpc_reflection/v1alpha/proto_reflection_descriptor_database.py
|
# Copyright 2022 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Reference implementation for reflection client in gRPC Python.
For usage instructions, see the Python Reflection documentation at
``doc/python/server_reflection.md``.
"""
import logging
from typing import Any, Dict, Iterable, List, Set
from google.protobuf.descriptor_database import DescriptorDatabase
from google.protobuf.descriptor_pb2 import FileDescriptorProto
import grpc
from grpc_reflection.v1alpha.reflection_pb2 import ExtensionNumberResponse
from grpc_reflection.v1alpha.reflection_pb2 import ExtensionRequest
from grpc_reflection.v1alpha.reflection_pb2 import FileDescriptorResponse
from grpc_reflection.v1alpha.reflection_pb2 import ListServiceResponse
from grpc_reflection.v1alpha.reflection_pb2 import ServerReflectionRequest
from grpc_reflection.v1alpha.reflection_pb2 import ServerReflectionResponse
from grpc_reflection.v1alpha.reflection_pb2 import ServiceResponse
from grpc_reflection.v1alpha.reflection_pb2_grpc import ServerReflectionStub
class ProtoReflectionDescriptorDatabase(DescriptorDatabase):
"""
A container and interface for receiving descriptors from a server's
Reflection service.
ProtoReflectionDescriptorDatabase takes a channel to a server with
Reflection service, and provides an interface to retrieve the Reflection
information. It implements the DescriptorDatabase interface.
It is typically used to feed a DescriptorPool instance.
"""
# Implementation based on C++ version found here (version tag 1.39.1):
# grpc/test/cpp/util/proto_reflection_descriptor_database.cc
# while implementing the Python interface given here:
# https://googleapis.dev/python/protobuf/3.17.0/google/protobuf/descriptor_database.html
def __init__(self, channel: grpc.Channel):
DescriptorDatabase.__init__(self)
self._logger = logging.getLogger(__name__)
self._stub = ServerReflectionStub(channel)
self._known_files: Set[str] = set()
self._cached_extension_numbers: Dict[str, List[int]] = dict()
def get_services(self) -> Iterable[str]:
"""
Get list of full names of the registered services.
Returns:
A list of strings corresponding to the names of the services.
"""
request = ServerReflectionRequest(list_services="")
response = self._do_one_request(request, key="")
list_services: ListServiceResponse = response.list_services_response
services: List[ServiceResponse] = list_services.service
return [service.name for service in services]
def FindFileByName(self, name: str) -> FileDescriptorProto:
"""
Find a file descriptor by file name.
This function implements a DescriptorDatabase interface, and is
typically not called directly; prefer using a DescriptorPool instead.
Args:
name: The name of the file. Typically this is a relative path ending in ".proto".
Returns:
A FileDescriptorProto for the file.
Raises:
KeyError: the file was not found.
"""
try:
return super().FindFileByName(name)
except KeyError:
pass
assert name not in self._known_files
request = ServerReflectionRequest(file_by_filename=name)
response = self._do_one_request(request, key=name)
self._add_file_from_response(response.file_descriptor_response)
return super().FindFileByName(name)
def FindFileContainingSymbol(self, symbol: str) -> FileDescriptorProto:
"""
Find the file containing the symbol, and return its file descriptor.
The symbol should be a fully qualified name including the file
descriptor's package and any containing messages. Some examples:
* "some.package.name.Message"
* "some.package.name.Message.NestedEnum"
* "some.package.name.Message.some_field"
This function implements a DescriptorDatabase interface, and is
typically not called directly; prefer using a DescriptorPool instead.
Args:
symbol: The fully-qualified name of the symbol.
Returns:
FileDescriptorProto for the file containing the symbol.
Raises:
KeyError: the symbol was not found.
"""
try:
return super().FindFileContainingSymbol(symbol)
except KeyError:
pass
# Query the server
request = ServerReflectionRequest(file_containing_symbol=symbol)
response = self._do_one_request(request, key=symbol)
self._add_file_from_response(response.file_descriptor_response)
return super().FindFileContainingSymbol(symbol)
def FindAllExtensionNumbers(self, extendee_name: str) -> Iterable[int]:
"""
Find the field numbers used by all known extensions of `extendee_name`.
This function implements a DescriptorDatabase interface, and is
typically not called directly; prefer using a DescriptorPool instead.
Args:
extendee_name: fully-qualified name of the extended message type.
Returns:
A list of field numbers used by all known extensions.
Raises:
KeyError: The message type `extendee_name` was not found.
"""
if extendee_name in self._cached_extension_numbers:
return self._cached_extension_numbers[extendee_name]
request = ServerReflectionRequest(
all_extension_numbers_of_type=extendee_name
)
response = self._do_one_request(request, key=extendee_name)
all_extension_numbers: ExtensionNumberResponse = (
response.all_extension_numbers_response
)
numbers = list(all_extension_numbers.extension_number)
self._cached_extension_numbers[extendee_name] = numbers
return numbers
def FindFileContainingExtension(
self, extendee_name: str, extension_number: int
) -> FileDescriptorProto:
"""
Find the file which defines an extension for the given message type
and field number.
This function implements a DescriptorDatabase interface, and is
typically not called directly; prefer using a DescriptorPool instead.
Args:
extendee_name: fully-qualified name of the extended message type.
extension_number: the number of the extension field.
Returns:
FileDescriptorProto for the file containing the extension.
Raises:
KeyError: The message or the extension number were not found.
"""
try:
return super().FindFileContainingExtension(
extendee_name, extension_number
)
except KeyError:
pass
request = ServerReflectionRequest(
file_containing_extension=ExtensionRequest(
containing_type=extendee_name, extension_number=extension_number
)
)
response = self._do_one_request(
request, key=(extendee_name, extension_number)
)
file_desc = response.file_descriptor_response
self._add_file_from_response(file_desc)
return super().FindFileContainingExtension(
extendee_name, extension_number
)
def _do_one_request(
self, request: ServerReflectionRequest, key: Any
) -> ServerReflectionResponse:
response = self._stub.ServerReflectionInfo(iter([request]))
res = next(response)
if res.WhichOneof("message_response") == "error_response":
# Only NOT_FOUND errors are expected at this layer
error_code = res.error_response.error_code
assert (
error_code == grpc.StatusCode.NOT_FOUND.value[0]
), "unexpected error response: " + repr(res.error_response)
raise KeyError(key)
return res
def _add_file_from_response(
self, file_descriptor: FileDescriptorResponse
) -> None:
protos: List[bytes] = file_descriptor.file_descriptor_proto
for proto in protos:
desc = FileDescriptorProto()
desc.ParseFromString(proto)
if desc.name not in self._known_files:
self._logger.info(
"Loading descriptors from file: %s", desc.name
)
self._known_files.add(desc.name)
self.Add(desc)
| 9,042
| 37.811159
| 94
|
py
|
grpc
|
grpc-master/src/python/grpcio_reflection/grpc_reflection/v1alpha/_async.py
|
# Copyright 2020 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The AsyncIO version of the reflection servicer."""
from typing import AsyncIterable
import grpc
from grpc_reflection.v1alpha import reflection_pb2 as _reflection_pb2
from grpc_reflection.v1alpha._base import BaseReflectionServicer
class ReflectionServicer(BaseReflectionServicer):
"""Servicer handling RPCs for service statuses."""
async def ServerReflectionInfo(
self,
request_iterator: AsyncIterable[
_reflection_pb2.ServerReflectionRequest
],
unused_context,
) -> AsyncIterable[_reflection_pb2.ServerReflectionResponse]:
async for request in request_iterator:
if request.HasField("file_by_filename"):
yield self._file_by_filename(request.file_by_filename)
elif request.HasField("file_containing_symbol"):
yield self._file_containing_symbol(
request.file_containing_symbol
)
elif request.HasField("file_containing_extension"):
yield self._file_containing_extension(
request.file_containing_extension.containing_type,
request.file_containing_extension.extension_number,
)
elif request.HasField("all_extension_numbers_of_type"):
yield self._all_extension_numbers_of_type(
request.all_extension_numbers_of_type
)
elif request.HasField("list_services"):
yield self._list_services()
else:
yield _reflection_pb2.ServerReflectionResponse(
error_response=_reflection_pb2.ErrorResponse(
error_code=grpc.StatusCode.INVALID_ARGUMENT.value[0],
error_message=grpc.StatusCode.INVALID_ARGUMENT.value[
1
].encode(),
)
)
__all__ = [
"ReflectionServicer",
]
| 2,549
| 38.230769
| 77
|
py
|
grpc
|
grpc-master/src/python/grpcio_reflection/grpc_reflection/v1alpha/reflection.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Reference implementation for reflection in gRPC Python."""
import sys
import grpc
from grpc_reflection.v1alpha import reflection_pb2 as _reflection_pb2
from grpc_reflection.v1alpha import reflection_pb2_grpc as _reflection_pb2_grpc
from grpc_reflection.v1alpha._base import BaseReflectionServicer
SERVICE_NAME = _reflection_pb2.DESCRIPTOR.services_by_name[
"ServerReflection"
].full_name
class ReflectionServicer(BaseReflectionServicer):
"""Servicer handling RPCs for service statuses."""
def ServerReflectionInfo(self, request_iterator, context):
# pylint: disable=unused-argument
for request in request_iterator:
if request.HasField("file_by_filename"):
yield self._file_by_filename(request.file_by_filename)
elif request.HasField("file_containing_symbol"):
yield self._file_containing_symbol(
request.file_containing_symbol
)
elif request.HasField("file_containing_extension"):
yield self._file_containing_extension(
request.file_containing_extension.containing_type,
request.file_containing_extension.extension_number,
)
elif request.HasField("all_extension_numbers_of_type"):
yield self._all_extension_numbers_of_type(
request.all_extension_numbers_of_type
)
elif request.HasField("list_services"):
yield self._list_services()
else:
yield _reflection_pb2.ServerReflectionResponse(
error_response=_reflection_pb2.ErrorResponse(
error_code=grpc.StatusCode.INVALID_ARGUMENT.value[0],
error_message=grpc.StatusCode.INVALID_ARGUMENT.value[
1
].encode(),
)
)
_enable_server_reflection_doc = """Enables server reflection on a server.
Args:
service_names: Iterable of fully-qualified service names available.
server: grpc.Server to which reflection service will be added.
pool: DescriptorPool object to use (descriptor_pool.Default() if None).
"""
if sys.version_info[0] >= 3 and sys.version_info[1] >= 6:
# Exposes AsyncReflectionServicer as public API.
# pylint: disable=ungrouped-imports
from grpc.experimental import aio as grpc_aio
# pylint: enable=ungrouped-imports
from . import _async as aio
def enable_server_reflection(service_names, server, pool=None):
if isinstance(server, grpc_aio.Server):
_reflection_pb2_grpc.add_ServerReflectionServicer_to_server(
aio.ReflectionServicer(service_names, pool=pool), server
)
else:
_reflection_pb2_grpc.add_ServerReflectionServicer_to_server(
ReflectionServicer(service_names, pool=pool), server
)
enable_server_reflection.__doc__ = _enable_server_reflection_doc
__all__ = [
"SERVICE_NAME",
"ReflectionServicer",
"enable_server_reflection",
"aio",
]
else:
def enable_server_reflection(service_names, server, pool=None):
_reflection_pb2_grpc.add_ServerReflectionServicer_to_server(
ReflectionServicer(service_names, pool=pool), server
)
enable_server_reflection.__doc__ = _enable_server_reflection_doc
__all__ = [
"SERVICE_NAME",
"ReflectionServicer",
"enable_server_reflection",
]
| 4,147
| 36.709091
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_reflection/grpc_reflection/v1alpha/_base.py
|
# Copyright 2020 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base implementation of reflection servicer."""
from google.protobuf import descriptor_pb2
from google.protobuf import descriptor_pool
import grpc
from grpc_reflection.v1alpha import reflection_pb2 as _reflection_pb2
from grpc_reflection.v1alpha import reflection_pb2_grpc as _reflection_pb2_grpc
_POOL = descriptor_pool.Default()
def _not_found_error():
return _reflection_pb2.ServerReflectionResponse(
error_response=_reflection_pb2.ErrorResponse(
error_code=grpc.StatusCode.NOT_FOUND.value[0],
error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
)
)
def _collect_transitive_dependencies(descriptor, seen_files):
seen_files.update({descriptor.name: descriptor})
for dependency in descriptor.dependencies:
if not dependency.name in seen_files:
# descriptors cannot have circular dependencies
_collect_transitive_dependencies(dependency, seen_files)
def _file_descriptor_response(descriptor):
# collect all dependencies
descriptors = {}
_collect_transitive_dependencies(descriptor, descriptors)
# serialize all descriptors
serialized_proto_list = []
for d_key in descriptors:
proto = descriptor_pb2.FileDescriptorProto()
descriptors[d_key].CopyToProto(proto)
serialized_proto_list.append(proto.SerializeToString())
return _reflection_pb2.ServerReflectionResponse(
file_descriptor_response=_reflection_pb2.FileDescriptorResponse(
file_descriptor_proto=(serialized_proto_list)
),
)
class BaseReflectionServicer(_reflection_pb2_grpc.ServerReflectionServicer):
"""Base class for reflection servicer."""
def __init__(self, service_names, pool=None):
"""Constructor.
Args:
service_names: Iterable of fully-qualified service names available.
pool: An optional DescriptorPool instance.
"""
self._service_names = tuple(sorted(service_names))
self._pool = _POOL if pool is None else pool
def _file_by_filename(self, filename):
try:
descriptor = self._pool.FindFileByName(filename)
except KeyError:
return _not_found_error()
else:
return _file_descriptor_response(descriptor)
def _file_containing_symbol(self, fully_qualified_name):
try:
descriptor = self._pool.FindFileContainingSymbol(
fully_qualified_name
)
except KeyError:
return _not_found_error()
else:
return _file_descriptor_response(descriptor)
def _file_containing_extension(self, containing_type, extension_number):
try:
message_descriptor = self._pool.FindMessageTypeByName(
containing_type
)
extension_descriptor = self._pool.FindExtensionByNumber(
message_descriptor, extension_number
)
descriptor = self._pool.FindFileContainingSymbol(
extension_descriptor.full_name
)
except KeyError:
return _not_found_error()
else:
return _file_descriptor_response(descriptor)
def _all_extension_numbers_of_type(self, containing_type):
try:
message_descriptor = self._pool.FindMessageTypeByName(
containing_type
)
extension_numbers = tuple(
sorted(
extension.number
for extension in self._pool.FindAllExtensions(
message_descriptor
)
)
)
except KeyError:
return _not_found_error()
else:
return _reflection_pb2.ServerReflectionResponse(
all_extension_numbers_response=_reflection_pb2.ExtensionNumberResponse(
base_type_name=message_descriptor.full_name,
extension_number=extension_numbers,
)
)
def _list_services(self):
return _reflection_pb2.ServerReflectionResponse(
list_services_response=_reflection_pb2.ListServiceResponse(
service=[
_reflection_pb2.ServiceResponse(name=service_name)
for service_name in self._service_names
]
)
)
__all__ = ["BaseReflectionServicer"]
| 5,024
| 34.13986
| 87
|
py
|
grpc
|
grpc-master/src/python/grpcio_reflection/grpc_reflection/v1alpha/__init__.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_csds/setup.py
|
# Copyright 2021 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup module for CSDS in gRPC Python."""
import os
import sys
import setuptools
_PACKAGE_PATH = os.path.realpath(os.path.dirname(__file__))
_README_PATH = os.path.join(_PACKAGE_PATH, "README.rst")
# Ensure we're in the proper directory whether or not we're being used by pip.
os.chdir(os.path.dirname(os.path.abspath(__file__)))
# Break import-style to ensure we can actually find our local modules.
import grpc_version
CLASSIFIERS = [
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
]
PACKAGE_DIRECTORIES = {
"": ".",
}
INSTALL_REQUIRES = (
"protobuf>=4.21.6",
"xds-protos>=0.0.7",
"grpcio>={version}".format(version=grpc_version.VERSION),
)
SETUP_REQUIRES = INSTALL_REQUIRES
setuptools.setup(
name="grpcio-csds",
version=grpc_version.VERSION,
license="Apache License 2.0",
description="xDS configuration dump library",
long_description=open(_README_PATH, "r").read(),
author="The gRPC Authors",
author_email="grpc-io@googlegroups.com",
classifiers=CLASSIFIERS,
url="https://grpc.io",
package_dir=PACKAGE_DIRECTORIES,
packages=setuptools.find_packages("."),
python_requires=">=3.6",
install_requires=INSTALL_REQUIRES,
setup_requires=SETUP_REQUIRES,
)
| 1,958
| 29.609375
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_csds/grpc_version.py
|
# Copyright 2021 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_csds/grpc_version.py.template`!!!
VERSION = '1.57.0.dev0'
| 702
| 38.055556
| 95
|
py
|
grpc
|
grpc-master/src/python/grpcio_csds/grpc_csds/__init__.py
|
# Copyright 2021 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Channelz debug service implementation in gRPC Python."""
from envoy.service.status.v3 import csds_pb2
from envoy.service.status.v3 import csds_pb2_grpc
from google.protobuf import json_format
from grpc._cython import cygrpc
class ClientStatusDiscoveryServiceServicer(
csds_pb2_grpc.ClientStatusDiscoveryServiceServicer
):
"""CSDS Servicer works for both the sync API and asyncio API."""
@staticmethod
def FetchClientStatus(request, unused_context):
client_config = csds_pb2.ClientConfig.FromString(
cygrpc.dump_xds_configs()
)
response = csds_pb2.ClientStatusResponse()
response.config.append(client_config)
return response
@staticmethod
def StreamClientStatus(request_iterator, context):
for request in request_iterator:
yield ClientStatusDiscoveryServiceServicer.FetchClientStatus(
request, context
)
def add_csds_servicer(server):
"""Register CSDS servicer to a server.
CSDS is part of xDS protocol used to expose in-effective traffic
configuration (or xDS resources). It focuses on simplify the debugging of
unexpected routing behaviors, which could be due to a misconfiguration,
unhealthy backends or issues in the control or data plane.
Args:
server: A gRPC server to which the CSDS service will be added.
"""
csds_pb2_grpc.add_ClientStatusDiscoveryServiceServicer_to_server(
ClientStatusDiscoveryServiceServicer(), server
)
__all__ = ["ClientStatusDiscoveryServiceServicer", "add_csds_servicer"]
| 2,173
| 34.639344
| 77
|
py
|
grpc
|
grpc-master/src/python/grpcio_observability/grpc_observability/_gcp_observability.py
|
# Copyright 2023 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from dataclasses import dataclass
from dataclasses import field
import logging
import threading
import time
from typing import Any, Mapping, Optional
import grpc
from grpc_observability import _cyobservability # pytype: disable=pyi-error
from grpc_observability._open_census_exporter import CENSUS_UPLOAD_INTERVAL_SECS
from grpc_observability._open_census_exporter import OpenCensusExporter
from opencensus.trace import execution_context
from opencensus.trace import span_context as span_context_module
from opencensus.trace import trace_options as trace_options_module
_LOGGER = logging.getLogger(__name__)
ClientCallTracerCapsule = Any # it appears only once in the function signature
ServerCallTracerFactoryCapsule = (
Any # it appears only once in the function signature
)
grpc_observability = Any # grpc_observability.py imports this module.
GRPC_STATUS_CODE_TO_STRING = {
grpc.StatusCode.OK: "OK",
grpc.StatusCode.CANCELLED: "CANCELLED",
grpc.StatusCode.UNKNOWN: "UNKNOWN",
grpc.StatusCode.INVALID_ARGUMENT: "INVALID_ARGUMENT",
grpc.StatusCode.DEADLINE_EXCEEDED: "DEADLINE_EXCEEDED",
grpc.StatusCode.NOT_FOUND: "NOT_FOUND",
grpc.StatusCode.ALREADY_EXISTS: "ALREADY_EXISTS",
grpc.StatusCode.PERMISSION_DENIED: "PERMISSION_DENIED",
grpc.StatusCode.UNAUTHENTICATED: "UNAUTHENTICATED",
grpc.StatusCode.RESOURCE_EXHAUSTED: "RESOURCE_EXHAUSTED",
grpc.StatusCode.FAILED_PRECONDITION: "FAILED_PRECONDITION",
grpc.StatusCode.ABORTED: "ABORTED",
grpc.StatusCode.OUT_OF_RANGE: "OUT_OF_RANGE",
grpc.StatusCode.UNIMPLEMENTED: "UNIMPLEMENTED",
grpc.StatusCode.INTERNAL: "INTERNAL",
grpc.StatusCode.UNAVAILABLE: "UNAVAILABLE",
grpc.StatusCode.DATA_LOSS: "DATA_LOSS",
}
@dataclass
class GcpObservabilityPythonConfig:
_singleton = None
_lock: threading.RLock = threading.RLock()
project_id: str = ""
stats_enabled: bool = False
tracing_enabled: bool = False
labels: Optional[Mapping[str, str]] = field(default_factory=dict)
sampling_rate: Optional[float] = 0.0
@staticmethod
def get():
with GcpObservabilityPythonConfig._lock:
if GcpObservabilityPythonConfig._singleton is None:
GcpObservabilityPythonConfig._singleton = (
GcpObservabilityPythonConfig()
)
return GcpObservabilityPythonConfig._singleton
def set_configuration(
self,
project_id: str,
sampling_rate: Optional[float] = 0.0,
labels: Optional[Mapping[str, str]] = None,
tracing_enabled: bool = False,
stats_enabled: bool = False,
) -> None:
self.project_id = project_id
self.stats_enabled = stats_enabled
self.tracing_enabled = tracing_enabled
self.labels = labels
self.sampling_rate = sampling_rate
# pylint: disable=no-self-use
class GCPOpenCensusObservability(grpc._observability.ObservabilityPlugin):
"""GCP OpenCensus based plugin implementation.
If no exporter is passed, the default will be OpenCensus StackDriver
based exporter.
For more details, please refer to User Guide:
* https://cloud.google.com/stackdriver/docs/solutions/grpc
Attributes:
config: Configuration for GCP OpenCensus Observability.
exporter: Exporter used to export data.
"""
config: GcpObservabilityPythonConfig
exporter: "grpc_observability.Exporter"
use_open_census_exporter: bool
def __init__(self, exporter: "grpc_observability.Exporter" = None):
self.exporter = None
self.config = GcpObservabilityPythonConfig.get()
self.use_open_census_exporter = False
config_valid = _cyobservability.set_gcp_observability_config(
self.config
)
if not config_valid:
raise ValueError("Invalid configuration")
if exporter:
self.exporter = exporter
else:
self.exporter = OpenCensusExporter(self.config)
self.use_open_census_exporter = True
if self.config.tracing_enabled:
self.set_tracing(True)
if self.config.stats_enabled:
self.set_stats(True)
def __enter__(self):
try:
_cyobservability.cyobservability_init(self.exporter)
# TODO(xuanwn): Use specific exceptons
except Exception as e: # pylint: disable=broad-except
_LOGGER.exception("GCPOpenCensusObservability failed with: %s", e)
grpc._observability.observability_init(self)
return self
def __exit__(self, exc_type, exc_val, exc_tb) -> None:
self.exit()
def exit(self) -> None:
# Sleep so we don't loss any data. If we shutdown export thread
# immediately after exit, it's possible that core didn't call RecordEnd
# in callTracer, and all data recorded by calling RecordEnd will be
# lost.
# CENSUS_EXPORT_BATCH_INTERVAL_SECS: The time equals to the time in
# AwaitNextBatchLocked.
# TODO(xuanwn): explicit synchronization
# https://github.com/grpc/grpc/issues/33262
time.sleep(_cyobservability.CENSUS_EXPORT_BATCH_INTERVAL_SECS)
if self.use_open_census_exporter:
# Sleep so StackDriver can upload data to GCP.
time.sleep(CENSUS_UPLOAD_INTERVAL_SECS)
self.set_tracing(False)
self.set_stats(False)
_cyobservability.observability_deinit()
grpc._observability.observability_deinit()
def create_client_call_tracer(
self, method_name: bytes
) -> ClientCallTracerCapsule:
current_span = execution_context.get_current_span()
if current_span:
# Propagate existing OC context
trace_id = current_span.context_tracer.trace_id.encode("utf8")
parent_span_id = current_span.span_id.encode("utf8")
capsule = _cyobservability.create_client_call_tracer(
method_name, trace_id, parent_span_id
)
else:
trace_id = span_context_module.generate_trace_id().encode("utf8")
capsule = _cyobservability.create_client_call_tracer(
method_name, trace_id
)
return capsule
def create_server_call_tracer_factory(
self,
) -> ServerCallTracerFactoryCapsule:
capsule = _cyobservability.create_server_call_tracer_factory_capsule()
return capsule
def delete_client_call_tracer(
self, client_call_tracer: ClientCallTracerCapsule
) -> None:
_cyobservability.delete_client_call_tracer(client_call_tracer)
def save_trace_context(
self, trace_id: str, span_id: str, is_sampled: bool
) -> None:
trace_options = trace_options_module.TraceOptions(0)
trace_options.set_enabled(is_sampled)
span_context = span_context_module.SpanContext(
trace_id=trace_id, span_id=span_id, trace_options=trace_options
)
current_tracer = execution_context.get_opencensus_tracer()
current_tracer.span_context = span_context
def record_rpc_latency(
self, method: str, rpc_latency: float, status_code: grpc.StatusCode
) -> None:
status_code = GRPC_STATUS_CODE_TO_STRING.get(status_code, "UNKNOWN")
_cyobservability._record_rpc_latency(
self.exporter, method, rpc_latency, status_code
)
| 8,020
| 36.834906
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_observability/grpc_observability/_views.py
|
# Copyright 2023 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Mapping
from grpc_observability import _measures
from grpc_observability._cyobservability import MetricsName
from opencensus.stats import aggregation
from opencensus.stats import view as view_module
from opencensus.tags.tag_key import TagKey
METRICS_NAME_TO_MEASURE = {
MetricsName.CLIENT_STARTED_RPCS: _measures.CLIENT_STARTED_RPCS_MEASURE,
MetricsName.CLIENT_ROUNDTRIP_LATENCY: _measures.CLIENT_ROUNDTRIP_LATENCY_MEASURE,
MetricsName.CLIENT_COMPLETED_RPC: _measures.CLIENT_COMPLETED_RPCS_MEASURE,
MetricsName.CLIENT_API_LATENCY: _measures.CLIENT_API_LATENCY_MEASURE,
MetricsName.CLIENT_SEND_BYTES_PER_RPC: _measures.CLIENT_SEND_BYTES_PER_RPC_MEASURE,
MetricsName.CLIENT_RECEIVED_BYTES_PER_RPC: _measures.CLIENT_RECEIVED_BYTES_PER_RPC_MEASURE,
MetricsName.SERVER_STARTED_RPCS: _measures.SERVER_STARTED_RPCS_MEASURE,
MetricsName.SERVER_SENT_BYTES_PER_RPC: _measures.SERVER_SENT_BYTES_PER_RPC_MEASURE,
MetricsName.SERVER_RECEIVED_BYTES_PER_RPC: _measures.SERVER_RECEIVED_BYTES_PER_RPC_MEASURE,
MetricsName.SERVER_SERVER_LATENCY: _measures.SERVER_SERVER_LATENCY_MEASURE,
MetricsName.SERVER_COMPLETED_RPC: _measures.SERVER_COMPLETED_RPCS_MEASURE,
}
# These measure definitions should be kept in sync across opencensus
# implementations--see
# https://github.com/census-instrumentation/opencensus-java/blob/master/contrib/grpc_metrics/src/main/java/io/opencensus/contrib/grpc/metrics/RpcMeasureConstants.java.
def client_method_tag_key():
return TagKey("grpc_client_method")
def client_status_tag_key():
return TagKey("grpc_client_status")
def server_method_tag_key():
return TagKey("grpc_server_method")
def server_status_tag_key():
return TagKey("server_status_tag_key")
def count_distribution_aggregation() -> aggregation.DistributionAggregation:
exponential_boundaries = _get_exponential_boundaries(17, 1.0, 2.0)
return aggregation.DistributionAggregation(exponential_boundaries)
def bytes_distribution_aggregation() -> aggregation.DistributionAggregation:
return aggregation.DistributionAggregation(
[
1024,
2048,
4096,
16384,
65536,
262144,
1048576,
4194304,
16777216,
67108864,
268435456,
1073741824,
4294967296,
]
)
def millis_distribution_aggregation() -> aggregation.DistributionAggregation:
return aggregation.DistributionAggregation(
[
0.01,
0.05,
0.1,
0.3,
0.6,
0.8,
1,
2,
3,
4,
5,
6,
8,
10,
13,
16,
20,
25,
30,
40,
50,
65,
80,
100,
130,
160,
200,
250,
300,
400,
500,
650,
800,
1000,
2000,
5000,
10000,
20000,
50000,
100000,
]
)
# Client
def client_started_rpcs(labels: Mapping[str, str]) -> view_module.View:
view = view_module.View(
"grpc.io/client/started_rpcs",
"The count of RPCs ever received at the server, including RPCs"
+ " that have not completed.",
[TagKey(key) for key in labels.keys()] + [client_method_tag_key()],
_measures.CLIENT_STARTED_RPCS_MEASURE,
aggregation.CountAggregation(),
)
return view
def client_completed_rpcs(labels: Mapping[str, str]) -> view_module.View:
view = view_module.View(
"grpc.io/client/completed_rpcs",
"The total count of RPCs completed, for example, when a response"
+ " is sent by the server.",
[TagKey(key) for key in labels.keys()]
+ [client_method_tag_key(), client_status_tag_key()],
_measures.CLIENT_COMPLETED_RPCS_MEASURE,
aggregation.CountAggregation(),
)
return view
def client_roundtrip_latency(labels: Mapping[str, str]) -> view_module.View:
view = view_module.View(
"grpc.io/client/roundtrip_latency",
"End-to-end time taken to complete an RPC attempt including the time"
+ " it takes to pick a subchannel.",
[TagKey(key) for key in labels.keys()] + [client_method_tag_key()],
_measures.CLIENT_ROUNDTRIP_LATENCY_MEASURE,
millis_distribution_aggregation(),
)
return view
def client_api_latency(labels: Mapping[str, str]) -> view_module.View:
view = view_module.View(
"grpc.io/client/api_latency",
"The total time taken by the gRPC library to complete an RPC from"
+ " the application's perspective.",
[TagKey(key) for key in labels.keys()]
+ [client_method_tag_key(), client_status_tag_key()],
_measures.CLIENT_API_LATENCY_MEASURE,
millis_distribution_aggregation(),
)
return view
def client_sent_compressed_message_bytes_per_rpc(
labels: Mapping[str, str]
) -> view_module.View:
view = view_module.View(
"grpc.io/client/sent_compressed_message_bytes_per_rpc",
"The total bytes (compressed, not encrypted) sent across all"
+ " request messages per RPC attempt.",
[TagKey(key) for key in labels.keys()]
+ [client_method_tag_key(), client_status_tag_key()],
_measures.CLIENT_SEND_BYTES_PER_RPC_MEASURE,
bytes_distribution_aggregation(),
)
return view
def client_received_compressed_message_bytes_per_rpc(
labels: Mapping[str, str]
) -> view_module.View:
view = view_module.View(
"grpc.io/client/received_compressed_message_bytes_per_rpc",
"The total bytes (compressed, not encrypted) received across"
+ " all response messages per RPC attempt.",
[TagKey(key) for key in labels.keys()]
+ [client_method_tag_key(), client_status_tag_key()],
_measures.CLIENT_RECEIVED_BYTES_PER_RPC_MEASURE,
bytes_distribution_aggregation(),
)
return view
# Server
def server_started_rpcs(labels: Mapping[str, str]) -> view_module.View:
view = view_module.View(
"grpc.io/server/started_rpcs",
"The count of RPCs ever received at the server, including RPCs"
+ " that have not completed.",
[TagKey(key) for key in labels.keys()] + [server_method_tag_key()],
_measures.SERVER_STARTED_RPCS_MEASURE,
aggregation.CountAggregation(),
)
return view
def server_completed_rpcs(labels: Mapping[str, str]) -> view_module.View:
view = view_module.View(
"grpc.io/server/completed_rpcs",
"The total count of RPCs completed, for example, when a response"
+ " is sent by the server.",
[TagKey(key) for key in labels.keys()]
+ [server_method_tag_key(), server_status_tag_key()],
_measures.SERVER_COMPLETED_RPCS_MEASURE,
aggregation.CountAggregation(),
)
return view
def server_sent_compressed_message_bytes_per_rpc(
labels: Mapping[str, str]
) -> view_module.View:
view = view_module.View(
"grpc.io/server/sent_compressed_message_bytes_per_rpc",
"The total bytes (compressed not encrypted) sent across all response"
+ " messages per RPC.",
[TagKey(key) for key in labels.keys()]
+ [server_method_tag_key(), server_status_tag_key()],
_measures.SERVER_SENT_BYTES_PER_RPC_MEASURE,
bytes_distribution_aggregation(),
)
return view
def server_received_compressed_message_bytes_per_rpc(
labels: Mapping[str, str]
) -> view_module.View:
view = view_module.View(
"grpc.io/server/received_compressed_message_bytes_per_rpc",
"The total bytes (compressed not encrypted) received across all"
+ " request messages per RPC.",
[TagKey(key) for key in labels.keys()]
+ [server_method_tag_key(), server_status_tag_key()],
_measures.SERVER_RECEIVED_BYTES_PER_RPC_MEASURE,
bytes_distribution_aggregation(),
)
return view
def server_server_latency(labels: Mapping[str, str]) -> view_module.View:
view = view_module.View(
"grpc.io/server/server_latency",
"The total time taken by an RPC from server transport's"
+ " (HTTP2 / inproc / cronet) perspective.",
[TagKey(key) for key in labels.keys()]
+ [server_method_tag_key(), server_status_tag_key()],
_measures.SERVER_SERVER_LATENCY_MEASURE,
millis_distribution_aggregation(),
)
return view
def _get_exponential_boundaries(
num_finite_buckets: int, scale: float, grrowth_factor: float
) -> list:
boundaries = []
upper_bound = scale
for _ in range(num_finite_buckets):
boundaries.append(upper_bound)
upper_bound *= grrowth_factor
return boundaries
| 9,564
| 32.211806
| 167
|
py
|
grpc
|
grpc-master/src/python/grpcio_observability/grpc_observability/_measures.py
|
# Copyright 2023 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from opencensus.stats import measure
# These measure definitions should be kept in sync across opencensus implementations.
# https://github.com/census-instrumentation/opencensus-java/blob/master/contrib/grpc_metrics/src/main/java/io/opencensus/contrib/grpc/metrics/RpcMeasureConstants.java.
# Unit constatns
UNIT_BYTES = "By"
UNIT_MILLISECONDS = "ms"
UNIT_COUNT = "1"
# Client
CLIENT_STARTED_RPCS_MEASURE = measure.MeasureInt(
"grpc.io/client/started_rpcs",
"The total number of client RPCs ever opened, including those that have not been completed.",
UNIT_COUNT,
)
CLIENT_COMPLETED_RPCS_MEASURE = measure.MeasureInt(
"grpc.io/client/completed_rpcs",
"The total number of completed client RPCs",
UNIT_COUNT,
)
CLIENT_ROUNDTRIP_LATENCY_MEASURE = measure.MeasureFloat(
"grpc.io/client/roundtrip_latency",
"Time between first byte of request sent to last byte of response received, or terminal error",
UNIT_MILLISECONDS,
)
CLIENT_API_LATENCY_MEASURE = measure.MeasureInt(
"grpc.io/client/api_latency",
"End-to-end time taken to complete an RPC",
UNIT_MILLISECONDS,
)
CLIENT_SEND_BYTES_PER_RPC_MEASURE = measure.MeasureFloat(
"grpc.io/client/sent_bytes_per_rpc",
"Total bytes sent across all request messages per RPC",
UNIT_BYTES,
)
CLIENT_RECEIVED_BYTES_PER_RPC_MEASURE = measure.MeasureFloat(
"grpc.io/client/received_bytes_per_rpc",
"Total bytes received across all response messages per RPC",
UNIT_BYTES,
)
# Server
SERVER_STARTED_RPCS_MEASURE = measure.MeasureInt(
"grpc.io/server/started_rpcs",
"Total bytes sent across all request messages per RPC",
UNIT_COUNT,
)
SERVER_COMPLETED_RPCS_MEASURE = measure.MeasureInt(
"grpc.io/server/completed_rpcs",
"The total number of completed server RPCs",
UNIT_COUNT,
)
SERVER_SENT_BYTES_PER_RPC_MEASURE = measure.MeasureFloat(
"grpc.io/server/sent_bytes_per_rpc",
"Total bytes sent across all messages per RPC",
UNIT_BYTES,
)
SERVER_RECEIVED_BYTES_PER_RPC_MEASURE = measure.MeasureFloat(
"grpc.io/server/received_bytes_per_rpc",
"Total bytes received across all messages per RPC",
UNIT_BYTES,
)
SERVER_SERVER_LATENCY_MEASURE = measure.MeasureFloat(
"grpc.io/server/server_latency",
"Time between first byte of request received to last byte of response sent, or terminal error",
UNIT_MILLISECONDS,
)
| 2,964
| 31.228261
| 167
|
py
|
grpc
|
grpc-master/src/python/grpcio_observability/grpc_observability/_observability.py
|
# Copyright 2023 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
import abc
from dataclasses import dataclass
from dataclasses import field
from typing import List, Mapping, Tuple
class Exporter(metaclass=abc.ABCMeta):
"""Abstract base class for census data exporters."""
@abc.abstractmethod
def export_stats_data(self, stats_data: List[TracingData]) -> None:
"""Exports a list of TracingData objects to the exporter's destination.
Args:
stats_data: A list of TracingData objects to export.
"""
raise NotImplementedError()
@abc.abstractmethod
def export_tracing_data(self, tracing_data: List[StatsData]) -> None:
"""Exports a list of StatsData objects to the exporter's destination.
Args:
tracing_data: A list of StatsData objects to export.
"""
raise NotImplementedError()
@dataclass(frozen=True)
class StatsData:
"""A data class representing stats data.
Attributes:
name: An element of grpc_observability._cyobservability.MetricsName, e.g.
MetricsName.CLIENT_STARTED_RPCS.
measure_double: A bool indicate whether the metric is a floating-point
value.
value_int: The actual metric value if measure_double is False.
value_float: The actual metric value if measure_double is True.
labels: A dictionary that maps label tags associated with this metric to
corresponding label value.
"""
name: "grpc_observability._cyobservability.MetricsName"
measure_double: bool
value_int: int = 0
value_float: float = 0.0
labels: Mapping[str, str] = field(default_factory=dict)
@dataclass(frozen=True)
class TracingData:
"""A data class representing tracing data.
Attributes:
name: The name for tracing data, also the name for the Span.
start_time: The start time for the span in RFC3339 UTC "Zulu" format, e.g.
2014-10-02T15:01:23Z
end_time: The end time for the span in RFC3339 UTC "Zulu" format, e.g.
2014-10-02T15:01:23Z
trace_id: The identifier for the trace associated with this span as a
32-character hexadecimal encoded string,
e.g. 26ed0036f2eff2b7317bccce3e28d01f
span_id: The identifier for the span as a 16-character hexadecimal encoded
string. e.g. 113ec879e62583bc
parent_span_id: An option identifier for the span's parent id.
status: An element of grpc.StatusCode in string format representing the
final status for the trace data.
should_sample: A bool indicates whether the span is sampled.
child_span_count: The number of child span associated with this span.
span_labels: A dictionary that maps labels tags associated with this
span to corresponding label value.
span_annotations: A dictionary that maps annotation timeStamp with
description. The timeStamp have a format which can be converted
to Python datetime.datetime, e.g. 2023-05-29 17:07:09.895
"""
name: str
start_time: str
end_time: str
trace_id: str
span_id: str
parent_span_id: str
status: str
should_sample: bool
child_span_count: int
span_labels: Mapping[str, str] = field(default_factory=dict)
span_annotations: List[Tuple[str, str]] = field(default_factory=list)
| 3,866
| 36.182692
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_observability/grpc_observability/_open_census_exporter.py
|
# Copyright 2023 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
import os
from typing import Any, List, Mapping, Optional, Tuple
from google.rpc import code_pb2
from grpc_observability import _observability # pytype: disable=pyi-error
from grpc_observability import _views
from opencensus.common.transports import async_
from opencensus.ext.stackdriver import stats_exporter
from opencensus.ext.stackdriver import trace_exporter
from opencensus.stats import stats as stats_module
from opencensus.stats.stats_recorder import StatsRecorder
from opencensus.stats.view_manager import ViewManager
from opencensus.tags.tag_key import TagKey
from opencensus.tags.tag_map import TagMap
from opencensus.tags.tag_value import TagValue
from opencensus.trace import execution_context
from opencensus.trace import samplers
from opencensus.trace import span
from opencensus.trace import span_context as span_context_module
from opencensus.trace import span_data as span_data_module
from opencensus.trace import status
from opencensus.trace import time_event
from opencensus.trace import trace_options
from opencensus.trace import tracer
_gcp_observability = Any # grpc_observability.py imports this module.
# 60s is the default time for open census to call export.
CENSUS_UPLOAD_INTERVAL_SECS = int(
os.environ.get("GRPC_PYTHON_CENSUS_EXPORT_UPLOAD_INTERVAL_SECS", 20)
)
class StackDriverAsyncTransport(async_.AsyncTransport):
"""Wrapper class used to pass wait_period.
This is required because current StackDriver Tracing Exporter doesn't allow
us pass wait_period to AsyncTransport directly.
Args:
exporter: An opencensus.trace.base_exporter.Exporter object.
"""
def __init__(self, exporter):
super().__init__(exporter, wait_period=CENSUS_UPLOAD_INTERVAL_SECS)
class OpenCensusExporter(_observability.Exporter):
config: "_gcp_observability.GcpObservabilityPythonConfig"
default_labels: Optional[Mapping[str, str]]
project_id: str
tracer: Optional[tracer.Tracer]
stats_recorder: Optional[StatsRecorder]
view_manager: Optional[ViewManager]
def __init__(
self, config: "_gcp_observability.GcpObservabilityPythonConfig"
):
self.config = config.get()
self.default_labels = self.config.labels
self.project_id = self.config.project_id
self.tracer = None
self.stats_recorder = None
self.view_manager = None
self._setup_open_census_stackdriver_exporter()
def _setup_open_census_stackdriver_exporter(self) -> None:
if self.config.stats_enabled:
stats = stats_module.stats
self.stats_recorder = stats.stats_recorder
self.view_manager = stats.view_manager
# If testing locally please add resource="global" to Options, otherwise
# StackDriver might override project_id based on detected resource.
options = stats_exporter.Options(project_id=self.project_id)
metrics_exporter = stats_exporter.new_stats_exporter(
options, interval=CENSUS_UPLOAD_INTERVAL_SECS
)
self.view_manager.register_exporter(metrics_exporter)
self._register_open_census_views()
if self.config.tracing_enabled:
current_tracer = execution_context.get_opencensus_tracer()
trace_id = current_tracer.span_context.trace_id
span_id = current_tracer.span_context.span_id
if not span_id:
span_id = span_context_module.generate_span_id()
span_context = span_context_module.SpanContext(
trace_id=trace_id, span_id=span_id
)
# Create and Saves Tracer and Sampler to ContextVar
sampler = samplers.ProbabilitySampler(
rate=self.config.sampling_rate
)
self.trace_exporter = trace_exporter.StackdriverExporter(
project_id=self.project_id,
transport=StackDriverAsyncTransport,
)
self.tracer = tracer.Tracer(
sampler=sampler,
span_context=span_context,
exporter=self.trace_exporter,
)
def export_stats_data(
self, stats_data: List[_observability.StatsData]
) -> None:
if not self.config.stats_enabled:
return
for data in stats_data:
measure = _views.METRICS_NAME_TO_MEASURE.get(data.name, None)
if not measure:
continue
# Create a measurement map for each metric, otherwise metrics will
# be override instead of accumulate.
measurement_map = self.stats_recorder.new_measurement_map()
# Add data label to default labels.
labels = data.labels
labels.update(self.default_labels)
tag_map = TagMap()
for key, value in labels.items():
tag_map.insert(TagKey(key), TagValue(value))
if data.measure_double:
measurement_map.measure_float_put(measure, data.value_float)
else:
measurement_map.measure_int_put(measure, data.value_int)
measurement_map.record(tag_map)
def export_tracing_data(
self, tracing_data: List[_observability.TracingData]
) -> None:
if not self.config.tracing_enabled:
return
for span_data in tracing_data:
# Only traced data will be exported, thus TraceOptions=1.
span_context = span_context_module.SpanContext(
trace_id=span_data.trace_id,
span_id=span_data.span_id,
trace_options=trace_options.TraceOptions(1),
)
span_datas = _get_span_data(
span_data, span_context, self.default_labels
)
self.trace_exporter.export(span_datas)
def _register_open_census_views(self) -> None:
# Client
self.view_manager.register_view(
_views.client_started_rpcs(self.default_labels)
)
self.view_manager.register_view(
_views.client_completed_rpcs(self.default_labels)
)
self.view_manager.register_view(
_views.client_roundtrip_latency(self.default_labels)
)
self.view_manager.register_view(
_views.client_api_latency(self.default_labels)
)
self.view_manager.register_view(
_views.client_sent_compressed_message_bytes_per_rpc(
self.default_labels
)
)
self.view_manager.register_view(
_views.client_received_compressed_message_bytes_per_rpc(
self.default_labels
)
)
# Server
self.view_manager.register_view(
_views.server_started_rpcs(self.default_labels)
)
self.view_manager.register_view(
_views.server_completed_rpcs(self.default_labels)
)
self.view_manager.register_view(
_views.server_sent_compressed_message_bytes_per_rpc(
self.default_labels
)
)
self.view_manager.register_view(
_views.server_received_compressed_message_bytes_per_rpc(
self.default_labels
)
)
self.view_manager.register_view(
_views.server_server_latency(self.default_labels)
)
def _get_span_annotations(
span_annotations: List[Tuple[str, str]]
) -> List[time_event.Annotation]:
annotations = []
for time_stamp, description in span_annotations:
time = datetime.fromisoformat(time_stamp)
annotations.append(time_event.Annotation(time, description))
return annotations
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-branches
def _status_to_span_status(span_status: str) -> Optional[status.Status]:
if status == "OK":
return status.Status(code_pb2.OK, message=span_status)
elif status == "CANCELLED":
return status.Status(code_pb2.CANCELLED, message=span_status)
elif status == "UNKNOWN":
return status.Status(code_pb2.UNKNOWN, message=span_status)
elif status == "INVALID_ARGUMENT":
return status.Status(code_pb2.INVALID_ARGUMENT, message=span_status)
elif status == "DEADLINE_EXCEEDED":
return status.Status(code_pb2.DEADLINE_EXCEEDED, message=span_status)
elif status == "NOT_FOUND":
return status.Status(code_pb2.NOT_FOUND, message=span_status)
elif status == "ALREADY_EXISTS":
return status.Status(code_pb2.ALREADY_EXISTS, message=span_status)
elif status == "PERMISSION_DENIED":
return status.Status(code_pb2.PERMISSION_DENIED, message=span_status)
elif status == "UNAUTHENTICATED":
return status.Status(code_pb2.UNAUTHENTICATED, message=span_status)
elif status == "RESOURCE_EXHAUSTED":
return status.Status(code_pb2.RESOURCE_EXHAUSTED, message=span_status)
elif status == "FAILED_PRECONDITION":
return status.Status(code_pb2.FAILED_PRECONDITION, message=span_status)
elif status == "ABORTED":
return status.Status(code_pb2.ABORTED, message=span_status)
elif status == "OUT_OF_RANGE":
return status.Status(code_pb2.OUT_OF_RANGE, message=span_status)
elif status == "UNIMPLEMENTED":
return status.Status(code_pb2.UNIMPLEMENTED, message=span_status)
elif status == "INTERNAL":
return status.Status(code_pb2.INTERNAL, message=span_status)
elif status == "UNAVAILABLE":
return status.Status(code_pb2.UNAVAILABLE, message=span_status)
elif status == "DATA_LOSS":
return status.Status(code_pb2.DATA_LOSS, message=span_status)
else:
return None
def _get_span_data(
span_data: _observability.TracingData,
span_context: span_context_module.SpanContext,
labels: Mapping[str, str],
) -> List[span_data_module.SpanData]:
"""Extracts a list of SpanData tuples from a span.
Args:
span_data: _observability.TracingData to convert.
span_context: The context related to the span_data.
labels: Labels to be added to SpanData.
Returns:
A list of opencensus.trace.span_data.SpanData.
"""
span_attributes = span_data.span_labels
span_attributes.update(labels)
span_status = _status_to_span_status(span_data.status)
span_annotations = _get_span_annotations(span_data.span_annotations)
span_datas = [
span_data_module.SpanData(
name=span_data.name,
context=span_context,
span_id=span_data.span_id,
parent_span_id=span_data.parent_span_id
if span_data.parent_span_id
else None,
attributes=span_attributes,
start_time=span_data.start_time,
end_time=span_data.end_time,
child_span_count=span_data.child_span_count,
stack_trace=None,
annotations=span_annotations,
message_events=None,
links=None,
status=span_status,
same_process_as_parent_span=True
if span_data.parent_span_id
else None,
span_kind=span.SpanKind.UNSPECIFIED,
)
]
return span_datas
| 11,846
| 37.842623
| 83
|
py
|
grpc
|
grpc-master/src/python/grpcio_observability/grpc_observability/__init__.py
|
# Copyright 2023 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from grpc_observability._gcp_observability import GCPOpenCensusObservability
__all__ = ("GCPOpenCensusObservability",)
| 698
| 37.833333
| 76
|
py
|
grpc
|
grpc-master/src/python/grpcio_testing/testing_commands.py
|
# Copyright 2018 gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides distutils command classes for the GRPC Python setup process."""
import os
import shutil
import setuptools
ROOT_DIR = os.path.abspath(os.path.dirname(os.path.abspath(__file__)))
LICENSE = os.path.join(ROOT_DIR, "../../../LICENSE")
class Preprocess(setuptools.Command):
"""Command to copy LICENSE from root directory."""
description = ""
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
if os.path.isfile(LICENSE):
shutil.copyfile(LICENSE, os.path.join(ROOT_DIR, "LICENSE"))
| 1,183
| 28.6
| 75
|
py
|
grpc
|
grpc-master/src/python/grpcio_testing/setup.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup module for gRPC Python's testing package."""
import os
import sys
import setuptools
_PACKAGE_PATH = os.path.realpath(os.path.dirname(__file__))
_README_PATH = os.path.join(_PACKAGE_PATH, "README.rst")
# Ensure we're in the proper directory whether or not we're being used by pip.
os.chdir(os.path.dirname(os.path.abspath(__file__)))
# Break import style to ensure that we can find same-directory modules.
import grpc_version
class _NoOpCommand(setuptools.Command):
"""No-op command."""
description = ""
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
pass
PACKAGE_DIRECTORIES = {
"": ".",
}
INSTALL_REQUIRES = (
"protobuf>=4.21.6",
"grpcio>={version}".format(version=grpc_version.VERSION),
)
try:
import testing_commands as _testing_commands
# we are in the build environment, otherwise the above import fails
COMMAND_CLASS = {
# Run preprocess from the repository *before* doing any packaging!
"preprocess": _testing_commands.Preprocess,
}
except ImportError:
COMMAND_CLASS = {
# wire up commands to no-op not to break the external dependencies
"preprocess": _NoOpCommand,
}
setuptools.setup(
name="grpcio-testing",
version=grpc_version.VERSION,
license="Apache License 2.0",
description="Testing utilities for gRPC Python",
long_description=open(_README_PATH, "r").read(),
author="The gRPC Authors",
author_email="grpc-io@googlegroups.com",
url="https://grpc.io",
package_dir=PACKAGE_DIRECTORIES,
packages=setuptools.find_packages("."),
install_requires=INSTALL_REQUIRES,
cmdclass=COMMAND_CLASS,
)
| 2,320
| 26.630952
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_testing/grpc_version.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_testing/grpc_version.py.template`!!!
VERSION = '1.57.0.dev0'
| 702
| 38.055556
| 98
|
py
|
grpc
|
grpc-master/src/python/grpcio_testing/grpc_testing/_time.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test times."""
import collections
import logging
import threading
import time as _time
import grpc
import grpc_testing
logging.basicConfig()
_LOGGER = logging.getLogger(__name__)
def _call(behaviors):
for behavior in behaviors:
try:
behavior()
except Exception: # pylint: disable=broad-except
_LOGGER.exception('Exception calling behavior "%r"!', behavior)
def _call_in_thread(behaviors):
calling = threading.Thread(target=_call, args=(behaviors,))
calling.start()
# NOTE(nathaniel): Because this function is called from "strict" Time
# implementations, it blocks until after all behaviors have terminated.
calling.join()
class _State(object):
def __init__(self):
self.condition = threading.Condition()
self.times_to_behaviors = collections.defaultdict(list)
class _Delta(
collections.namedtuple(
"_Delta",
(
"mature_behaviors",
"earliest_mature_time",
"earliest_immature_time",
),
)
):
pass
def _process(state, now):
mature_behaviors = []
earliest_mature_time = None
while state.times_to_behaviors:
earliest_time = min(state.times_to_behaviors)
if earliest_time <= now:
if earliest_mature_time is None:
earliest_mature_time = earliest_time
earliest_mature_behaviors = state.times_to_behaviors.pop(
earliest_time
)
mature_behaviors.extend(earliest_mature_behaviors)
else:
earliest_immature_time = earliest_time
break
else:
earliest_immature_time = None
return _Delta(
mature_behaviors, earliest_mature_time, earliest_immature_time
)
class _Future(grpc.Future):
def __init__(self, state, behavior, time):
self._state = state
self._behavior = behavior
self._time = time
self._cancelled = False
def cancel(self):
with self._state.condition:
if self._cancelled:
return True
else:
behaviors_at_time = self._state.times_to_behaviors.get(
self._time
)
if behaviors_at_time is None:
return False
else:
behaviors_at_time.remove(self._behavior)
if not behaviors_at_time:
self._state.times_to_behaviors.pop(self._time)
self._state.condition.notify_all()
self._cancelled = True
return True
def cancelled(self):
with self._state.condition:
return self._cancelled
def running(self):
raise NotImplementedError()
def done(self):
raise NotImplementedError()
def result(self, timeout=None):
raise NotImplementedError()
def exception(self, timeout=None):
raise NotImplementedError()
def traceback(self, timeout=None):
raise NotImplementedError()
def add_done_callback(self, fn):
raise NotImplementedError()
class StrictRealTime(grpc_testing.Time):
def __init__(self):
self._state = _State()
self._active = False
self._calling = None
def _activity(self):
while True:
with self._state.condition:
while True:
now = _time.time()
delta = _process(self._state, now)
self._state.condition.notify_all()
if delta.mature_behaviors:
self._calling = delta.earliest_mature_time
break
self._calling = None
if delta.earliest_immature_time is None:
self._active = False
return
else:
timeout = max(0, delta.earliest_immature_time - now)
self._state.condition.wait(timeout=timeout)
_call(delta.mature_behaviors)
def _ensure_called_through(self, time):
with self._state.condition:
while (
self._state.times_to_behaviors
and min(self._state.times_to_behaviors) < time
) or (self._calling is not None and self._calling < time):
self._state.condition.wait()
def _call_at(self, behavior, time):
with self._state.condition:
self._state.times_to_behaviors[time].append(behavior)
if self._active:
self._state.condition.notify_all()
else:
activity = threading.Thread(target=self._activity)
activity.start()
self._active = True
return _Future(self._state, behavior, time)
def time(self):
return _time.time()
def call_in(self, behavior, delay):
return self._call_at(behavior, _time.time() + delay)
def call_at(self, behavior, time):
return self._call_at(behavior, time)
def sleep_for(self, duration):
time = _time.time() + duration
_time.sleep(duration)
self._ensure_called_through(time)
def sleep_until(self, time):
_time.sleep(max(0, time - _time.time()))
self._ensure_called_through(time)
class StrictFakeTime(grpc_testing.Time):
def __init__(self, time):
self._state = _State()
self._time = time
def time(self):
return self._time
def call_in(self, behavior, delay):
if delay <= 0:
_call_in_thread((behavior,))
else:
with self._state.condition:
time = self._time + delay
self._state.times_to_behaviors[time].append(behavior)
return _Future(self._state, behavior, time)
def call_at(self, behavior, time):
with self._state.condition:
if time <= self._time:
_call_in_thread((behavior,))
else:
self._state.times_to_behaviors[time].append(behavior)
return _Future(self._state, behavior, time)
def sleep_for(self, duration):
if 0 < duration:
with self._state.condition:
self._time += duration
delta = _process(self._state, self._time)
_call_in_thread(delta.mature_behaviors)
def sleep_until(self, time):
with self._state.condition:
if self._time < time:
self._time = time
delta = _process(self._state, self._time)
_call_in_thread(delta.mature_behaviors)
| 7,256
| 30.012821
| 76
|
py
|
grpc
|
grpc-master/src/python/grpcio_testing/grpc_testing/__init__.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Objects for use in testing gRPC Python-using application code."""
import abc
from google.protobuf import descriptor
import grpc
class UnaryUnaryChannelRpc(abc.ABC):
"""Fixture for a unary-unary RPC invoked by a system under test.
Enables users to "play server" for the RPC.
"""
@abc.abstractmethod
def send_initial_metadata(self, initial_metadata):
"""Sends the RPC's initial metadata to the system under test.
Args:
initial_metadata: The RPC's initial metadata to be "sent" to
the system under test.
"""
raise NotImplementedError()
@abc.abstractmethod
def cancelled(self):
"""Blocks until the system under test has cancelled the RPC."""
raise NotImplementedError()
@abc.abstractmethod
def terminate(self, response, trailing_metadata, code, details):
"""Terminates the RPC.
Args:
response: The response for the RPC.
trailing_metadata: The RPC's trailing metadata.
code: The RPC's status code.
details: The RPC's status details.
"""
raise NotImplementedError()
class UnaryStreamChannelRpc(abc.ABC):
"""Fixture for a unary-stream RPC invoked by a system under test.
Enables users to "play server" for the RPC.
"""
@abc.abstractmethod
def send_initial_metadata(self, initial_metadata):
"""Sends the RPC's initial metadata to the system under test.
Args:
initial_metadata: The RPC's initial metadata to be "sent" to
the system under test.
"""
raise NotImplementedError()
@abc.abstractmethod
def send_response(self, response):
"""Sends a response to the system under test.
Args:
response: A response message to be "sent" to the system under test.
"""
raise NotImplementedError()
@abc.abstractmethod
def cancelled(self):
"""Blocks until the system under test has cancelled the RPC."""
raise NotImplementedError()
@abc.abstractmethod
def terminate(self, trailing_metadata, code, details):
"""Terminates the RPC.
Args:
trailing_metadata: The RPC's trailing metadata.
code: The RPC's status code.
details: The RPC's status details.
"""
raise NotImplementedError()
class StreamUnaryChannelRpc(abc.ABC):
"""Fixture for a stream-unary RPC invoked by a system under test.
Enables users to "play server" for the RPC.
"""
@abc.abstractmethod
def send_initial_metadata(self, initial_metadata):
"""Sends the RPC's initial metadata to the system under test.
Args:
initial_metadata: The RPC's initial metadata to be "sent" to
the system under test.
"""
raise NotImplementedError()
@abc.abstractmethod
def take_request(self):
"""Draws one of the requests added to the RPC by the system under test.
This method blocks until the system under test has added to the RPC
the request to be returned.
Successive calls to this method return requests in the same order in
which the system under test added them to the RPC.
Returns:
A request message added to the RPC by the system under test.
"""
raise NotImplementedError()
@abc.abstractmethod
def requests_closed(self):
"""Blocks until the system under test has closed the request stream."""
raise NotImplementedError()
@abc.abstractmethod
def cancelled(self):
"""Blocks until the system under test has cancelled the RPC."""
raise NotImplementedError()
@abc.abstractmethod
def terminate(self, response, trailing_metadata, code, details):
"""Terminates the RPC.
Args:
response: The response for the RPC.
trailing_metadata: The RPC's trailing metadata.
code: The RPC's status code.
details: The RPC's status details.
"""
raise NotImplementedError()
class StreamStreamChannelRpc(abc.ABC):
"""Fixture for a stream-stream RPC invoked by a system under test.
Enables users to "play server" for the RPC.
"""
@abc.abstractmethod
def send_initial_metadata(self, initial_metadata):
"""Sends the RPC's initial metadata to the system under test.
Args:
initial_metadata: The RPC's initial metadata to be "sent" to the
system under test.
"""
raise NotImplementedError()
@abc.abstractmethod
def take_request(self):
"""Draws one of the requests added to the RPC by the system under test.
This method blocks until the system under test has added to the RPC
the request to be returned.
Successive calls to this method return requests in the same order in
which the system under test added them to the RPC.
Returns:
A request message added to the RPC by the system under test.
"""
raise NotImplementedError()
@abc.abstractmethod
def send_response(self, response):
"""Sends a response to the system under test.
Args:
response: A response messages to be "sent" to the system under test.
"""
raise NotImplementedError()
@abc.abstractmethod
def requests_closed(self):
"""Blocks until the system under test has closed the request stream."""
raise NotImplementedError()
@abc.abstractmethod
def cancelled(self):
"""Blocks until the system under test has cancelled the RPC."""
raise NotImplementedError()
@abc.abstractmethod
def terminate(self, trailing_metadata, code, details):
"""Terminates the RPC.
Args:
trailing_metadata: The RPC's trailing metadata.
code: The RPC's status code.
details: The RPC's status details.
"""
raise NotImplementedError()
class Channel(grpc.Channel, metaclass=abc.ABCMeta):
"""A grpc.Channel double with which to test a system that invokes RPCs."""
@abc.abstractmethod
def take_unary_unary(self, method_descriptor):
"""Draws an RPC currently being made by the system under test.
If the given descriptor does not identify any RPC currently being made
by the system under test, this method blocks until the system under
test invokes such an RPC.
Args:
method_descriptor: A descriptor.MethodDescriptor describing a
unary-unary RPC method.
Returns:
A (invocation_metadata, request, unary_unary_channel_rpc) tuple of
the RPC's invocation metadata, its request, and a
UnaryUnaryChannelRpc with which to "play server" for the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def take_unary_stream(self, method_descriptor):
"""Draws an RPC currently being made by the system under test.
If the given descriptor does not identify any RPC currently being made
by the system under test, this method blocks until the system under
test invokes such an RPC.
Args:
method_descriptor: A descriptor.MethodDescriptor describing a
unary-stream RPC method.
Returns:
A (invocation_metadata, request, unary_stream_channel_rpc) tuple of
the RPC's invocation metadata, its request, and a
UnaryStreamChannelRpc with which to "play server" for the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def take_stream_unary(self, method_descriptor):
"""Draws an RPC currently being made by the system under test.
If the given descriptor does not identify any RPC currently being made
by the system under test, this method blocks until the system under
test invokes such an RPC.
Args:
method_descriptor: A descriptor.MethodDescriptor describing a
stream-unary RPC method.
Returns:
A (invocation_metadata, stream_unary_channel_rpc) tuple of the RPC's
invocation metadata and a StreamUnaryChannelRpc with which to "play
server" for the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def take_stream_stream(self, method_descriptor):
"""Draws an RPC currently being made by the system under test.
If the given descriptor does not identify any RPC currently being made
by the system under test, this method blocks until the system under
test invokes such an RPC.
Args:
method_descriptor: A descriptor.MethodDescriptor describing a
stream-stream RPC method.
Returns:
A (invocation_metadata, stream_stream_channel_rpc) tuple of the RPC's
invocation metadata and a StreamStreamChannelRpc with which to
"play server" for the RPC.
"""
raise NotImplementedError()
class UnaryUnaryServerRpc(abc.ABC):
"""Fixture for a unary-unary RPC serviced by a system under test.
Enables users to "play client" for the RPC.
"""
@abc.abstractmethod
def initial_metadata(self):
"""Accesses the initial metadata emitted by the system under test.
This method blocks until the system under test has added initial
metadata to the RPC (or has provided one or more response messages or
has terminated the RPC, either of which will cause gRPC Python to
synthesize initial metadata for the RPC).
Returns:
The initial metadata for the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def cancel(self):
"""Cancels the RPC."""
raise NotImplementedError()
@abc.abstractmethod
def termination(self):
"""Blocks until the system under test has terminated the RPC.
Returns:
A (response, trailing_metadata, code, details) sequence with the RPC's
response, trailing metadata, code, and details.
"""
raise NotImplementedError()
class UnaryStreamServerRpc(abc.ABC):
"""Fixture for a unary-stream RPC serviced by a system under test.
Enables users to "play client" for the RPC.
"""
@abc.abstractmethod
def initial_metadata(self):
"""Accesses the initial metadata emitted by the system under test.
This method blocks until the system under test has added initial
metadata to the RPC (or has provided one or more response messages or
has terminated the RPC, either of which will cause gRPC Python to
synthesize initial metadata for the RPC).
Returns:
The initial metadata for the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def take_response(self):
"""Draws one of the responses added to the RPC by the system under test.
Successive calls to this method return responses in the same order in
which the system under test added them to the RPC.
Returns:
A response message added to the RPC by the system under test.
"""
raise NotImplementedError()
@abc.abstractmethod
def cancel(self):
"""Cancels the RPC."""
raise NotImplementedError()
@abc.abstractmethod
def termination(self):
"""Blocks until the system under test has terminated the RPC.
Returns:
A (trailing_metadata, code, details) sequence with the RPC's trailing
metadata, code, and details.
"""
raise NotImplementedError()
class StreamUnaryServerRpc(abc.ABC):
"""Fixture for a stream-unary RPC serviced by a system under test.
Enables users to "play client" for the RPC.
"""
@abc.abstractmethod
def initial_metadata(self):
"""Accesses the initial metadata emitted by the system under test.
This method blocks until the system under test has added initial
metadata to the RPC (or has provided one or more response messages or
has terminated the RPC, either of which will cause gRPC Python to
synthesize initial metadata for the RPC).
Returns:
The initial metadata for the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def send_request(self, request):
"""Sends a request to the system under test.
Args:
request: A request message for the RPC to be "sent" to the system
under test.
"""
raise NotImplementedError()
@abc.abstractmethod
def requests_closed(self):
"""Indicates the end of the RPC's request stream."""
raise NotImplementedError()
@abc.abstractmethod
def cancel(self):
"""Cancels the RPC."""
raise NotImplementedError()
@abc.abstractmethod
def termination(self):
"""Blocks until the system under test has terminated the RPC.
Returns:
A (response, trailing_metadata, code, details) sequence with the RPC's
response, trailing metadata, code, and details.
"""
raise NotImplementedError()
class StreamStreamServerRpc(abc.ABC):
"""Fixture for a stream-stream RPC serviced by a system under test.
Enables users to "play client" for the RPC.
"""
@abc.abstractmethod
def initial_metadata(self):
"""Accesses the initial metadata emitted by the system under test.
This method blocks until the system under test has added initial
metadata to the RPC (or has provided one or more response messages or
has terminated the RPC, either of which will cause gRPC Python to
synthesize initial metadata for the RPC).
Returns:
The initial metadata for the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def send_request(self, request):
"""Sends a request to the system under test.
Args:
request: A request message for the RPC to be "sent" to the system
under test.
"""
raise NotImplementedError()
@abc.abstractmethod
def requests_closed(self):
"""Indicates the end of the RPC's request stream."""
raise NotImplementedError()
@abc.abstractmethod
def take_response(self):
"""Draws one of the responses added to the RPC by the system under test.
Successive calls to this method return responses in the same order in
which the system under test added them to the RPC.
Returns:
A response message added to the RPC by the system under test.
"""
raise NotImplementedError()
@abc.abstractmethod
def cancel(self):
"""Cancels the RPC."""
raise NotImplementedError()
@abc.abstractmethod
def termination(self):
"""Blocks until the system under test has terminated the RPC.
Returns:
A (trailing_metadata, code, details) sequence with the RPC's trailing
metadata, code, and details.
"""
raise NotImplementedError()
class Server(abc.ABC):
"""A server with which to test a system that services RPCs."""
@abc.abstractmethod
def invoke_unary_unary(
self, method_descriptor, invocation_metadata, request, timeout
):
"""Invokes an RPC to be serviced by the system under test.
Args:
method_descriptor: A descriptor.MethodDescriptor describing a unary-unary
RPC method.
invocation_metadata: The RPC's invocation metadata.
request: The RPC's request.
timeout: A duration of time in seconds for the RPC or None to
indicate that the RPC has no time limit.
Returns:
A UnaryUnaryServerRpc with which to "play client" for the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def invoke_unary_stream(
self, method_descriptor, invocation_metadata, request, timeout
):
"""Invokes an RPC to be serviced by the system under test.
Args:
method_descriptor: A descriptor.MethodDescriptor describing a unary-stream
RPC method.
invocation_metadata: The RPC's invocation metadata.
request: The RPC's request.
timeout: A duration of time in seconds for the RPC or None to
indicate that the RPC has no time limit.
Returns:
A UnaryStreamServerRpc with which to "play client" for the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def invoke_stream_unary(
self, method_descriptor, invocation_metadata, timeout
):
"""Invokes an RPC to be serviced by the system under test.
Args:
method_descriptor: A descriptor.MethodDescriptor describing a stream-unary
RPC method.
invocation_metadata: The RPC's invocation metadata.
timeout: A duration of time in seconds for the RPC or None to
indicate that the RPC has no time limit.
Returns:
A StreamUnaryServerRpc with which to "play client" for the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def invoke_stream_stream(
self, method_descriptor, invocation_metadata, timeout
):
"""Invokes an RPC to be serviced by the system under test.
Args:
method_descriptor: A descriptor.MethodDescriptor describing a stream-stream
RPC method.
invocation_metadata: The RPC's invocation metadata.
timeout: A duration of time in seconds for the RPC or None to
indicate that the RPC has no time limit.
Returns:
A StreamStreamServerRpc with which to "play client" for the RPC.
"""
raise NotImplementedError()
class Time(abc.ABC):
"""A simulation of time.
Implementations needn't be connected with real time as provided by the
Python interpreter, but as long as systems under test use
RpcContext.is_active and RpcContext.time_remaining for querying RPC liveness
implementations may be used to change passage of time in tests.
"""
@abc.abstractmethod
def time(self):
"""Accesses the current test time.
Returns:
The current test time (over which this object has authority).
"""
raise NotImplementedError()
@abc.abstractmethod
def call_in(self, behavior, delay):
"""Adds a behavior to be called after some time.
Args:
behavior: A behavior to be called with no arguments.
delay: A duration of time in seconds after which to call the behavior.
Returns:
A grpc.Future with which the call of the behavior may be cancelled
before it is executed.
"""
raise NotImplementedError()
@abc.abstractmethod
def call_at(self, behavior, time):
"""Adds a behavior to be called at a specific time.
Args:
behavior: A behavior to be called with no arguments.
time: The test time at which to call the behavior.
Returns:
A grpc.Future with which the call of the behavior may be cancelled
before it is executed.
"""
raise NotImplementedError()
@abc.abstractmethod
def sleep_for(self, duration):
"""Blocks for some length of test time.
Args:
duration: A duration of test time in seconds for which to block.
"""
raise NotImplementedError()
@abc.abstractmethod
def sleep_until(self, time):
"""Blocks until some test time.
Args:
time: The test time until which to block.
"""
raise NotImplementedError()
def strict_real_time():
"""Creates a Time backed by the Python interpreter's time.
The returned instance will be "strict" with respect to callbacks
submitted to it: it will ensure that all callbacks registered to
be called at time t have been called before it describes the time
as having advanced beyond t.
Returns:
A Time backed by the "system" (Python interpreter's) time.
"""
from grpc_testing import _time
return _time.StrictRealTime()
def strict_fake_time(now):
"""Creates a Time that can be manipulated by test code.
The returned instance maintains an internal representation of time
independent of real time. This internal representation only advances
when user code calls the instance's sleep_for and sleep_until methods.
The returned instance will be "strict" with respect to callbacks
submitted to it: it will ensure that all callbacks registered to
be called at time t have been called before it describes the time
as having advanced beyond t.
Returns:
A Time that simulates the passage of time.
"""
from grpc_testing import _time
return _time.StrictFakeTime(now)
def channel(service_descriptors, time):
"""Creates a Channel for use in tests of a gRPC Python-using system.
Args:
service_descriptors: An iterable of descriptor.ServiceDescriptors
describing the RPCs that will be made on the returned Channel by the
system under test.
time: A Time to be used for tests.
Returns:
A Channel for use in tests.
"""
from grpc_testing import _channel
return _channel.testing_channel(service_descriptors, time)
def server_from_dictionary(descriptors_to_servicers, time):
"""Creates a Server for use in tests of a gRPC Python-using system.
Args:
descriptors_to_servicers: A dictionary from descriptor.ServiceDescriptors
defining RPC services to servicer objects (usually instances of classes
that implement "Servicer" interfaces defined in generated "_pb2_grpc"
modules) implementing those services.
time: A Time to be used for tests.
Returns:
A Server for use in tests.
"""
from grpc_testing import _server
return _server.server_from_dictionary(descriptors_to_servicers, time)
| 22,799
| 31.386364
| 85
|
py
|
grpc
|
grpc-master/src/python/grpcio_testing/grpc_testing/_common.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common interfaces and implementation."""
import abc
import collections
def _fuss(tuplified_metadata):
return tuplified_metadata + (
(
"grpc.metadata_added_by_runtime",
"gRPC is allowed to add metadata in transmission and does so.",
),
)
FUSSED_EMPTY_METADATA = _fuss(())
def fuss_with_metadata(metadata):
if metadata is None:
return FUSSED_EMPTY_METADATA
else:
return _fuss(tuple(metadata))
def rpc_names(service_descriptors):
rpc_names_to_descriptors = {}
for service_descriptor in service_descriptors:
for method_descriptor in service_descriptor.methods_by_name.values():
rpc_name = "/{}/{}".format(
service_descriptor.full_name, method_descriptor.name
)
rpc_names_to_descriptors[rpc_name] = method_descriptor
return rpc_names_to_descriptors
class ChannelRpcRead(
collections.namedtuple(
"ChannelRpcRead",
(
"response",
"trailing_metadata",
"code",
"details",
),
)
):
pass
class ChannelRpcHandler(abc.ABC):
@abc.abstractmethod
def initial_metadata(self):
raise NotImplementedError()
@abc.abstractmethod
def add_request(self, request):
raise NotImplementedError()
@abc.abstractmethod
def close_requests(self):
raise NotImplementedError()
@abc.abstractmethod
def take_response(self):
raise NotImplementedError()
@abc.abstractmethod
def cancel(self, code, details):
raise NotImplementedError()
@abc.abstractmethod
def termination(self):
raise NotImplementedError()
@abc.abstractmethod
def is_active(self):
raise NotImplementedError()
@abc.abstractmethod
def time_remaining(self):
raise NotImplementedError()
@abc.abstractmethod
def add_callback(self, callback):
raise NotImplementedError()
class ChannelHandler(abc.ABC):
@abc.abstractmethod
def invoke_rpc(
self,
method_full_rpc_name,
invocation_metadata,
requests,
requests_closed,
timeout,
):
raise NotImplementedError()
class ServerRpcRead(
collections.namedtuple(
"ServerRpcRead",
(
"request",
"requests_closed",
"terminated",
),
)
):
pass
REQUESTS_CLOSED = ServerRpcRead(None, True, False)
TERMINATED = ServerRpcRead(None, False, True)
class ServerRpcHandler(abc.ABC):
@abc.abstractmethod
def send_initial_metadata(self, initial_metadata):
raise NotImplementedError()
@abc.abstractmethod
def take_request(self):
raise NotImplementedError()
@abc.abstractmethod
def add_response(self, response):
raise NotImplementedError()
@abc.abstractmethod
def send_termination(self, trailing_metadata, code, details):
raise NotImplementedError()
@abc.abstractmethod
def add_termination_callback(self, callback):
raise NotImplementedError()
class Serverish(abc.ABC):
@abc.abstractmethod
def invoke_unary_unary(
self, method_descriptor, handler, invocation_metadata, request, deadline
):
raise NotImplementedError()
@abc.abstractmethod
def invoke_unary_stream(
self, method_descriptor, handler, invocation_metadata, request, deadline
):
raise NotImplementedError()
@abc.abstractmethod
def invoke_stream_unary(
self, method_descriptor, handler, invocation_metadata, deadline
):
raise NotImplementedError()
@abc.abstractmethod
def invoke_stream_stream(
self, method_descriptor, handler, invocation_metadata, deadline
):
raise NotImplementedError()
| 4,406
| 23.758427
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_testing/grpc_testing/_channel/_channel_rpc.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import grpc_testing
class _UnaryUnary(grpc_testing.UnaryUnaryChannelRpc):
def __init__(self, rpc_state):
self._rpc_state = rpc_state
def send_initial_metadata(self, initial_metadata):
self._rpc_state.send_initial_metadata(initial_metadata)
def cancelled(self):
self._rpc_state.cancelled()
def terminate(self, response, trailing_metadata, code, details):
self._rpc_state.terminate_with_response(
response, trailing_metadata, code, details
)
class _UnaryStream(grpc_testing.UnaryStreamChannelRpc):
def __init__(self, rpc_state):
self._rpc_state = rpc_state
def send_initial_metadata(self, initial_metadata):
self._rpc_state.send_initial_metadata(initial_metadata)
def send_response(self, response):
self._rpc_state.send_response(response)
def cancelled(self):
self._rpc_state.cancelled()
def terminate(self, trailing_metadata, code, details):
self._rpc_state.terminate(trailing_metadata, code, details)
class _StreamUnary(grpc_testing.StreamUnaryChannelRpc):
def __init__(self, rpc_state):
self._rpc_state = rpc_state
def send_initial_metadata(self, initial_metadata):
self._rpc_state.send_initial_metadata(initial_metadata)
def take_request(self):
return self._rpc_state.take_request()
def requests_closed(self):
return self._rpc_state.requests_closed()
def cancelled(self):
self._rpc_state.cancelled()
def terminate(self, response, trailing_metadata, code, details):
self._rpc_state.terminate_with_response(
response, trailing_metadata, code, details
)
class _StreamStream(grpc_testing.StreamStreamChannelRpc):
def __init__(self, rpc_state):
self._rpc_state = rpc_state
def send_initial_metadata(self, initial_metadata):
self._rpc_state.send_initial_metadata(initial_metadata)
def take_request(self):
return self._rpc_state.take_request()
def send_response(self, response):
self._rpc_state.send_response(response)
def requests_closed(self):
return self._rpc_state.requests_closed()
def cancelled(self):
self._rpc_state.cancelled()
def terminate(self, trailing_metadata, code, details):
self._rpc_state.terminate(trailing_metadata, code, details)
def unary_unary(channel_state, method_descriptor):
rpc_state = channel_state.take_rpc_state(method_descriptor)
(
invocation_metadata,
request,
) = rpc_state.take_invocation_metadata_and_request()
return invocation_metadata, request, _UnaryUnary(rpc_state)
def unary_stream(channel_state, method_descriptor):
rpc_state = channel_state.take_rpc_state(method_descriptor)
(
invocation_metadata,
request,
) = rpc_state.take_invocation_metadata_and_request()
return invocation_metadata, request, _UnaryStream(rpc_state)
def stream_unary(channel_state, method_descriptor):
rpc_state = channel_state.take_rpc_state(method_descriptor)
return rpc_state.take_invocation_metadata(), _StreamUnary(rpc_state)
def stream_stream(channel_state, method_descriptor):
rpc_state = channel_state.take_rpc_state(method_descriptor)
return rpc_state.take_invocation_metadata(), _StreamStream(rpc_state)
| 3,920
| 31.139344
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_testing/grpc_testing/_channel/_channel_state.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import threading
from grpc_testing import _common
from grpc_testing._channel import _rpc_state
class State(_common.ChannelHandler):
def __init__(self):
self._condition = threading.Condition()
self._rpc_states = collections.defaultdict(list)
def invoke_rpc(
self,
method_full_rpc_name,
invocation_metadata,
requests,
requests_closed,
timeout,
):
rpc_state = _rpc_state.State(
invocation_metadata, requests, requests_closed
)
with self._condition:
self._rpc_states[method_full_rpc_name].append(rpc_state)
self._condition.notify_all()
return rpc_state
def take_rpc_state(self, method_descriptor):
method_full_rpc_name = "/{}/{}".format(
method_descriptor.containing_service.full_name,
method_descriptor.name,
)
with self._condition:
while True:
method_rpc_states = self._rpc_states[method_full_rpc_name]
if method_rpc_states:
return method_rpc_states.pop(0)
else:
self._condition.wait()
| 1,782
| 31.418182
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_testing/grpc_testing/_channel/_rpc_state.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import threading
import grpc
from grpc_testing import _common
class State(_common.ChannelRpcHandler):
def __init__(self, invocation_metadata, requests, requests_closed):
self._condition = threading.Condition()
self._invocation_metadata = invocation_metadata
self._requests = requests
self._requests_closed = requests_closed
self._initial_metadata = None
self._responses = []
self._trailing_metadata = None
self._code = None
self._details = None
def initial_metadata(self):
with self._condition:
while True:
if self._initial_metadata is None:
if self._code is None:
self._condition.wait()
else:
return _common.FUSSED_EMPTY_METADATA
else:
return self._initial_metadata
def add_request(self, request):
with self._condition:
if self._code is None and not self._requests_closed:
self._requests.append(request)
self._condition.notify_all()
return True
else:
return False
def close_requests(self):
with self._condition:
if self._code is None and not self._requests_closed:
self._requests_closed = True
self._condition.notify_all()
def take_response(self):
with self._condition:
while True:
if self._code is grpc.StatusCode.OK:
if self._responses:
response = self._responses.pop(0)
return _common.ChannelRpcRead(
response, None, None, None
)
else:
return _common.ChannelRpcRead(
None,
self._trailing_metadata,
grpc.StatusCode.OK,
self._details,
)
elif self._code is None:
if self._responses:
response = self._responses.pop(0)
return _common.ChannelRpcRead(
response, None, None, None
)
else:
self._condition.wait()
else:
return _common.ChannelRpcRead(
None, self._trailing_metadata, self._code, self._details
)
def termination(self):
with self._condition:
while True:
if self._code is None:
self._condition.wait()
else:
return self._trailing_metadata, self._code, self._details
def cancel(self, code, details):
with self._condition:
if self._code is None:
if self._initial_metadata is None:
self._initial_metadata = _common.FUSSED_EMPTY_METADATA
self._trailing_metadata = _common.FUSSED_EMPTY_METADATA
self._code = code
self._details = details
self._condition.notify_all()
return True
else:
return False
def take_invocation_metadata(self):
with self._condition:
if self._invocation_metadata is None:
raise ValueError("Expected invocation metadata!")
else:
invocation_metadata = self._invocation_metadata
self._invocation_metadata = None
return invocation_metadata
def take_invocation_metadata_and_request(self):
with self._condition:
if self._invocation_metadata is None:
raise ValueError("Expected invocation metadata!")
elif not self._requests:
raise ValueError("Expected at least one request!")
else:
invocation_metadata = self._invocation_metadata
self._invocation_metadata = None
return invocation_metadata, self._requests.pop(0)
def send_initial_metadata(self, initial_metadata):
with self._condition:
self._initial_metadata = _common.fuss_with_metadata(
initial_metadata
)
self._condition.notify_all()
def take_request(self):
with self._condition:
while True:
if self._requests:
return self._requests.pop(0)
else:
self._condition.wait()
def requests_closed(self):
with self._condition:
while True:
if self._requests_closed:
return
else:
self._condition.wait()
def send_response(self, response):
with self._condition:
if self._code is None:
self._responses.append(response)
self._condition.notify_all()
def terminate_with_response(
self, response, trailing_metadata, code, details
):
with self._condition:
if self._initial_metadata is None:
self._initial_metadata = _common.FUSSED_EMPTY_METADATA
self._responses.append(response)
self._trailing_metadata = _common.fuss_with_metadata(
trailing_metadata
)
self._code = code
self._details = details
self._condition.notify_all()
def terminate(self, trailing_metadata, code, details):
with self._condition:
if self._initial_metadata is None:
self._initial_metadata = _common.FUSSED_EMPTY_METADATA
self._trailing_metadata = _common.fuss_with_metadata(
trailing_metadata
)
self._code = code
self._details = details
self._condition.notify_all()
def cancelled(self):
with self._condition:
while True:
if self._code is grpc.StatusCode.CANCELLED:
return
elif self._code is None:
self._condition.wait()
else:
raise ValueError(
"Status code unexpectedly {}!".format(self._code)
)
def is_active(self):
raise NotImplementedError()
def time_remaining(self):
raise NotImplementedError()
def add_callback(self, callback):
raise NotImplementedError()
| 7,246
| 34.699507
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_testing/grpc_testing/_channel/_invocation.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import threading
import grpc
_NOT_YET_OBSERVED = object()
logging.basicConfig()
_LOGGER = logging.getLogger(__name__)
def _cancel(handler):
return handler.cancel(grpc.StatusCode.CANCELLED, "Locally cancelled!")
def _is_active(handler):
return handler.is_active()
def _time_remaining(unused_handler):
raise NotImplementedError()
def _add_callback(handler, callback):
return handler.add_callback(callback)
def _initial_metadata(handler):
return handler.initial_metadata()
def _trailing_metadata(handler):
trailing_metadata, unused_code, unused_details = handler.termination()
return trailing_metadata
def _code(handler):
unused_trailing_metadata, code, unused_details = handler.termination()
return code
def _details(handler):
unused_trailing_metadata, unused_code, details = handler.termination()
return details
class _Call(grpc.Call):
def __init__(self, handler):
self._handler = handler
def cancel(self):
_cancel(self._handler)
def is_active(self):
return _is_active(self._handler)
def time_remaining(self):
return _time_remaining(self._handler)
def add_callback(self, callback):
return _add_callback(self._handler, callback)
def initial_metadata(self):
return _initial_metadata(self._handler)
def trailing_metadata(self):
return _trailing_metadata(self._handler)
def code(self):
return _code(self._handler)
def details(self):
return _details(self._handler)
class _RpcErrorCall(grpc.RpcError, grpc.Call):
def __init__(self, handler):
self._handler = handler
def cancel(self):
_cancel(self._handler)
def is_active(self):
return _is_active(self._handler)
def time_remaining(self):
return _time_remaining(self._handler)
def add_callback(self, callback):
return _add_callback(self._handler, callback)
def initial_metadata(self):
return _initial_metadata(self._handler)
def trailing_metadata(self):
return _trailing_metadata(self._handler)
def code(self):
return _code(self._handler)
def details(self):
return _details(self._handler)
def _next(handler):
read = handler.take_response()
if read.code is None:
return read.response
elif read.code is grpc.StatusCode.OK:
raise StopIteration()
else:
raise _RpcErrorCall(handler)
class _HandlerExtras(object):
def __init__(self):
self.condition = threading.Condition()
self.unary_response = _NOT_YET_OBSERVED
self.cancelled = False
def _with_extras_cancel(handler, extras):
with extras.condition:
if handler.cancel(grpc.StatusCode.CANCELLED, "Locally cancelled!"):
extras.cancelled = True
return True
else:
return False
def _extras_without_cancelled(extras):
with extras.condition:
return extras.cancelled
def _running(handler):
return handler.is_active()
def _done(handler):
return not handler.is_active()
def _with_extras_unary_response(handler, extras):
with extras.condition:
if extras.unary_response is _NOT_YET_OBSERVED:
read = handler.take_response()
if read.code is None:
extras.unary_response = read.response
return read.response
else:
raise _RpcErrorCall(handler)
else:
return extras.unary_response
def _exception(unused_handler):
raise NotImplementedError("TODO!")
def _traceback(unused_handler):
raise NotImplementedError("TODO!")
def _add_done_callback(handler, callback, future):
adapted_callback = lambda: callback(future)
if not handler.add_callback(adapted_callback):
callback(future)
class _FutureCall(grpc.Future, grpc.Call):
def __init__(self, handler, extras):
self._handler = handler
self._extras = extras
def cancel(self):
return _with_extras_cancel(self._handler, self._extras)
def cancelled(self):
return _extras_without_cancelled(self._extras)
def running(self):
return _running(self._handler)
def done(self):
return _done(self._handler)
def result(self):
return _with_extras_unary_response(self._handler, self._extras)
def exception(self):
return _exception(self._handler)
def traceback(self):
return _traceback(self._handler)
def add_done_callback(self, fn):
_add_done_callback(self._handler, fn, self)
def is_active(self):
return _is_active(self._handler)
def time_remaining(self):
return _time_remaining(self._handler)
def add_callback(self, callback):
return _add_callback(self._handler, callback)
def initial_metadata(self):
return _initial_metadata(self._handler)
def trailing_metadata(self):
return _trailing_metadata(self._handler)
def code(self):
return _code(self._handler)
def details(self):
return _details(self._handler)
def consume_requests(request_iterator, handler):
def _consume():
while True:
try:
request = next(request_iterator)
added = handler.add_request(request)
if not added:
break
except StopIteration:
handler.close_requests()
break
except Exception: # pylint: disable=broad-except
details = "Exception iterating requests!"
_LOGGER.exception(details)
handler.cancel(grpc.StatusCode.UNKNOWN, details)
consumption = threading.Thread(target=_consume)
consumption.start()
def blocking_unary_response(handler):
read = handler.take_response()
if read.code is None:
unused_trailing_metadata, code, unused_details = handler.termination()
if code is grpc.StatusCode.OK:
return read.response
else:
raise _RpcErrorCall(handler)
else:
raise _RpcErrorCall(handler)
def blocking_unary_response_with_call(handler):
read = handler.take_response()
if read.code is None:
unused_trailing_metadata, code, unused_details = handler.termination()
if code is grpc.StatusCode.OK:
return read.response, _Call(handler)
else:
raise _RpcErrorCall(handler)
else:
raise _RpcErrorCall(handler)
def future_call(handler):
return _FutureCall(handler, _HandlerExtras())
class ResponseIteratorCall(grpc.Call):
def __init__(self, handler):
self._handler = handler
def __iter__(self):
return self
def __next__(self):
return _next(self._handler)
def next(self):
return _next(self._handler)
def cancel(self):
_cancel(self._handler)
def is_active(self):
return _is_active(self._handler)
def time_remaining(self):
return _time_remaining(self._handler)
def add_callback(self, callback):
return _add_callback(self._handler, callback)
def initial_metadata(self):
return _initial_metadata(self._handler)
def trailing_metadata(self):
return _trailing_metadata(self._handler)
def code(self):
return _code(self._handler)
def details(self):
return _details(self._handler)
| 8,036
| 24.194357
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_testing/grpc_testing/_channel/_channel.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import grpc_testing
from grpc_testing._channel import _channel_rpc
from grpc_testing._channel import _multi_callable
# All serializer and deserializer parameters are not (yet) used by this
# test infrastructure.
# pylint: disable=unused-argument
class TestingChannel(grpc_testing.Channel):
def __init__(self, time, state):
self._time = time
self._state = state
def subscribe(self, callback, try_to_connect=False):
raise NotImplementedError()
def unsubscribe(self, callback):
raise NotImplementedError()
def unary_unary(
self, method, request_serializer=None, response_deserializer=None
):
return _multi_callable.UnaryUnary(method, self._state)
def unary_stream(
self, method, request_serializer=None, response_deserializer=None
):
return _multi_callable.UnaryStream(method, self._state)
def stream_unary(
self, method, request_serializer=None, response_deserializer=None
):
return _multi_callable.StreamUnary(method, self._state)
def stream_stream(
self, method, request_serializer=None, response_deserializer=None
):
return _multi_callable.StreamStream(method, self._state)
def _close(self):
# TODO(https://github.com/grpc/grpc/issues/12531): Decide what
# action to take here, if any?
pass
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._close()
return False
def close(self):
self._close()
def take_unary_unary(self, method_descriptor):
return _channel_rpc.unary_unary(self._state, method_descriptor)
def take_unary_stream(self, method_descriptor):
return _channel_rpc.unary_stream(self._state, method_descriptor)
def take_stream_unary(self, method_descriptor):
return _channel_rpc.stream_unary(self._state, method_descriptor)
def take_stream_stream(self, method_descriptor):
return _channel_rpc.stream_stream(self._state, method_descriptor)
# pylint: enable=unused-argument
| 2,671
| 31.192771
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_testing/grpc_testing/_channel/__init__.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from grpc_testing._channel import _channel
from grpc_testing._channel import _channel_state
# descriptors is reserved for later use.
# pylint: disable=unused-argument
def testing_channel(descriptors, time):
return _channel.TestingChannel(time, _channel_state.State())
# pylint: enable=unused-argument
| 887
| 33.153846
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_testing/grpc_testing/_channel/_multi_callable.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import grpc
from grpc_testing import _common
from grpc_testing._channel import _invocation
# All per-call credentials parameters are unused by this test infrastructure.
# pylint: disable=unused-argument
class UnaryUnary(grpc.UnaryUnaryMultiCallable):
def __init__(self, method_full_rpc_name, channel_handler):
self._method_full_rpc_name = method_full_rpc_name
self._channel_handler = channel_handler
def __call__(self, request, timeout=None, metadata=None, credentials=None):
rpc_handler = self._channel_handler.invoke_rpc(
self._method_full_rpc_name,
_common.fuss_with_metadata(metadata),
[request],
True,
timeout,
)
return _invocation.blocking_unary_response(rpc_handler)
def with_call(self, request, timeout=None, metadata=None, credentials=None):
rpc_handler = self._channel_handler.invoke_rpc(
self._method_full_rpc_name,
_common.fuss_with_metadata(metadata),
[request],
True,
timeout,
)
return _invocation.blocking_unary_response_with_call(rpc_handler)
def future(self, request, timeout=None, metadata=None, credentials=None):
rpc_handler = self._channel_handler.invoke_rpc(
self._method_full_rpc_name,
_common.fuss_with_metadata(metadata),
[request],
True,
timeout,
)
return _invocation.future_call(rpc_handler)
class UnaryStream(grpc.StreamStreamMultiCallable):
def __init__(self, method_full_rpc_name, channel_handler):
self._method_full_rpc_name = method_full_rpc_name
self._channel_handler = channel_handler
def __call__(self, request, timeout=None, metadata=None, credentials=None):
rpc_handler = self._channel_handler.invoke_rpc(
self._method_full_rpc_name,
_common.fuss_with_metadata(metadata),
[request],
True,
timeout,
)
return _invocation.ResponseIteratorCall(rpc_handler)
class StreamUnary(grpc.StreamUnaryMultiCallable):
def __init__(self, method_full_rpc_name, channel_handler):
self._method_full_rpc_name = method_full_rpc_name
self._channel_handler = channel_handler
def __call__(
self, request_iterator, timeout=None, metadata=None, credentials=None
):
rpc_handler = self._channel_handler.invoke_rpc(
self._method_full_rpc_name,
_common.fuss_with_metadata(metadata),
[],
False,
timeout,
)
_invocation.consume_requests(request_iterator, rpc_handler)
return _invocation.blocking_unary_response(rpc_handler)
def with_call(
self, request_iterator, timeout=None, metadata=None, credentials=None
):
rpc_handler = self._channel_handler.invoke_rpc(
self._method_full_rpc_name,
_common.fuss_with_metadata(metadata),
[],
False,
timeout,
)
_invocation.consume_requests(request_iterator, rpc_handler)
return _invocation.blocking_unary_response_with_call(rpc_handler)
def future(
self, request_iterator, timeout=None, metadata=None, credentials=None
):
rpc_handler = self._channel_handler.invoke_rpc(
self._method_full_rpc_name,
_common.fuss_with_metadata(metadata),
[],
False,
timeout,
)
_invocation.consume_requests(request_iterator, rpc_handler)
return _invocation.future_call(rpc_handler)
class StreamStream(grpc.StreamStreamMultiCallable):
def __init__(self, method_full_rpc_name, channel_handler):
self._method_full_rpc_name = method_full_rpc_name
self._channel_handler = channel_handler
def __call__(
self, request_iterator, timeout=None, metadata=None, credentials=None
):
rpc_handler = self._channel_handler.invoke_rpc(
self._method_full_rpc_name,
_common.fuss_with_metadata(metadata),
[],
False,
timeout,
)
_invocation.consume_requests(request_iterator, rpc_handler)
return _invocation.ResponseIteratorCall(rpc_handler)
# pylint: enable=unused-argument
| 4,933
| 34.496403
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_testing/grpc_testing/_server/_handler.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
import threading
import grpc
from grpc_testing import _common
_CLIENT_INACTIVE = object()
class Handler(_common.ServerRpcHandler):
@abc.abstractmethod
def initial_metadata(self):
raise NotImplementedError()
@abc.abstractmethod
def add_request(self, request):
raise NotImplementedError()
@abc.abstractmethod
def take_response(self):
raise NotImplementedError()
@abc.abstractmethod
def requests_closed(self):
raise NotImplementedError()
@abc.abstractmethod
def cancel(self):
raise NotImplementedError()
@abc.abstractmethod
def unary_response_termination(self):
raise NotImplementedError()
@abc.abstractmethod
def stream_response_termination(self):
raise NotImplementedError()
class _Handler(Handler):
def __init__(self, requests_closed):
self._condition = threading.Condition()
self._requests = []
self._requests_closed = requests_closed
self._initial_metadata = None
self._responses = []
self._trailing_metadata = None
self._code = None
self._details = None
self._unary_response = None
self._expiration_future = None
self._termination_callbacks = []
def send_initial_metadata(self, initial_metadata):
with self._condition:
self._initial_metadata = initial_metadata
self._condition.notify_all()
def take_request(self):
with self._condition:
while True:
if self._code is None:
if self._requests:
request = self._requests.pop(0)
self._condition.notify_all()
return _common.ServerRpcRead(request, False, False)
elif self._requests_closed:
return _common.REQUESTS_CLOSED
else:
self._condition.wait()
else:
return _common.TERMINATED
def is_active(self):
with self._condition:
return self._code is None
def add_response(self, response):
with self._condition:
self._responses.append(response)
self._condition.notify_all()
def send_termination(self, trailing_metadata, code, details):
with self._condition:
self._trailing_metadata = trailing_metadata
self._code = code
self._details = details
if self._expiration_future is not None:
self._expiration_future.cancel()
self._condition.notify_all()
def add_termination_callback(self, callback):
with self._condition:
if self._code is None:
self._termination_callbacks.append(callback)
return True
else:
return False
def initial_metadata(self):
with self._condition:
while True:
if self._initial_metadata is None:
if self._code is None:
self._condition.wait()
else:
raise ValueError(
"No initial metadata despite status code!"
)
else:
return self._initial_metadata
def add_request(self, request):
with self._condition:
self._requests.append(request)
self._condition.notify_all()
def take_response(self):
with self._condition:
while True:
if self._responses:
response = self._responses.pop(0)
self._condition.notify_all()
return response
elif self._code is None:
self._condition.wait()
else:
raise ValueError("No more responses!")
def requests_closed(self):
with self._condition:
self._requests_closed = True
self._condition.notify_all()
def cancel(self):
with self._condition:
if self._code is None:
self._code = _CLIENT_INACTIVE
termination_callbacks = self._termination_callbacks
self._termination_callbacks = None
if self._expiration_future is not None:
self._expiration_future.cancel()
self._condition.notify_all()
for termination_callback in termination_callbacks:
termination_callback()
def unary_response_termination(self):
with self._condition:
while True:
if self._code is _CLIENT_INACTIVE:
raise ValueError("Huh? Cancelled but wanting status?")
elif self._code is None:
self._condition.wait()
else:
if self._unary_response is None:
if self._responses:
self._unary_response = self._responses.pop(0)
return (
self._unary_response,
self._trailing_metadata,
self._code,
self._details,
)
def stream_response_termination(self):
with self._condition:
while True:
if self._code is _CLIENT_INACTIVE:
raise ValueError("Huh? Cancelled but wanting status?")
elif self._code is None:
self._condition.wait()
else:
return self._trailing_metadata, self._code, self._details
def expire(self):
with self._condition:
if self._code is None:
if self._initial_metadata is None:
self._initial_metadata = _common.FUSSED_EMPTY_METADATA
self._trailing_metadata = _common.FUSSED_EMPTY_METADATA
self._code = grpc.StatusCode.DEADLINE_EXCEEDED
self._details = "Took too much time!"
termination_callbacks = self._termination_callbacks
self._termination_callbacks = None
self._condition.notify_all()
for termination_callback in termination_callbacks:
termination_callback()
def set_expiration_future(self, expiration_future):
with self._condition:
self._expiration_future = expiration_future
def handler_without_deadline(requests_closed):
return _Handler(requests_closed)
def handler_with_deadline(requests_closed, time, deadline):
handler = _Handler(requests_closed)
expiration_future = time.call_at(handler.expire, deadline)
handler.set_expiration_future(expiration_future)
return handler
| 7,428
| 33.235023
| 77
|
py
|
grpc
|
grpc-master/src/python/grpcio_testing/grpc_testing/_server/_rpc.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import threading
import grpc
from grpc_testing import _common
logging.basicConfig()
_LOGGER = logging.getLogger(__name__)
class Rpc(object):
def __init__(self, handler, invocation_metadata):
self._condition = threading.Condition()
self._handler = handler
self._invocation_metadata = invocation_metadata
self._initial_metadata_sent = False
self._pending_trailing_metadata = None
self._pending_code = None
self._pending_details = None
self._callbacks = []
self._active = True
self._rpc_errors = []
def _ensure_initial_metadata_sent(self):
if not self._initial_metadata_sent:
self._handler.send_initial_metadata(_common.FUSSED_EMPTY_METADATA)
self._initial_metadata_sent = True
def _call_back(self):
callbacks = tuple(self._callbacks)
self._callbacks = None
def call_back():
for callback in callbacks:
try:
callback()
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Exception calling server-side callback!")
callback_calling_thread = threading.Thread(target=call_back)
callback_calling_thread.start()
def _terminate(self, trailing_metadata, code, details):
if self._active:
self._active = False
self._handler.send_termination(trailing_metadata, code, details)
self._call_back()
self._condition.notify_all()
def _complete(self):
if self._pending_trailing_metadata is None:
trailing_metadata = _common.FUSSED_EMPTY_METADATA
else:
trailing_metadata = self._pending_trailing_metadata
if self._pending_code is None:
code = grpc.StatusCode.OK
else:
code = self._pending_code
details = "" if self._pending_details is None else self._pending_details
self._terminate(trailing_metadata, code, details)
def _abort(self, code, details):
self._terminate(_common.FUSSED_EMPTY_METADATA, code, details)
def add_rpc_error(self, rpc_error):
with self._condition:
self._rpc_errors.append(rpc_error)
def application_cancel(self):
with self._condition:
self._abort(
grpc.StatusCode.CANCELLED,
"Cancelled by server-side application!",
)
def application_exception_abort(self, exception):
with self._condition:
if exception not in self._rpc_errors:
_LOGGER.exception("Exception calling application!")
self._abort(
grpc.StatusCode.UNKNOWN,
"Exception calling application: {}".format(exception),
)
def extrinsic_abort(self):
with self._condition:
if self._active:
self._active = False
self._call_back()
self._condition.notify_all()
def unary_response_complete(self, response):
with self._condition:
self._ensure_initial_metadata_sent()
self._handler.add_response(response)
self._complete()
def stream_response(self, response):
with self._condition:
self._ensure_initial_metadata_sent()
self._handler.add_response(response)
def stream_response_complete(self):
with self._condition:
self._ensure_initial_metadata_sent()
self._complete()
def send_initial_metadata(self, initial_metadata):
with self._condition:
if self._initial_metadata_sent:
return False
else:
self._handler.send_initial_metadata(initial_metadata)
self._initial_metadata_sent = True
return True
def is_active(self):
with self._condition:
return self._active
def add_callback(self, callback):
with self._condition:
if self._callbacks is None:
return False
else:
self._callbacks.append(callback)
return True
def invocation_metadata(self):
with self._condition:
return self._invocation_metadata
def set_trailing_metadata(self, trailing_metadata):
with self._condition:
self._pending_trailing_metadata = trailing_metadata
def set_code(self, code):
with self._condition:
self._pending_code = code
def set_details(self, details):
with self._condition:
self._pending_details = details
| 5,273
| 32.379747
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_testing/grpc_testing/_server/_server_rpc.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import grpc_testing
class UnaryUnaryServerRpc(grpc_testing.UnaryUnaryServerRpc):
def __init__(self, handler):
self._handler = handler
def initial_metadata(self):
return self._handler.initial_metadata()
def cancel(self):
self._handler.cancel()
def termination(self):
return self._handler.unary_response_termination()
class UnaryStreamServerRpc(grpc_testing.UnaryStreamServerRpc):
def __init__(self, handler):
self._handler = handler
def initial_metadata(self):
return self._handler.initial_metadata()
def take_response(self):
return self._handler.take_response()
def cancel(self):
self._handler.cancel()
def termination(self):
return self._handler.stream_response_termination()
class StreamUnaryServerRpc(grpc_testing.StreamUnaryServerRpc):
def __init__(self, handler):
self._handler = handler
def initial_metadata(self):
return self._handler.initial_metadata()
def send_request(self, request):
self._handler.add_request(request)
def requests_closed(self):
self._handler.requests_closed()
def cancel(self):
self._handler.cancel()
def termination(self):
return self._handler.unary_response_termination()
class StreamStreamServerRpc(grpc_testing.StreamStreamServerRpc):
def __init__(self, handler):
self._handler = handler
def initial_metadata(self):
return self._handler.initial_metadata()
def send_request(self, request):
self._handler.add_request(request)
def requests_closed(self):
self._handler.requests_closed()
def take_response(self):
return self._handler.take_response()
def cancel(self):
self._handler.cancel()
def termination(self):
return self._handler.stream_response_termination()
| 2,460
| 26.344444
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_testing/grpc_testing/_server/__init__.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from grpc_testing._server import _server
def server_from_dictionary(descriptors_to_servicers, time):
return _server.server_from_descriptor_to_servicers(
descriptors_to_servicers, time
)
| 782
| 34.590909
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_testing/grpc_testing/_server/_service.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import grpc
class _RequestIterator(object):
def __init__(self, rpc, handler):
self._rpc = rpc
self._handler = handler
def _next(self):
read = self._handler.take_request()
if read.requests_closed:
raise StopIteration()
elif read.terminated:
rpc_error = grpc.RpcError()
self._rpc.add_rpc_error(rpc_error)
raise rpc_error
else:
return read.request
def __iter__(self):
return self
def __next__(self):
return self._next()
def next(self):
return self._next()
def _unary_response(argument, implementation, rpc, servicer_context):
try:
response = implementation(argument, servicer_context)
except Exception as exception: # pylint: disable=broad-except
rpc.application_exception_abort(exception)
else:
rpc.unary_response_complete(response)
def _stream_response(argument, implementation, rpc, servicer_context):
try:
response_iterator = implementation(argument, servicer_context)
except Exception as exception: # pylint: disable=broad-except
rpc.application_exception_abort(exception)
else:
while True:
try:
response = copy.deepcopy(next(response_iterator))
except StopIteration:
rpc.stream_response_complete()
break
except Exception as exception: # pylint: disable=broad-except
rpc.application_exception_abort(exception)
break
else:
rpc.stream_response(response)
def unary_unary(implementation, rpc, request, servicer_context):
_unary_response(request, implementation, rpc, servicer_context)
def unary_stream(implementation, rpc, request, servicer_context):
_stream_response(request, implementation, rpc, servicer_context)
def stream_unary(implementation, rpc, handler, servicer_context):
_unary_response(
_RequestIterator(rpc, handler), implementation, rpc, servicer_context
)
def stream_stream(implementation, rpc, handler, servicer_context):
_stream_response(
_RequestIterator(rpc, handler), implementation, rpc, servicer_context
)
| 2,846
| 29.945652
| 77
|
py
|
grpc
|
grpc-master/src/python/grpcio_testing/grpc_testing/_server/_server.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import threading
import grpc_testing
from grpc_testing import _common
from grpc_testing._server import _handler
from grpc_testing._server import _rpc
from grpc_testing._server import _server_rpc
from grpc_testing._server import _service
from grpc_testing._server import _servicer_context
def _implementation(descriptors_to_servicers, method_descriptor):
servicer = descriptors_to_servicers[method_descriptor.containing_service]
return getattr(servicer, method_descriptor.name)
def _unary_unary_service(request):
def service(implementation, rpc, servicer_context):
_service.unary_unary(implementation, rpc, request, servicer_context)
return service
def _unary_stream_service(request):
def service(implementation, rpc, servicer_context):
_service.unary_stream(implementation, rpc, request, servicer_context)
return service
def _stream_unary_service(handler):
def service(implementation, rpc, servicer_context):
_service.stream_unary(implementation, rpc, handler, servicer_context)
return service
def _stream_stream_service(handler):
def service(implementation, rpc, servicer_context):
_service.stream_stream(implementation, rpc, handler, servicer_context)
return service
class _Serverish(_common.Serverish):
def __init__(self, descriptors_to_servicers, time):
self._descriptors_to_servicers = descriptors_to_servicers
self._time = time
def _invoke(
self,
service_behavior,
method_descriptor,
handler,
invocation_metadata,
deadline,
):
implementation = _implementation(
self._descriptors_to_servicers, method_descriptor
)
rpc = _rpc.Rpc(handler, invocation_metadata)
if handler.add_termination_callback(rpc.extrinsic_abort):
servicer_context = _servicer_context.ServicerContext(
rpc, self._time, deadline
)
service_thread = threading.Thread(
target=service_behavior,
args=(
implementation,
rpc,
servicer_context,
),
)
service_thread.start()
def invoke_unary_unary(
self, method_descriptor, handler, invocation_metadata, request, deadline
):
self._invoke(
_unary_unary_service(request),
method_descriptor,
handler,
invocation_metadata,
deadline,
)
def invoke_unary_stream(
self, method_descriptor, handler, invocation_metadata, request, deadline
):
self._invoke(
_unary_stream_service(request),
method_descriptor,
handler,
invocation_metadata,
deadline,
)
def invoke_stream_unary(
self, method_descriptor, handler, invocation_metadata, deadline
):
self._invoke(
_stream_unary_service(handler),
method_descriptor,
handler,
invocation_metadata,
deadline,
)
def invoke_stream_stream(
self, method_descriptor, handler, invocation_metadata, deadline
):
self._invoke(
_stream_stream_service(handler),
method_descriptor,
handler,
invocation_metadata,
deadline,
)
def _deadline_and_handler(requests_closed, time, timeout):
if timeout is None:
return None, _handler.handler_without_deadline(requests_closed)
else:
deadline = time.time() + timeout
handler = _handler.handler_with_deadline(
requests_closed, time, deadline
)
return deadline, handler
class _Server(grpc_testing.Server):
def __init__(self, serverish, time):
self._serverish = serverish
self._time = time
def invoke_unary_unary(
self, method_descriptor, invocation_metadata, request, timeout
):
deadline, handler = _deadline_and_handler(True, self._time, timeout)
self._serverish.invoke_unary_unary(
method_descriptor, handler, invocation_metadata, request, deadline
)
return _server_rpc.UnaryUnaryServerRpc(handler)
def invoke_unary_stream(
self, method_descriptor, invocation_metadata, request, timeout
):
deadline, handler = _deadline_and_handler(True, self._time, timeout)
self._serverish.invoke_unary_stream(
method_descriptor, handler, invocation_metadata, request, deadline
)
return _server_rpc.UnaryStreamServerRpc(handler)
def invoke_stream_unary(
self, method_descriptor, invocation_metadata, timeout
):
deadline, handler = _deadline_and_handler(False, self._time, timeout)
self._serverish.invoke_stream_unary(
method_descriptor, handler, invocation_metadata, deadline
)
return _server_rpc.StreamUnaryServerRpc(handler)
def invoke_stream_stream(
self, method_descriptor, invocation_metadata, timeout
):
deadline, handler = _deadline_and_handler(False, self._time, timeout)
self._serverish.invoke_stream_stream(
method_descriptor, handler, invocation_metadata, deadline
)
return _server_rpc.StreamStreamServerRpc(handler)
def server_from_descriptor_to_servicers(descriptors_to_servicers, time):
return _Server(_Serverish(descriptors_to_servicers, time), time)
| 6,096
| 31.089474
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_testing/grpc_testing/_server/_servicer_context.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import grpc
from grpc_testing import _common
class ServicerContext(grpc.ServicerContext):
def __init__(self, rpc, time, deadline):
self._rpc = rpc
self._time = time
self._deadline = deadline
def is_active(self):
return self._rpc.is_active()
def time_remaining(self):
if self._rpc.is_active():
if self._deadline is None:
return None
else:
return max(0.0, self._deadline - self._time.time())
else:
return 0.0
def cancel(self):
self._rpc.application_cancel()
def add_callback(self, callback):
return self._rpc.add_callback(callback)
def invocation_metadata(self):
return self._rpc.invocation_metadata()
def peer(self):
raise NotImplementedError()
def peer_identities(self):
raise NotImplementedError()
def peer_identity_key(self):
raise NotImplementedError()
def auth_context(self):
raise NotImplementedError()
def set_compression(self):
raise NotImplementedError()
def send_initial_metadata(self, initial_metadata):
initial_metadata_sent = self._rpc.send_initial_metadata(
_common.fuss_with_metadata(initial_metadata)
)
if not initial_metadata_sent:
raise ValueError(
"ServicerContext.send_initial_metadata called too late!"
)
def disable_next_message_compression(self):
raise NotImplementedError()
def set_trailing_metadata(self, trailing_metadata):
self._rpc.set_trailing_metadata(
_common.fuss_with_metadata(trailing_metadata)
)
def abort(self, code, details):
with self._rpc._condition:
self._rpc._abort(code, details)
raise Exception()
def abort_with_status(self, status):
raise NotImplementedError()
def set_code(self, code):
self._rpc.set_code(code)
def set_details(self, details):
self._rpc.set_details(details)
| 2,631
| 27.923077
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio/_spawn_patch.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Patches the spawn() command for windows compilers.
Windows has an 8191 character command line limit, but some compilers
support an @command_file directive where command_file is a file
containing the full command line.
"""
from distutils import ccompiler
import os
import os.path
import shutil
import sys
import tempfile
MAX_COMMAND_LENGTH = 8191
_classic_spawn = ccompiler.CCompiler.spawn
def _commandfile_spawn(self, command):
command_length = sum([len(arg) for arg in command])
if os.name == "nt" and command_length > MAX_COMMAND_LENGTH:
# Even if this command doesn't support the @command_file, it will
# fail as is so we try blindly
print("Command line length exceeded, using command file")
print(" ".join(command))
temporary_directory = tempfile.mkdtemp()
command_filename = os.path.abspath(
os.path.join(temporary_directory, "command")
)
with open(command_filename, "w") as command_file:
escaped_args = [
'"' + arg.replace("\\", "\\\\") + '"' for arg in command[1:]
]
# add each arg on a separate line to avoid hitting the
# "line in command file contains 131071 or more characters" error
# (can happen for extra long link commands)
command_file.write(" \n".join(escaped_args))
modified_command = command[:1] + ["@{}".format(command_filename)]
try:
_classic_spawn(self, modified_command)
finally:
shutil.rmtree(temporary_directory)
else:
_classic_spawn(self, command)
def monkeypatch_spawn():
"""Monkeypatching is dumb, but it's either that or we become maintainers of
something much, much bigger."""
ccompiler.CCompiler.spawn = _commandfile_spawn
| 2,386
| 35.723077
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc_version.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio/grpc_version.py.template`!!!
VERSION = '1.57.0.dev0'
| 694
| 37.611111
| 90
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc_core_dependencies.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio/grpc_core_dependencies.py.template`!!!
CORE_SOURCE_FILES = [
'src/core/ext/filters/backend_metrics/backend_metric_filter.cc',
'src/core/ext/filters/census/grpc_context.cc',
'src/core/ext/filters/channel_idle/channel_idle_filter.cc',
'src/core/ext/filters/channel_idle/idle_filter_state.cc',
'src/core/ext/filters/client_channel/backend_metric.cc',
'src/core/ext/filters/client_channel/backup_poller.cc',
'src/core/ext/filters/client_channel/channel_connectivity.cc',
'src/core/ext/filters/client_channel/client_channel.cc',
'src/core/ext/filters/client_channel/client_channel_channelz.cc',
'src/core/ext/filters/client_channel/client_channel_factory.cc',
'src/core/ext/filters/client_channel/client_channel_plugin.cc',
'src/core/ext/filters/client_channel/client_channel_service_config.cc',
'src/core/ext/filters/client_channel/config_selector.cc',
'src/core/ext/filters/client_channel/dynamic_filters.cc',
'src/core/ext/filters/client_channel/global_subchannel_pool.cc',
'src/core/ext/filters/client_channel/http_proxy.cc',
'src/core/ext/filters/client_channel/lb_policy/address_filtering.cc',
'src/core/ext/filters/client_channel/lb_policy/child_policy_handler.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/client_load_reporting_filter.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_balancer_addresses.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.cc',
'src/core/ext/filters/client_channel/lb_policy/health_check_client.cc',
'src/core/ext/filters/client_channel/lb_policy/oob_backend_metric.cc',
'src/core/ext/filters/client_channel/lb_policy/outlier_detection/outlier_detection.cc',
'src/core/ext/filters/client_channel/lb_policy/pick_first/pick_first.cc',
'src/core/ext/filters/client_channel/lb_policy/priority/priority.cc',
'src/core/ext/filters/client_channel/lb_policy/ring_hash/ring_hash.cc',
'src/core/ext/filters/client_channel/lb_policy/rls/rls.cc',
'src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.cc',
'src/core/ext/filters/client_channel/lb_policy/weighted_round_robin/static_stride_scheduler.cc',
'src/core/ext/filters/client_channel/lb_policy/weighted_round_robin/weighted_round_robin.cc',
'src/core/ext/filters/client_channel/lb_policy/weighted_target/weighted_target.cc',
'src/core/ext/filters/client_channel/lb_policy/xds/cds.cc',
'src/core/ext/filters/client_channel/lb_policy/xds/xds_cluster_impl.cc',
'src/core/ext/filters/client_channel/lb_policy/xds/xds_cluster_manager.cc',
'src/core/ext/filters/client_channel/lb_policy/xds/xds_cluster_resolver.cc',
'src/core/ext/filters/client_channel/lb_policy/xds/xds_override_host.cc',
'src/core/ext/filters/client_channel/lb_policy/xds/xds_wrr_locality.cc',
'src/core/ext/filters/client_channel/local_subchannel_pool.cc',
'src/core/ext/filters/client_channel/resolver/binder/binder_resolver.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/dns_resolver_ares.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_posix.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_windows.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_posix.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_windows.cc',
'src/core/ext/filters/client_channel/resolver/dns/dns_resolver_plugin.cc',
'src/core/ext/filters/client_channel/resolver/dns/event_engine/event_engine_client_channel_resolver.cc',
'src/core/ext/filters/client_channel/resolver/dns/event_engine/service_config_helper.cc',
'src/core/ext/filters/client_channel/resolver/dns/native/dns_resolver.cc',
'src/core/ext/filters/client_channel/resolver/fake/fake_resolver.cc',
'src/core/ext/filters/client_channel/resolver/google_c2p/google_c2p_resolver.cc',
'src/core/ext/filters/client_channel/resolver/polling_resolver.cc',
'src/core/ext/filters/client_channel/resolver/sockaddr/sockaddr_resolver.cc',
'src/core/ext/filters/client_channel/resolver/xds/xds_resolver.cc',
'src/core/ext/filters/client_channel/retry_filter.cc',
'src/core/ext/filters/client_channel/retry_filter_legacy_call_data.cc',
'src/core/ext/filters/client_channel/retry_service_config.cc',
'src/core/ext/filters/client_channel/retry_throttle.cc',
'src/core/ext/filters/client_channel/service_config_channel_arg_filter.cc',
'src/core/ext/filters/client_channel/subchannel.cc',
'src/core/ext/filters/client_channel/subchannel_pool_interface.cc',
'src/core/ext/filters/client_channel/subchannel_stream_client.cc',
'src/core/ext/filters/deadline/deadline_filter.cc',
'src/core/ext/filters/fault_injection/fault_injection_filter.cc',
'src/core/ext/filters/fault_injection/fault_injection_service_config_parser.cc',
'src/core/ext/filters/http/client/http_client_filter.cc',
'src/core/ext/filters/http/client_authority_filter.cc',
'src/core/ext/filters/http/http_filters_plugin.cc',
'src/core/ext/filters/http/message_compress/compression_filter.cc',
'src/core/ext/filters/http/server/http_server_filter.cc',
'src/core/ext/filters/message_size/message_size_filter.cc',
'src/core/ext/filters/rbac/rbac_filter.cc',
'src/core/ext/filters/rbac/rbac_service_config_parser.cc',
'src/core/ext/filters/server_config_selector/server_config_selector_filter.cc',
'src/core/ext/filters/stateful_session/stateful_session_filter.cc',
'src/core/ext/filters/stateful_session/stateful_session_service_config_parser.cc',
'src/core/ext/gcp/metadata_query.cc',
'src/core/ext/transport/chttp2/alpn/alpn.cc',
'src/core/ext/transport/chttp2/client/chttp2_connector.cc',
'src/core/ext/transport/chttp2/server/chttp2_server.cc',
'src/core/ext/transport/chttp2/transport/bin_decoder.cc',
'src/core/ext/transport/chttp2/transport/bin_encoder.cc',
'src/core/ext/transport/chttp2/transport/chttp2_transport.cc',
'src/core/ext/transport/chttp2/transport/decode_huff.cc',
'src/core/ext/transport/chttp2/transport/flow_control.cc',
'src/core/ext/transport/chttp2/transport/frame_data.cc',
'src/core/ext/transport/chttp2/transport/frame_goaway.cc',
'src/core/ext/transport/chttp2/transport/frame_ping.cc',
'src/core/ext/transport/chttp2/transport/frame_rst_stream.cc',
'src/core/ext/transport/chttp2/transport/frame_settings.cc',
'src/core/ext/transport/chttp2/transport/frame_window_update.cc',
'src/core/ext/transport/chttp2/transport/hpack_encoder.cc',
'src/core/ext/transport/chttp2/transport/hpack_encoder_table.cc',
'src/core/ext/transport/chttp2/transport/hpack_parse_result.cc',
'src/core/ext/transport/chttp2/transport/hpack_parser.cc',
'src/core/ext/transport/chttp2/transport/hpack_parser_table.cc',
'src/core/ext/transport/chttp2/transport/http2_settings.cc',
'src/core/ext/transport/chttp2/transport/http_trace.cc',
'src/core/ext/transport/chttp2/transport/huffsyms.cc',
'src/core/ext/transport/chttp2/transport/parsing.cc',
'src/core/ext/transport/chttp2/transport/stream_lists.cc',
'src/core/ext/transport/chttp2/transport/varint.cc',
'src/core/ext/transport/chttp2/transport/writing.cc',
'src/core/ext/transport/inproc/inproc_plugin.cc',
'src/core/ext/transport/inproc/inproc_transport.cc',
'src/core/ext/upb-generated/envoy/admin/v3/certs.upb.c',
'src/core/ext/upb-generated/envoy/admin/v3/clusters.upb.c',
'src/core/ext/upb-generated/envoy/admin/v3/config_dump.upb.c',
'src/core/ext/upb-generated/envoy/admin/v3/config_dump_shared.upb.c',
'src/core/ext/upb-generated/envoy/admin/v3/init_dump.upb.c',
'src/core/ext/upb-generated/envoy/admin/v3/listeners.upb.c',
'src/core/ext/upb-generated/envoy/admin/v3/memory.upb.c',
'src/core/ext/upb-generated/envoy/admin/v3/metrics.upb.c',
'src/core/ext/upb-generated/envoy/admin/v3/mutex_stats.upb.c',
'src/core/ext/upb-generated/envoy/admin/v3/server_info.upb.c',
'src/core/ext/upb-generated/envoy/admin/v3/tap.upb.c',
'src/core/ext/upb-generated/envoy/annotations/deprecation.upb.c',
'src/core/ext/upb-generated/envoy/annotations/resource.upb.c',
'src/core/ext/upb-generated/envoy/config/accesslog/v3/accesslog.upb.c',
'src/core/ext/upb-generated/envoy/config/bootstrap/v3/bootstrap.upb.c',
'src/core/ext/upb-generated/envoy/config/cluster/v3/circuit_breaker.upb.c',
'src/core/ext/upb-generated/envoy/config/cluster/v3/cluster.upb.c',
'src/core/ext/upb-generated/envoy/config/cluster/v3/filter.upb.c',
'src/core/ext/upb-generated/envoy/config/cluster/v3/outlier_detection.upb.c',
'src/core/ext/upb-generated/envoy/config/common/matcher/v3/matcher.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/address.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/backoff.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/base.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/config_source.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/event_service_config.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/extension.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/grpc_method_list.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/grpc_service.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/health_check.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/http_uri.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/protocol.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/proxy_protocol.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/resolver.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/socket_option.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/substitution_format_string.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/udp_socket_config.upb.c',
'src/core/ext/upb-generated/envoy/config/endpoint/v3/endpoint.upb.c',
'src/core/ext/upb-generated/envoy/config/endpoint/v3/endpoint_components.upb.c',
'src/core/ext/upb-generated/envoy/config/endpoint/v3/load_report.upb.c',
'src/core/ext/upb-generated/envoy/config/listener/v3/api_listener.upb.c',
'src/core/ext/upb-generated/envoy/config/listener/v3/listener.upb.c',
'src/core/ext/upb-generated/envoy/config/listener/v3/listener_components.upb.c',
'src/core/ext/upb-generated/envoy/config/listener/v3/quic_config.upb.c',
'src/core/ext/upb-generated/envoy/config/listener/v3/udp_listener_config.upb.c',
'src/core/ext/upb-generated/envoy/config/metrics/v3/metrics_service.upb.c',
'src/core/ext/upb-generated/envoy/config/metrics/v3/stats.upb.c',
'src/core/ext/upb-generated/envoy/config/overload/v3/overload.upb.c',
'src/core/ext/upb-generated/envoy/config/rbac/v3/rbac.upb.c',
'src/core/ext/upb-generated/envoy/config/route/v3/route.upb.c',
'src/core/ext/upb-generated/envoy/config/route/v3/route_components.upb.c',
'src/core/ext/upb-generated/envoy/config/route/v3/scoped_route.upb.c',
'src/core/ext/upb-generated/envoy/config/tap/v3/common.upb.c',
'src/core/ext/upb-generated/envoy/config/trace/v3/datadog.upb.c',
'src/core/ext/upb-generated/envoy/config/trace/v3/dynamic_ot.upb.c',
'src/core/ext/upb-generated/envoy/config/trace/v3/http_tracer.upb.c',
'src/core/ext/upb-generated/envoy/config/trace/v3/lightstep.upb.c',
'src/core/ext/upb-generated/envoy/config/trace/v3/opencensus.upb.c',
'src/core/ext/upb-generated/envoy/config/trace/v3/opentelemetry.upb.c',
'src/core/ext/upb-generated/envoy/config/trace/v3/service.upb.c',
'src/core/ext/upb-generated/envoy/config/trace/v3/skywalking.upb.c',
'src/core/ext/upb-generated/envoy/config/trace/v3/trace.upb.c',
'src/core/ext/upb-generated/envoy/config/trace/v3/xray.upb.c',
'src/core/ext/upb-generated/envoy/config/trace/v3/zipkin.upb.c',
'src/core/ext/upb-generated/envoy/data/accesslog/v3/accesslog.upb.c',
'src/core/ext/upb-generated/envoy/extensions/clusters/aggregate/v3/cluster.upb.c',
'src/core/ext/upb-generated/envoy/extensions/filters/common/fault/v3/fault.upb.c',
'src/core/ext/upb-generated/envoy/extensions/filters/http/fault/v3/fault.upb.c',
'src/core/ext/upb-generated/envoy/extensions/filters/http/rbac/v3/rbac.upb.c',
'src/core/ext/upb-generated/envoy/extensions/filters/http/router/v3/router.upb.c',
'src/core/ext/upb-generated/envoy/extensions/filters/http/stateful_session/v3/stateful_session.upb.c',
'src/core/ext/upb-generated/envoy/extensions/filters/network/http_connection_manager/v3/http_connection_manager.upb.c',
'src/core/ext/upb-generated/envoy/extensions/http/stateful_session/cookie/v3/cookie.upb.c',
'src/core/ext/upb-generated/envoy/extensions/load_balancing_policies/client_side_weighted_round_robin/v3/client_side_weighted_round_robin.upb.c',
'src/core/ext/upb-generated/envoy/extensions/load_balancing_policies/common/v3/common.upb.c',
'src/core/ext/upb-generated/envoy/extensions/load_balancing_policies/pick_first/v3/pick_first.upb.c',
'src/core/ext/upb-generated/envoy/extensions/load_balancing_policies/ring_hash/v3/ring_hash.upb.c',
'src/core/ext/upb-generated/envoy/extensions/load_balancing_policies/wrr_locality/v3/wrr_locality.upb.c',
'src/core/ext/upb-generated/envoy/extensions/transport_sockets/tls/v3/cert.upb.c',
'src/core/ext/upb-generated/envoy/extensions/transport_sockets/tls/v3/common.upb.c',
'src/core/ext/upb-generated/envoy/extensions/transport_sockets/tls/v3/secret.upb.c',
'src/core/ext/upb-generated/envoy/extensions/transport_sockets/tls/v3/tls.upb.c',
'src/core/ext/upb-generated/envoy/extensions/transport_sockets/tls/v3/tls_spiffe_validator_config.upb.c',
'src/core/ext/upb-generated/envoy/service/discovery/v3/ads.upb.c',
'src/core/ext/upb-generated/envoy/service/discovery/v3/discovery.upb.c',
'src/core/ext/upb-generated/envoy/service/load_stats/v3/lrs.upb.c',
'src/core/ext/upb-generated/envoy/service/status/v3/csds.upb.c',
'src/core/ext/upb-generated/envoy/type/http/v3/cookie.upb.c',
'src/core/ext/upb-generated/envoy/type/http/v3/path_transformation.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/filter_state.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/http_inputs.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/metadata.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/node.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/number.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/path.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/regex.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/status_code_input.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/string.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/struct.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/value.upb.c',
'src/core/ext/upb-generated/envoy/type/metadata/v3/metadata.upb.c',
'src/core/ext/upb-generated/envoy/type/tracing/v3/custom_tag.upb.c',
'src/core/ext/upb-generated/envoy/type/v3/hash_policy.upb.c',
'src/core/ext/upb-generated/envoy/type/v3/http.upb.c',
'src/core/ext/upb-generated/envoy/type/v3/http_status.upb.c',
'src/core/ext/upb-generated/envoy/type/v3/percent.upb.c',
'src/core/ext/upb-generated/envoy/type/v3/range.upb.c',
'src/core/ext/upb-generated/envoy/type/v3/ratelimit_strategy.upb.c',
'src/core/ext/upb-generated/envoy/type/v3/ratelimit_unit.upb.c',
'src/core/ext/upb-generated/envoy/type/v3/semantic_version.upb.c',
'src/core/ext/upb-generated/envoy/type/v3/token_bucket.upb.c',
'src/core/ext/upb-generated/google/api/annotations.upb.c',
'src/core/ext/upb-generated/google/api/expr/v1alpha1/checked.upb.c',
'src/core/ext/upb-generated/google/api/expr/v1alpha1/syntax.upb.c',
'src/core/ext/upb-generated/google/api/http.upb.c',
'src/core/ext/upb-generated/google/api/httpbody.upb.c',
'src/core/ext/upb-generated/google/protobuf/any.upb.c',
'src/core/ext/upb-generated/google/protobuf/descriptor.upb.c',
'src/core/ext/upb-generated/google/protobuf/duration.upb.c',
'src/core/ext/upb-generated/google/protobuf/empty.upb.c',
'src/core/ext/upb-generated/google/protobuf/struct.upb.c',
'src/core/ext/upb-generated/google/protobuf/timestamp.upb.c',
'src/core/ext/upb-generated/google/protobuf/wrappers.upb.c',
'src/core/ext/upb-generated/google/rpc/status.upb.c',
'src/core/ext/upb-generated/opencensus/proto/trace/v1/trace_config.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/gcp/altscontext.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/gcp/handshaker.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/gcp/transport_security_common.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/health/v1/health.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/lb/v1/load_balancer.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/lookup/v1/rls.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/lookup/v1/rls_config.upb.c',
'src/core/ext/upb-generated/udpa/annotations/migrate.upb.c',
'src/core/ext/upb-generated/udpa/annotations/security.upb.c',
'src/core/ext/upb-generated/udpa/annotations/sensitive.upb.c',
'src/core/ext/upb-generated/udpa/annotations/status.upb.c',
'src/core/ext/upb-generated/udpa/annotations/versioning.upb.c',
'src/core/ext/upb-generated/validate/validate.upb.c',
'src/core/ext/upb-generated/xds/annotations/v3/migrate.upb.c',
'src/core/ext/upb-generated/xds/annotations/v3/security.upb.c',
'src/core/ext/upb-generated/xds/annotations/v3/sensitive.upb.c',
'src/core/ext/upb-generated/xds/annotations/v3/status.upb.c',
'src/core/ext/upb-generated/xds/annotations/v3/versioning.upb.c',
'src/core/ext/upb-generated/xds/core/v3/authority.upb.c',
'src/core/ext/upb-generated/xds/core/v3/cidr.upb.c',
'src/core/ext/upb-generated/xds/core/v3/collection_entry.upb.c',
'src/core/ext/upb-generated/xds/core/v3/context_params.upb.c',
'src/core/ext/upb-generated/xds/core/v3/extension.upb.c',
'src/core/ext/upb-generated/xds/core/v3/resource.upb.c',
'src/core/ext/upb-generated/xds/core/v3/resource_locator.upb.c',
'src/core/ext/upb-generated/xds/core/v3/resource_name.upb.c',
'src/core/ext/upb-generated/xds/data/orca/v3/orca_load_report.upb.c',
'src/core/ext/upb-generated/xds/service/orca/v3/orca.upb.c',
'src/core/ext/upb-generated/xds/type/matcher/v3/cel.upb.c',
'src/core/ext/upb-generated/xds/type/matcher/v3/domain.upb.c',
'src/core/ext/upb-generated/xds/type/matcher/v3/http_inputs.upb.c',
'src/core/ext/upb-generated/xds/type/matcher/v3/ip.upb.c',
'src/core/ext/upb-generated/xds/type/matcher/v3/matcher.upb.c',
'src/core/ext/upb-generated/xds/type/matcher/v3/range.upb.c',
'src/core/ext/upb-generated/xds/type/matcher/v3/regex.upb.c',
'src/core/ext/upb-generated/xds/type/matcher/v3/string.upb.c',
'src/core/ext/upb-generated/xds/type/v3/cel.upb.c',
'src/core/ext/upb-generated/xds/type/v3/range.upb.c',
'src/core/ext/upb-generated/xds/type/v3/typed_struct.upb.c',
'src/core/ext/upbdefs-generated/envoy/admin/v3/certs.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/admin/v3/clusters.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/admin/v3/config_dump.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/admin/v3/config_dump_shared.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/admin/v3/init_dump.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/admin/v3/listeners.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/admin/v3/memory.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/admin/v3/metrics.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/admin/v3/mutex_stats.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/admin/v3/server_info.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/admin/v3/tap.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/annotations/deprecation.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/annotations/resource.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/accesslog/v3/accesslog.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/bootstrap/v3/bootstrap.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/cluster/v3/circuit_breaker.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/cluster/v3/cluster.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/cluster/v3/filter.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/cluster/v3/outlier_detection.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/common/matcher/v3/matcher.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/address.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/backoff.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/base.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/config_source.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/event_service_config.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/extension.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/grpc_method_list.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/grpc_service.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/health_check.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/http_uri.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/protocol.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/proxy_protocol.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/resolver.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/socket_option.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/substitution_format_string.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/udp_socket_config.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/endpoint/v3/endpoint.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/endpoint/v3/endpoint_components.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/endpoint/v3/load_report.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/listener/v3/api_listener.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/listener/v3/listener.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/listener/v3/listener_components.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/listener/v3/quic_config.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/listener/v3/udp_listener_config.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/metrics/v3/metrics_service.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/metrics/v3/stats.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/overload/v3/overload.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/rbac/v3/rbac.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/route/v3/route.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/route/v3/route_components.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/route/v3/scoped_route.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/tap/v3/common.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/trace/v3/datadog.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/trace/v3/dynamic_ot.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/trace/v3/http_tracer.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/trace/v3/lightstep.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/trace/v3/opencensus.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/trace/v3/opentelemetry.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/trace/v3/service.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/trace/v3/skywalking.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/trace/v3/trace.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/trace/v3/xray.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/trace/v3/zipkin.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/data/accesslog/v3/accesslog.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/clusters/aggregate/v3/cluster.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/filters/common/fault/v3/fault.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/filters/http/fault/v3/fault.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/filters/http/rbac/v3/rbac.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/filters/http/router/v3/router.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/filters/http/stateful_session/v3/stateful_session.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/filters/network/http_connection_manager/v3/http_connection_manager.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/http/stateful_session/cookie/v3/cookie.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/transport_sockets/tls/v3/cert.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/transport_sockets/tls/v3/common.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/transport_sockets/tls/v3/secret.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/transport_sockets/tls/v3/tls.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/transport_sockets/tls/v3/tls_spiffe_validator_config.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/service/discovery/v3/ads.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/service/discovery/v3/discovery.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/service/load_stats/v3/lrs.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/service/status/v3/csds.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/http/v3/cookie.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/http/v3/path_transformation.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/matcher/v3/filter_state.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/matcher/v3/http_inputs.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/matcher/v3/metadata.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/matcher/v3/node.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/matcher/v3/number.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/matcher/v3/path.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/matcher/v3/regex.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/matcher/v3/status_code_input.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/matcher/v3/string.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/matcher/v3/struct.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/matcher/v3/value.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/metadata/v3/metadata.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/tracing/v3/custom_tag.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/v3/hash_policy.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/v3/http.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/v3/http_status.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/v3/percent.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/v3/range.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/v3/ratelimit_strategy.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/v3/ratelimit_unit.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/v3/semantic_version.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/v3/token_bucket.upbdefs.c',
'src/core/ext/upbdefs-generated/google/api/annotations.upbdefs.c',
'src/core/ext/upbdefs-generated/google/api/expr/v1alpha1/checked.upbdefs.c',
'src/core/ext/upbdefs-generated/google/api/expr/v1alpha1/syntax.upbdefs.c',
'src/core/ext/upbdefs-generated/google/api/http.upbdefs.c',
'src/core/ext/upbdefs-generated/google/api/httpbody.upbdefs.c',
'src/core/ext/upbdefs-generated/google/protobuf/any.upbdefs.c',
'src/core/ext/upbdefs-generated/google/protobuf/descriptor.upbdefs.c',
'src/core/ext/upbdefs-generated/google/protobuf/duration.upbdefs.c',
'src/core/ext/upbdefs-generated/google/protobuf/empty.upbdefs.c',
'src/core/ext/upbdefs-generated/google/protobuf/struct.upbdefs.c',
'src/core/ext/upbdefs-generated/google/protobuf/timestamp.upbdefs.c',
'src/core/ext/upbdefs-generated/google/protobuf/wrappers.upbdefs.c',
'src/core/ext/upbdefs-generated/google/rpc/status.upbdefs.c',
'src/core/ext/upbdefs-generated/opencensus/proto/trace/v1/trace_config.upbdefs.c',
'src/core/ext/upbdefs-generated/src/proto/grpc/lookup/v1/rls_config.upbdefs.c',
'src/core/ext/upbdefs-generated/udpa/annotations/migrate.upbdefs.c',
'src/core/ext/upbdefs-generated/udpa/annotations/security.upbdefs.c',
'src/core/ext/upbdefs-generated/udpa/annotations/sensitive.upbdefs.c',
'src/core/ext/upbdefs-generated/udpa/annotations/status.upbdefs.c',
'src/core/ext/upbdefs-generated/udpa/annotations/versioning.upbdefs.c',
'src/core/ext/upbdefs-generated/validate/validate.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/annotations/v3/migrate.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/annotations/v3/security.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/annotations/v3/sensitive.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/annotations/v3/status.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/annotations/v3/versioning.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/core/v3/authority.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/core/v3/cidr.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/core/v3/collection_entry.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/core/v3/context_params.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/core/v3/extension.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/core/v3/resource.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/core/v3/resource_locator.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/core/v3/resource_name.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/type/matcher/v3/cel.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/type/matcher/v3/domain.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/type/matcher/v3/http_inputs.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/type/matcher/v3/ip.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/type/matcher/v3/matcher.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/type/matcher/v3/range.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/type/matcher/v3/regex.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/type/matcher/v3/string.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/type/v3/cel.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/type/v3/range.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/type/v3/typed_struct.upbdefs.c',
'src/core/ext/xds/certificate_provider_store.cc',
'src/core/ext/xds/file_watcher_certificate_provider_factory.cc',
'src/core/ext/xds/xds_api.cc',
'src/core/ext/xds/xds_audit_logger_registry.cc',
'src/core/ext/xds/xds_bootstrap.cc',
'src/core/ext/xds/xds_bootstrap_grpc.cc',
'src/core/ext/xds/xds_certificate_provider.cc',
'src/core/ext/xds/xds_channel_stack_modifier.cc',
'src/core/ext/xds/xds_client.cc',
'src/core/ext/xds/xds_client_grpc.cc',
'src/core/ext/xds/xds_client_stats.cc',
'src/core/ext/xds/xds_cluster.cc',
'src/core/ext/xds/xds_cluster_specifier_plugin.cc',
'src/core/ext/xds/xds_common_types.cc',
'src/core/ext/xds/xds_endpoint.cc',
'src/core/ext/xds/xds_health_status.cc',
'src/core/ext/xds/xds_http_fault_filter.cc',
'src/core/ext/xds/xds_http_filters.cc',
'src/core/ext/xds/xds_http_rbac_filter.cc',
'src/core/ext/xds/xds_http_stateful_session_filter.cc',
'src/core/ext/xds/xds_lb_policy_registry.cc',
'src/core/ext/xds/xds_listener.cc',
'src/core/ext/xds/xds_route_config.cc',
'src/core/ext/xds/xds_routing.cc',
'src/core/ext/xds/xds_server_config_fetcher.cc',
'src/core/ext/xds/xds_transport_grpc.cc',
'src/core/lib/address_utils/parse_address.cc',
'src/core/lib/address_utils/sockaddr_utils.cc',
'src/core/lib/backoff/backoff.cc',
'src/core/lib/backoff/random_early_detection.cc',
'src/core/lib/channel/call_tracer.cc',
'src/core/lib/channel/channel_args.cc',
'src/core/lib/channel/channel_args_preconditioning.cc',
'src/core/lib/channel/channel_stack.cc',
'src/core/lib/channel/channel_stack_builder.cc',
'src/core/lib/channel/channel_stack_builder_impl.cc',
'src/core/lib/channel/channel_trace.cc',
'src/core/lib/channel/channelz.cc',
'src/core/lib/channel/channelz_registry.cc',
'src/core/lib/channel/connected_channel.cc',
'src/core/lib/channel/promise_based_filter.cc',
'src/core/lib/channel/server_call_tracer_filter.cc',
'src/core/lib/channel/status_util.cc',
'src/core/lib/compression/compression.cc',
'src/core/lib/compression/compression_internal.cc',
'src/core/lib/compression/message_compress.cc',
'src/core/lib/config/config_vars.cc',
'src/core/lib/config/config_vars_non_generated.cc',
'src/core/lib/config/core_configuration.cc',
'src/core/lib/config/load_config.cc',
'src/core/lib/debug/event_log.cc',
'src/core/lib/debug/histogram_view.cc',
'src/core/lib/debug/stats.cc',
'src/core/lib/debug/stats_data.cc',
'src/core/lib/debug/trace.cc',
'src/core/lib/event_engine/cf_engine/cf_engine.cc',
'src/core/lib/event_engine/cf_engine/cfstream_endpoint.cc',
'src/core/lib/event_engine/channel_args_endpoint_config.cc',
'src/core/lib/event_engine/default_event_engine.cc',
'src/core/lib/event_engine/default_event_engine_factory.cc',
'src/core/lib/event_engine/event_engine.cc',
'src/core/lib/event_engine/forkable.cc',
'src/core/lib/event_engine/memory_allocator.cc',
'src/core/lib/event_engine/posix_engine/ev_epoll1_linux.cc',
'src/core/lib/event_engine/posix_engine/ev_poll_posix.cc',
'src/core/lib/event_engine/posix_engine/event_poller_posix_default.cc',
'src/core/lib/event_engine/posix_engine/internal_errqueue.cc',
'src/core/lib/event_engine/posix_engine/lockfree_event.cc',
'src/core/lib/event_engine/posix_engine/posix_endpoint.cc',
'src/core/lib/event_engine/posix_engine/posix_engine.cc',
'src/core/lib/event_engine/posix_engine/posix_engine_listener.cc',
'src/core/lib/event_engine/posix_engine/posix_engine_listener_utils.cc',
'src/core/lib/event_engine/posix_engine/tcp_socket_utils.cc',
'src/core/lib/event_engine/posix_engine/timer.cc',
'src/core/lib/event_engine/posix_engine/timer_heap.cc',
'src/core/lib/event_engine/posix_engine/timer_manager.cc',
'src/core/lib/event_engine/posix_engine/traced_buffer_list.cc',
'src/core/lib/event_engine/posix_engine/wakeup_fd_eventfd.cc',
'src/core/lib/event_engine/posix_engine/wakeup_fd_pipe.cc',
'src/core/lib/event_engine/posix_engine/wakeup_fd_posix_default.cc',
'src/core/lib/event_engine/resolved_address.cc',
'src/core/lib/event_engine/shim.cc',
'src/core/lib/event_engine/slice.cc',
'src/core/lib/event_engine/slice_buffer.cc',
'src/core/lib/event_engine/tcp_socket_utils.cc',
'src/core/lib/event_engine/thread_local.cc',
'src/core/lib/event_engine/thread_pool/original_thread_pool.cc',
'src/core/lib/event_engine/thread_pool/thread_pool_factory.cc',
'src/core/lib/event_engine/thread_pool/work_stealing_thread_pool.cc',
'src/core/lib/event_engine/thready_event_engine/thready_event_engine.cc',
'src/core/lib/event_engine/time_util.cc',
'src/core/lib/event_engine/trace.cc',
'src/core/lib/event_engine/utils.cc',
'src/core/lib/event_engine/windows/iocp.cc',
'src/core/lib/event_engine/windows/win_socket.cc',
'src/core/lib/event_engine/windows/windows_endpoint.cc',
'src/core/lib/event_engine/windows/windows_engine.cc',
'src/core/lib/event_engine/windows/windows_listener.cc',
'src/core/lib/event_engine/work_queue/basic_work_queue.cc',
'src/core/lib/experiments/config.cc',
'src/core/lib/experiments/experiments.cc',
'src/core/lib/gpr/alloc.cc',
'src/core/lib/gpr/android/log.cc',
'src/core/lib/gpr/atm.cc',
'src/core/lib/gpr/iphone/cpu.cc',
'src/core/lib/gpr/linux/cpu.cc',
'src/core/lib/gpr/linux/log.cc',
'src/core/lib/gpr/log.cc',
'src/core/lib/gpr/msys/tmpfile.cc',
'src/core/lib/gpr/posix/cpu.cc',
'src/core/lib/gpr/posix/log.cc',
'src/core/lib/gpr/posix/string.cc',
'src/core/lib/gpr/posix/sync.cc',
'src/core/lib/gpr/posix/time.cc',
'src/core/lib/gpr/posix/tmpfile.cc',
'src/core/lib/gpr/string.cc',
'src/core/lib/gpr/sync.cc',
'src/core/lib/gpr/sync_abseil.cc',
'src/core/lib/gpr/time.cc',
'src/core/lib/gpr/time_precise.cc',
'src/core/lib/gpr/windows/cpu.cc',
'src/core/lib/gpr/windows/log.cc',
'src/core/lib/gpr/windows/string.cc',
'src/core/lib/gpr/windows/string_util.cc',
'src/core/lib/gpr/windows/sync.cc',
'src/core/lib/gpr/windows/time.cc',
'src/core/lib/gpr/windows/tmpfile.cc',
'src/core/lib/gpr/wrap_memcpy.cc',
'src/core/lib/gprpp/crash.cc',
'src/core/lib/gprpp/examine_stack.cc',
'src/core/lib/gprpp/fork.cc',
'src/core/lib/gprpp/host_port.cc',
'src/core/lib/gprpp/linux/env.cc',
'src/core/lib/gprpp/load_file.cc',
'src/core/lib/gprpp/mpscq.cc',
'src/core/lib/gprpp/per_cpu.cc',
'src/core/lib/gprpp/posix/env.cc',
'src/core/lib/gprpp/posix/stat.cc',
'src/core/lib/gprpp/posix/thd.cc',
'src/core/lib/gprpp/status_helper.cc',
'src/core/lib/gprpp/strerror.cc',
'src/core/lib/gprpp/tchar.cc',
'src/core/lib/gprpp/time.cc',
'src/core/lib/gprpp/time_averaged_stats.cc',
'src/core/lib/gprpp/time_util.cc',
'src/core/lib/gprpp/validation_errors.cc',
'src/core/lib/gprpp/windows/env.cc',
'src/core/lib/gprpp/windows/stat.cc',
'src/core/lib/gprpp/windows/thd.cc',
'src/core/lib/gprpp/work_serializer.cc',
'src/core/lib/handshaker/proxy_mapper_registry.cc',
'src/core/lib/http/format_request.cc',
'src/core/lib/http/httpcli.cc',
'src/core/lib/http/httpcli_security_connector.cc',
'src/core/lib/http/parser.cc',
'src/core/lib/iomgr/buffer_list.cc',
'src/core/lib/iomgr/call_combiner.cc',
'src/core/lib/iomgr/cfstream_handle.cc',
'src/core/lib/iomgr/closure.cc',
'src/core/lib/iomgr/combiner.cc',
'src/core/lib/iomgr/dualstack_socket_posix.cc',
'src/core/lib/iomgr/endpoint.cc',
'src/core/lib/iomgr/endpoint_cfstream.cc',
'src/core/lib/iomgr/endpoint_pair_posix.cc',
'src/core/lib/iomgr/endpoint_pair_windows.cc',
'src/core/lib/iomgr/error.cc',
'src/core/lib/iomgr/error_cfstream.cc',
'src/core/lib/iomgr/ev_apple.cc',
'src/core/lib/iomgr/ev_epoll1_linux.cc',
'src/core/lib/iomgr/ev_poll_posix.cc',
'src/core/lib/iomgr/ev_posix.cc',
'src/core/lib/iomgr/ev_windows.cc',
'src/core/lib/iomgr/event_engine_shims/closure.cc',
'src/core/lib/iomgr/event_engine_shims/endpoint.cc',
'src/core/lib/iomgr/event_engine_shims/tcp_client.cc',
'src/core/lib/iomgr/exec_ctx.cc',
'src/core/lib/iomgr/executor.cc',
'src/core/lib/iomgr/fork_posix.cc',
'src/core/lib/iomgr/fork_windows.cc',
'src/core/lib/iomgr/gethostname_fallback.cc',
'src/core/lib/iomgr/gethostname_host_name_max.cc',
'src/core/lib/iomgr/gethostname_sysconf.cc',
'src/core/lib/iomgr/grpc_if_nametoindex_posix.cc',
'src/core/lib/iomgr/grpc_if_nametoindex_unsupported.cc',
'src/core/lib/iomgr/internal_errqueue.cc',
'src/core/lib/iomgr/iocp_windows.cc',
'src/core/lib/iomgr/iomgr.cc',
'src/core/lib/iomgr/iomgr_internal.cc',
'src/core/lib/iomgr/iomgr_posix.cc',
'src/core/lib/iomgr/iomgr_posix_cfstream.cc',
'src/core/lib/iomgr/iomgr_windows.cc',
'src/core/lib/iomgr/load_file.cc',
'src/core/lib/iomgr/lockfree_event.cc',
'src/core/lib/iomgr/polling_entity.cc',
'src/core/lib/iomgr/pollset.cc',
'src/core/lib/iomgr/pollset_set.cc',
'src/core/lib/iomgr/pollset_set_windows.cc',
'src/core/lib/iomgr/pollset_windows.cc',
'src/core/lib/iomgr/resolve_address.cc',
'src/core/lib/iomgr/resolve_address_posix.cc',
'src/core/lib/iomgr/resolve_address_windows.cc',
'src/core/lib/iomgr/sockaddr_utils_posix.cc',
'src/core/lib/iomgr/socket_factory_posix.cc',
'src/core/lib/iomgr/socket_mutator.cc',
'src/core/lib/iomgr/socket_utils_common_posix.cc',
'src/core/lib/iomgr/socket_utils_linux.cc',
'src/core/lib/iomgr/socket_utils_posix.cc',
'src/core/lib/iomgr/socket_utils_windows.cc',
'src/core/lib/iomgr/socket_windows.cc',
'src/core/lib/iomgr/systemd_utils.cc',
'src/core/lib/iomgr/tcp_client.cc',
'src/core/lib/iomgr/tcp_client_cfstream.cc',
'src/core/lib/iomgr/tcp_client_posix.cc',
'src/core/lib/iomgr/tcp_client_windows.cc',
'src/core/lib/iomgr/tcp_posix.cc',
'src/core/lib/iomgr/tcp_server.cc',
'src/core/lib/iomgr/tcp_server_posix.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_common.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_ifaddrs.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_noifaddrs.cc',
'src/core/lib/iomgr/tcp_server_windows.cc',
'src/core/lib/iomgr/tcp_windows.cc',
'src/core/lib/iomgr/timer.cc',
'src/core/lib/iomgr/timer_generic.cc',
'src/core/lib/iomgr/timer_heap.cc',
'src/core/lib/iomgr/timer_manager.cc',
'src/core/lib/iomgr/unix_sockets_posix.cc',
'src/core/lib/iomgr/unix_sockets_posix_noop.cc',
'src/core/lib/iomgr/vsock.cc',
'src/core/lib/iomgr/wakeup_fd_eventfd.cc',
'src/core/lib/iomgr/wakeup_fd_nospecial.cc',
'src/core/lib/iomgr/wakeup_fd_pipe.cc',
'src/core/lib/iomgr/wakeup_fd_posix.cc',
'src/core/lib/json/json_object_loader.cc',
'src/core/lib/json/json_reader.cc',
'src/core/lib/json/json_util.cc',
'src/core/lib/json/json_writer.cc',
'src/core/lib/load_balancing/lb_policy.cc',
'src/core/lib/load_balancing/lb_policy_registry.cc',
'src/core/lib/matchers/matchers.cc',
'src/core/lib/promise/activity.cc',
'src/core/lib/promise/party.cc',
'src/core/lib/promise/sleep.cc',
'src/core/lib/promise/trace.cc',
'src/core/lib/resolver/resolver.cc',
'src/core/lib/resolver/resolver_registry.cc',
'src/core/lib/resolver/server_address.cc',
'src/core/lib/resource_quota/api.cc',
'src/core/lib/resource_quota/arena.cc',
'src/core/lib/resource_quota/memory_quota.cc',
'src/core/lib/resource_quota/periodic_update.cc',
'src/core/lib/resource_quota/resource_quota.cc',
'src/core/lib/resource_quota/thread_quota.cc',
'src/core/lib/resource_quota/trace.cc',
'src/core/lib/security/authorization/audit_logging.cc',
'src/core/lib/security/authorization/authorization_policy_provider_vtable.cc',
'src/core/lib/security/authorization/evaluate_args.cc',
'src/core/lib/security/authorization/grpc_authorization_engine.cc',
'src/core/lib/security/authorization/grpc_server_authz_filter.cc',
'src/core/lib/security/authorization/matchers.cc',
'src/core/lib/security/authorization/rbac_policy.cc',
'src/core/lib/security/authorization/stdout_logger.cc',
'src/core/lib/security/certificate_provider/certificate_provider_registry.cc',
'src/core/lib/security/context/security_context.cc',
'src/core/lib/security/credentials/alts/alts_credentials.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment_linux.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment_no_op.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment_windows.cc',
'src/core/lib/security/credentials/alts/grpc_alts_credentials_client_options.cc',
'src/core/lib/security/credentials/alts/grpc_alts_credentials_options.cc',
'src/core/lib/security/credentials/alts/grpc_alts_credentials_server_options.cc',
'src/core/lib/security/credentials/call_creds_util.cc',
'src/core/lib/security/credentials/channel_creds_registry_init.cc',
'src/core/lib/security/credentials/composite/composite_credentials.cc',
'src/core/lib/security/credentials/credentials.cc',
'src/core/lib/security/credentials/external/aws_external_account_credentials.cc',
'src/core/lib/security/credentials/external/aws_request_signer.cc',
'src/core/lib/security/credentials/external/external_account_credentials.cc',
'src/core/lib/security/credentials/external/file_external_account_credentials.cc',
'src/core/lib/security/credentials/external/url_external_account_credentials.cc',
'src/core/lib/security/credentials/fake/fake_credentials.cc',
'src/core/lib/security/credentials/google_default/credentials_generic.cc',
'src/core/lib/security/credentials/google_default/google_default_credentials.cc',
'src/core/lib/security/credentials/iam/iam_credentials.cc',
'src/core/lib/security/credentials/insecure/insecure_credentials.cc',
'src/core/lib/security/credentials/jwt/json_token.cc',
'src/core/lib/security/credentials/jwt/jwt_credentials.cc',
'src/core/lib/security/credentials/jwt/jwt_verifier.cc',
'src/core/lib/security/credentials/local/local_credentials.cc',
'src/core/lib/security/credentials/oauth2/oauth2_credentials.cc',
'src/core/lib/security/credentials/plugin/plugin_credentials.cc',
'src/core/lib/security/credentials/ssl/ssl_credentials.cc',
'src/core/lib/security/credentials/tls/grpc_tls_certificate_distributor.cc',
'src/core/lib/security/credentials/tls/grpc_tls_certificate_provider.cc',
'src/core/lib/security/credentials/tls/grpc_tls_certificate_verifier.cc',
'src/core/lib/security/credentials/tls/grpc_tls_credentials_options.cc',
'src/core/lib/security/credentials/tls/tls_credentials.cc',
'src/core/lib/security/credentials/tls/tls_utils.cc',
'src/core/lib/security/credentials/xds/xds_credentials.cc',
'src/core/lib/security/security_connector/alts/alts_security_connector.cc',
'src/core/lib/security/security_connector/fake/fake_security_connector.cc',
'src/core/lib/security/security_connector/insecure/insecure_security_connector.cc',
'src/core/lib/security/security_connector/load_system_roots_fallback.cc',
'src/core/lib/security/security_connector/load_system_roots_supported.cc',
'src/core/lib/security/security_connector/local/local_security_connector.cc',
'src/core/lib/security/security_connector/security_connector.cc',
'src/core/lib/security/security_connector/ssl/ssl_security_connector.cc',
'src/core/lib/security/security_connector/ssl_utils.cc',
'src/core/lib/security/security_connector/tls/tls_security_connector.cc',
'src/core/lib/security/transport/client_auth_filter.cc',
'src/core/lib/security/transport/secure_endpoint.cc',
'src/core/lib/security/transport/security_handshaker.cc',
'src/core/lib/security/transport/server_auth_filter.cc',
'src/core/lib/security/transport/tsi_error.cc',
'src/core/lib/security/util/json_util.cc',
'src/core/lib/service_config/service_config_impl.cc',
'src/core/lib/service_config/service_config_parser.cc',
'src/core/lib/slice/b64.cc',
'src/core/lib/slice/percent_encoding.cc',
'src/core/lib/slice/slice.cc',
'src/core/lib/slice/slice_buffer.cc',
'src/core/lib/slice/slice_refcount.cc',
'src/core/lib/slice/slice_string_helpers.cc',
'src/core/lib/surface/api_trace.cc',
'src/core/lib/surface/builtins.cc',
'src/core/lib/surface/byte_buffer.cc',
'src/core/lib/surface/byte_buffer_reader.cc',
'src/core/lib/surface/call.cc',
'src/core/lib/surface/call_details.cc',
'src/core/lib/surface/call_log_batch.cc',
'src/core/lib/surface/call_trace.cc',
'src/core/lib/surface/channel.cc',
'src/core/lib/surface/channel_init.cc',
'src/core/lib/surface/channel_ping.cc',
'src/core/lib/surface/channel_stack_type.cc',
'src/core/lib/surface/completion_queue.cc',
'src/core/lib/surface/completion_queue_factory.cc',
'src/core/lib/surface/event_string.cc',
'src/core/lib/surface/init.cc',
'src/core/lib/surface/init_internally.cc',
'src/core/lib/surface/lame_client.cc',
'src/core/lib/surface/metadata_array.cc',
'src/core/lib/surface/server.cc',
'src/core/lib/surface/validate_metadata.cc',
'src/core/lib/surface/version.cc',
'src/core/lib/transport/batch_builder.cc',
'src/core/lib/transport/bdp_estimator.cc',
'src/core/lib/transport/connectivity_state.cc',
'src/core/lib/transport/error_utils.cc',
'src/core/lib/transport/handshaker.cc',
'src/core/lib/transport/handshaker_registry.cc',
'src/core/lib/transport/http_connect_handshaker.cc',
'src/core/lib/transport/metadata_batch.cc',
'src/core/lib/transport/parsed_metadata.cc',
'src/core/lib/transport/pid_controller.cc',
'src/core/lib/transport/status_conversion.cc',
'src/core/lib/transport/tcp_connect_handshaker.cc',
'src/core/lib/transport/timeout_encoding.cc',
'src/core/lib/transport/transport.cc',
'src/core/lib/transport/transport_op_string.cc',
'src/core/lib/uri/uri_parser.cc',
'src/core/plugin_registry/grpc_plugin_registry.cc',
'src/core/plugin_registry/grpc_plugin_registry_extra.cc',
'src/core/tsi/alts/crypt/aes_gcm.cc',
'src/core/tsi/alts/crypt/gsec.cc',
'src/core/tsi/alts/frame_protector/alts_counter.cc',
'src/core/tsi/alts/frame_protector/alts_crypter.cc',
'src/core/tsi/alts/frame_protector/alts_frame_protector.cc',
'src/core/tsi/alts/frame_protector/alts_record_protocol_crypter_common.cc',
'src/core/tsi/alts/frame_protector/alts_seal_privacy_integrity_crypter.cc',
'src/core/tsi/alts/frame_protector/alts_unseal_privacy_integrity_crypter.cc',
'src/core/tsi/alts/frame_protector/frame_handler.cc',
'src/core/tsi/alts/handshaker/alts_handshaker_client.cc',
'src/core/tsi/alts/handshaker/alts_shared_resource.cc',
'src/core/tsi/alts/handshaker/alts_tsi_handshaker.cc',
'src/core/tsi/alts/handshaker/alts_tsi_utils.cc',
'src/core/tsi/alts/handshaker/transport_security_common_api.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_integrity_only_record_protocol.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_privacy_integrity_record_protocol.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_record_protocol_common.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_iovec_record_protocol.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_zero_copy_grpc_protector.cc',
'src/core/tsi/fake_transport_security.cc',
'src/core/tsi/local_transport_security.cc',
'src/core/tsi/ssl/key_logging/ssl_key_logging.cc',
'src/core/tsi/ssl/session_cache/ssl_session_boringssl.cc',
'src/core/tsi/ssl/session_cache/ssl_session_cache.cc',
'src/core/tsi/ssl/session_cache/ssl_session_openssl.cc',
'src/core/tsi/ssl_transport_security.cc',
'src/core/tsi/ssl_transport_security_utils.cc',
'src/core/tsi/transport_security.cc',
'src/core/tsi/transport_security_grpc.cc',
'third_party/abseil-cpp/absl/base/internal/cycleclock.cc',
'third_party/abseil-cpp/absl/base/internal/low_level_alloc.cc',
'third_party/abseil-cpp/absl/base/internal/raw_logging.cc',
'third_party/abseil-cpp/absl/base/internal/spinlock.cc',
'third_party/abseil-cpp/absl/base/internal/spinlock_wait.cc',
'third_party/abseil-cpp/absl/base/internal/strerror.cc',
'third_party/abseil-cpp/absl/base/internal/sysinfo.cc',
'third_party/abseil-cpp/absl/base/internal/thread_identity.cc',
'third_party/abseil-cpp/absl/base/internal/throw_delegate.cc',
'third_party/abseil-cpp/absl/base/internal/unscaledcycleclock.cc',
'third_party/abseil-cpp/absl/base/log_severity.cc',
'third_party/abseil-cpp/absl/container/internal/hashtablez_sampler.cc',
'third_party/abseil-cpp/absl/container/internal/hashtablez_sampler_force_weak_definition.cc',
'third_party/abseil-cpp/absl/container/internal/raw_hash_set.cc',
'third_party/abseil-cpp/absl/crc/crc32c.cc',
'third_party/abseil-cpp/absl/crc/internal/cpu_detect.cc',
'third_party/abseil-cpp/absl/crc/internal/crc.cc',
'third_party/abseil-cpp/absl/crc/internal/crc_cord_state.cc',
'third_party/abseil-cpp/absl/crc/internal/crc_memcpy_fallback.cc',
'third_party/abseil-cpp/absl/crc/internal/crc_memcpy_x86_64.cc',
'third_party/abseil-cpp/absl/crc/internal/crc_non_temporal_memcpy.cc',
'third_party/abseil-cpp/absl/crc/internal/crc_x86_arm_combined.cc',
'third_party/abseil-cpp/absl/debugging/internal/address_is_readable.cc',
'third_party/abseil-cpp/absl/debugging/internal/demangle.cc',
'third_party/abseil-cpp/absl/debugging/internal/elf_mem_image.cc',
'third_party/abseil-cpp/absl/debugging/internal/vdso_support.cc',
'third_party/abseil-cpp/absl/debugging/stacktrace.cc',
'third_party/abseil-cpp/absl/debugging/symbolize.cc',
'third_party/abseil-cpp/absl/flags/commandlineflag.cc',
'third_party/abseil-cpp/absl/flags/flag.cc',
'third_party/abseil-cpp/absl/flags/internal/commandlineflag.cc',
'third_party/abseil-cpp/absl/flags/internal/flag.cc',
'third_party/abseil-cpp/absl/flags/internal/private_handle_accessor.cc',
'third_party/abseil-cpp/absl/flags/internal/program_name.cc',
'third_party/abseil-cpp/absl/flags/marshalling.cc',
'third_party/abseil-cpp/absl/flags/reflection.cc',
'third_party/abseil-cpp/absl/flags/usage_config.cc',
'third_party/abseil-cpp/absl/hash/internal/city.cc',
'third_party/abseil-cpp/absl/hash/internal/hash.cc',
'third_party/abseil-cpp/absl/hash/internal/low_level_hash.cc',
'third_party/abseil-cpp/absl/numeric/int128.cc',
'third_party/abseil-cpp/absl/profiling/internal/exponential_biased.cc',
'third_party/abseil-cpp/absl/random/discrete_distribution.cc',
'third_party/abseil-cpp/absl/random/gaussian_distribution.cc',
'third_party/abseil-cpp/absl/random/internal/pool_urbg.cc',
'third_party/abseil-cpp/absl/random/internal/randen.cc',
'third_party/abseil-cpp/absl/random/internal/randen_detect.cc',
'third_party/abseil-cpp/absl/random/internal/randen_hwaes.cc',
'third_party/abseil-cpp/absl/random/internal/randen_round_keys.cc',
'third_party/abseil-cpp/absl/random/internal/randen_slow.cc',
'third_party/abseil-cpp/absl/random/internal/seed_material.cc',
'third_party/abseil-cpp/absl/random/seed_gen_exception.cc',
'third_party/abseil-cpp/absl/random/seed_sequences.cc',
'third_party/abseil-cpp/absl/status/status.cc',
'third_party/abseil-cpp/absl/status/status_payload_printer.cc',
'third_party/abseil-cpp/absl/status/statusor.cc',
'third_party/abseil-cpp/absl/strings/ascii.cc',
'third_party/abseil-cpp/absl/strings/charconv.cc',
'third_party/abseil-cpp/absl/strings/cord.cc',
'third_party/abseil-cpp/absl/strings/cord_analysis.cc',
'third_party/abseil-cpp/absl/strings/cord_buffer.cc',
'third_party/abseil-cpp/absl/strings/escaping.cc',
'third_party/abseil-cpp/absl/strings/internal/charconv_bigint.cc',
'third_party/abseil-cpp/absl/strings/internal/charconv_parse.cc',
'third_party/abseil-cpp/absl/strings/internal/cord_internal.cc',
'third_party/abseil-cpp/absl/strings/internal/cord_rep_btree.cc',
'third_party/abseil-cpp/absl/strings/internal/cord_rep_btree_navigator.cc',
'third_party/abseil-cpp/absl/strings/internal/cord_rep_btree_reader.cc',
'third_party/abseil-cpp/absl/strings/internal/cord_rep_consume.cc',
'third_party/abseil-cpp/absl/strings/internal/cord_rep_crc.cc',
'third_party/abseil-cpp/absl/strings/internal/cord_rep_ring.cc',
'third_party/abseil-cpp/absl/strings/internal/cordz_functions.cc',
'third_party/abseil-cpp/absl/strings/internal/cordz_handle.cc',
'third_party/abseil-cpp/absl/strings/internal/cordz_info.cc',
'third_party/abseil-cpp/absl/strings/internal/damerau_levenshtein_distance.cc',
'third_party/abseil-cpp/absl/strings/internal/escaping.cc',
'third_party/abseil-cpp/absl/strings/internal/memutil.cc',
'third_party/abseil-cpp/absl/strings/internal/ostringstream.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/arg.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/bind.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/extension.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/float_conversion.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/output.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/parser.cc',
'third_party/abseil-cpp/absl/strings/internal/stringify_sink.cc',
'third_party/abseil-cpp/absl/strings/internal/utf8.cc',
'third_party/abseil-cpp/absl/strings/match.cc',
'third_party/abseil-cpp/absl/strings/numbers.cc',
'third_party/abseil-cpp/absl/strings/str_cat.cc',
'third_party/abseil-cpp/absl/strings/str_replace.cc',
'third_party/abseil-cpp/absl/strings/str_split.cc',
'third_party/abseil-cpp/absl/strings/string_view.cc',
'third_party/abseil-cpp/absl/strings/substitute.cc',
'third_party/abseil-cpp/absl/synchronization/barrier.cc',
'third_party/abseil-cpp/absl/synchronization/blocking_counter.cc',
'third_party/abseil-cpp/absl/synchronization/internal/create_thread_identity.cc',
'third_party/abseil-cpp/absl/synchronization/internal/graphcycles.cc',
'third_party/abseil-cpp/absl/synchronization/internal/per_thread_sem.cc',
'third_party/abseil-cpp/absl/synchronization/internal/waiter.cc',
'third_party/abseil-cpp/absl/synchronization/mutex.cc',
'third_party/abseil-cpp/absl/synchronization/notification.cc',
'third_party/abseil-cpp/absl/time/civil_time.cc',
'third_party/abseil-cpp/absl/time/clock.cc',
'third_party/abseil-cpp/absl/time/duration.cc',
'third_party/abseil-cpp/absl/time/format.cc',
'third_party/abseil-cpp/absl/time/internal/cctz/src/civil_time_detail.cc',
'third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_fixed.cc',
'third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_format.cc',
'third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_if.cc',
'third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_impl.cc',
'third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_info.cc',
'third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_libc.cc',
'third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_lookup.cc',
'third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_posix.cc',
'third_party/abseil-cpp/absl/time/internal/cctz/src/zone_info_source.cc',
'third_party/abseil-cpp/absl/time/time.cc',
'third_party/abseil-cpp/absl/types/bad_optional_access.cc',
'third_party/abseil-cpp/absl/types/bad_variant_access.cc',
'third_party/address_sorting/address_sorting.c',
'third_party/address_sorting/address_sorting_posix.c',
'third_party/address_sorting/address_sorting_windows.c',
'third_party/boringssl-with-bazel/err_data.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_bitstr.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_bool.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_d2i_fp.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_dup.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_gentm.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_i2d_fp.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_int.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_mbstr.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_object.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_octet.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_strex.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_strnid.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_time.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_type.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_utctm.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/asn1_lib.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/asn1_par.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/asn_pack.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/f_int.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/f_string.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/posix_time.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_dec.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_enc.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_fre.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_new.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_typ.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_utl.c',
'third_party/boringssl-with-bazel/src/crypto/base64/base64.c',
'third_party/boringssl-with-bazel/src/crypto/bio/bio.c',
'third_party/boringssl-with-bazel/src/crypto/bio/bio_mem.c',
'third_party/boringssl-with-bazel/src/crypto/bio/connect.c',
'third_party/boringssl-with-bazel/src/crypto/bio/fd.c',
'third_party/boringssl-with-bazel/src/crypto/bio/file.c',
'third_party/boringssl-with-bazel/src/crypto/bio/hexdump.c',
'third_party/boringssl-with-bazel/src/crypto/bio/pair.c',
'third_party/boringssl-with-bazel/src/crypto/bio/printf.c',
'third_party/boringssl-with-bazel/src/crypto/bio/socket.c',
'third_party/boringssl-with-bazel/src/crypto/bio/socket_helper.c',
'third_party/boringssl-with-bazel/src/crypto/blake2/blake2.c',
'third_party/boringssl-with-bazel/src/crypto/bn_extra/bn_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/bn_extra/convert.c',
'third_party/boringssl-with-bazel/src/crypto/buf/buf.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/asn1_compat.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/ber.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/cbb.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/cbs.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/unicode.c',
'third_party/boringssl-with-bazel/src/crypto/chacha/chacha.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/cipher_extra.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/derive_key.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_aesctrhmac.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_aesgcmsiv.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_chacha20poly1305.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_des.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_null.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_rc2.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_rc4.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_tls.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/tls_cbc.c',
'third_party/boringssl-with-bazel/src/crypto/conf/conf.c',
'third_party/boringssl-with-bazel/src/crypto/cpu_aarch64_apple.c',
'third_party/boringssl-with-bazel/src/crypto/cpu_aarch64_freebsd.c',
'third_party/boringssl-with-bazel/src/crypto/cpu_aarch64_fuchsia.c',
'third_party/boringssl-with-bazel/src/crypto/cpu_aarch64_linux.c',
'third_party/boringssl-with-bazel/src/crypto/cpu_aarch64_openbsd.c',
'third_party/boringssl-with-bazel/src/crypto/cpu_aarch64_win.c',
'third_party/boringssl-with-bazel/src/crypto/cpu_arm.c',
'third_party/boringssl-with-bazel/src/crypto/cpu_arm_freebsd.c',
'third_party/boringssl-with-bazel/src/crypto/cpu_arm_linux.c',
'third_party/boringssl-with-bazel/src/crypto/cpu_arm_openbsd.c',
'third_party/boringssl-with-bazel/src/crypto/cpu_intel.c',
'third_party/boringssl-with-bazel/src/crypto/crypto.c',
'third_party/boringssl-with-bazel/src/crypto/curve25519/curve25519.c',
'third_party/boringssl-with-bazel/src/crypto/curve25519/curve25519_64_adx.c',
'third_party/boringssl-with-bazel/src/crypto/curve25519/spake25519.c',
'third_party/boringssl-with-bazel/src/crypto/des/des.c',
'third_party/boringssl-with-bazel/src/crypto/dh_extra/dh_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/dh_extra/params.c',
'third_party/boringssl-with-bazel/src/crypto/digest_extra/digest_extra.c',
'third_party/boringssl-with-bazel/src/crypto/dsa/dsa.c',
'third_party/boringssl-with-bazel/src/crypto/dsa/dsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/ec_extra/ec_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/ec_extra/ec_derive.c',
'third_party/boringssl-with-bazel/src/crypto/ec_extra/hash_to_curve.c',
'third_party/boringssl-with-bazel/src/crypto/ecdh_extra/ecdh_extra.c',
'third_party/boringssl-with-bazel/src/crypto/ecdsa_extra/ecdsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/engine/engine.c',
'third_party/boringssl-with-bazel/src/crypto/err/err.c',
'third_party/boringssl-with-bazel/src/crypto/evp/evp.c',
'third_party/boringssl-with-bazel/src/crypto/evp/evp_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/evp_ctx.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_dsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_ec.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_ec_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_ed25519.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_ed25519_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_hkdf.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_rsa.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_rsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_x25519.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_x25519_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/pbkdf.c',
'third_party/boringssl-with-bazel/src/crypto/evp/print.c',
'third_party/boringssl-with-bazel/src/crypto/evp/scrypt.c',
'third_party/boringssl-with-bazel/src/crypto/evp/sign.c',
'third_party/boringssl-with-bazel/src/crypto/ex_data.c',
'third_party/boringssl-with-bazel/src/crypto/fipsmodule/bcm.c',
'third_party/boringssl-with-bazel/src/crypto/fipsmodule/fips_shared_support.c',
'third_party/boringssl-with-bazel/src/crypto/hpke/hpke.c',
'third_party/boringssl-with-bazel/src/crypto/hrss/hrss.c',
'third_party/boringssl-with-bazel/src/crypto/kyber/keccak.c',
'third_party/boringssl-with-bazel/src/crypto/kyber/kyber.c',
'third_party/boringssl-with-bazel/src/crypto/lhash/lhash.c',
'third_party/boringssl-with-bazel/src/crypto/mem.c',
'third_party/boringssl-with-bazel/src/crypto/obj/obj.c',
'third_party/boringssl-with-bazel/src/crypto/obj/obj_xref.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_all.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_info.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_lib.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_oth.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_pk8.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_pkey.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_x509.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_xaux.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs7/pkcs7.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs7/pkcs7_x509.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs8/p5_pbev2.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs8/pkcs8.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs8/pkcs8_x509.c',
'third_party/boringssl-with-bazel/src/crypto/poly1305/poly1305.c',
'third_party/boringssl-with-bazel/src/crypto/poly1305/poly1305_arm.c',
'third_party/boringssl-with-bazel/src/crypto/poly1305/poly1305_vec.c',
'third_party/boringssl-with-bazel/src/crypto/pool/pool.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/deterministic.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/forkunsafe.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/getentropy.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/ios.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/passive.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/rand_extra.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/trusty.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/windows.c',
'third_party/boringssl-with-bazel/src/crypto/rc4/rc4.c',
'third_party/boringssl-with-bazel/src/crypto/refcount.c',
'third_party/boringssl-with-bazel/src/crypto/rsa_extra/rsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/rsa_extra/rsa_crypt.c',
'third_party/boringssl-with-bazel/src/crypto/rsa_extra/rsa_print.c',
'third_party/boringssl-with-bazel/src/crypto/siphash/siphash.c',
'third_party/boringssl-with-bazel/src/crypto/stack/stack.c',
'third_party/boringssl-with-bazel/src/crypto/thread.c',
'third_party/boringssl-with-bazel/src/crypto/thread_none.c',
'third_party/boringssl-with-bazel/src/crypto/thread_pthread.c',
'third_party/boringssl-with-bazel/src/crypto/thread_win.c',
'third_party/boringssl-with-bazel/src/crypto/trust_token/pmbtoken.c',
'third_party/boringssl-with-bazel/src/crypto/trust_token/trust_token.c',
'third_party/boringssl-with-bazel/src/crypto/trust_token/voprf.c',
'third_party/boringssl-with-bazel/src/crypto/x509/a_digest.c',
'third_party/boringssl-with-bazel/src/crypto/x509/a_sign.c',
'third_party/boringssl-with-bazel/src/crypto/x509/a_verify.c',
'third_party/boringssl-with-bazel/src/crypto/x509/algorithm.c',
'third_party/boringssl-with-bazel/src/crypto/x509/asn1_gen.c',
'third_party/boringssl-with-bazel/src/crypto/x509/by_dir.c',
'third_party/boringssl-with-bazel/src/crypto/x509/by_file.c',
'third_party/boringssl-with-bazel/src/crypto/x509/i2d_pr.c',
'third_party/boringssl-with-bazel/src/crypto/x509/name_print.c',
'third_party/boringssl-with-bazel/src/crypto/x509/policy.c',
'third_party/boringssl-with-bazel/src/crypto/x509/rsa_pss.c',
'third_party/boringssl-with-bazel/src/crypto/x509/t_crl.c',
'third_party/boringssl-with-bazel/src/crypto/x509/t_req.c',
'third_party/boringssl-with-bazel/src/crypto/x509/t_x509.c',
'third_party/boringssl-with-bazel/src/crypto/x509/t_x509a.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_att.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_cmp.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_d2.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_def.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_ext.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_lu.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_obj.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_req.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_set.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_trs.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_txt.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_v3.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_vfy.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_vpm.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509cset.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509name.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509rset.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509spki.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_algor.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_all.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_attrib.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_crl.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_exten.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_info.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_name.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_pkey.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_pubkey.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_req.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_sig.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_spki.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_val.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_x509.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_x509a.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_akey.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_akeya.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_alt.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_bcons.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_bitst.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_conf.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_cpols.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_crld.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_enum.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_extku.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_genn.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_ia5.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_info.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_int.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_lib.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_ncons.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_ocsp.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_pcons.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_pmaps.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_prn.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_purp.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_skey.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_utl.c',
'third_party/boringssl-with-bazel/src/ssl/bio_ssl.cc',
'third_party/boringssl-with-bazel/src/ssl/d1_both.cc',
'third_party/boringssl-with-bazel/src/ssl/d1_lib.cc',
'third_party/boringssl-with-bazel/src/ssl/d1_pkt.cc',
'third_party/boringssl-with-bazel/src/ssl/d1_srtp.cc',
'third_party/boringssl-with-bazel/src/ssl/dtls_method.cc',
'third_party/boringssl-with-bazel/src/ssl/dtls_record.cc',
'third_party/boringssl-with-bazel/src/ssl/encrypted_client_hello.cc',
'third_party/boringssl-with-bazel/src/ssl/extensions.cc',
'third_party/boringssl-with-bazel/src/ssl/handoff.cc',
'third_party/boringssl-with-bazel/src/ssl/handshake.cc',
'third_party/boringssl-with-bazel/src/ssl/handshake_client.cc',
'third_party/boringssl-with-bazel/src/ssl/handshake_server.cc',
'third_party/boringssl-with-bazel/src/ssl/s3_both.cc',
'third_party/boringssl-with-bazel/src/ssl/s3_lib.cc',
'third_party/boringssl-with-bazel/src/ssl/s3_pkt.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_aead_ctx.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_asn1.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_buffer.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_cert.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_cipher.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_file.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_key_share.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_lib.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_privkey.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_session.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_stat.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_transcript.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_versions.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_x509.cc',
'third_party/boringssl-with-bazel/src/ssl/t1_enc.cc',
'third_party/boringssl-with-bazel/src/ssl/tls13_both.cc',
'third_party/boringssl-with-bazel/src/ssl/tls13_client.cc',
'third_party/boringssl-with-bazel/src/ssl/tls13_enc.cc',
'third_party/boringssl-with-bazel/src/ssl/tls13_server.cc',
'third_party/boringssl-with-bazel/src/ssl/tls_method.cc',
'third_party/boringssl-with-bazel/src/ssl/tls_record.cc',
'third_party/cares/cares/src/lib/ares__addrinfo2hostent.c',
'third_party/cares/cares/src/lib/ares__addrinfo_localhost.c',
'third_party/cares/cares/src/lib/ares__close_sockets.c',
'third_party/cares/cares/src/lib/ares__get_hostent.c',
'third_party/cares/cares/src/lib/ares__parse_into_addrinfo.c',
'third_party/cares/cares/src/lib/ares__read_line.c',
'third_party/cares/cares/src/lib/ares__readaddrinfo.c',
'third_party/cares/cares/src/lib/ares__sortaddrinfo.c',
'third_party/cares/cares/src/lib/ares__timeval.c',
'third_party/cares/cares/src/lib/ares_android.c',
'third_party/cares/cares/src/lib/ares_cancel.c',
'third_party/cares/cares/src/lib/ares_create_query.c',
'third_party/cares/cares/src/lib/ares_data.c',
'third_party/cares/cares/src/lib/ares_destroy.c',
'third_party/cares/cares/src/lib/ares_expand_name.c',
'third_party/cares/cares/src/lib/ares_expand_string.c',
'third_party/cares/cares/src/lib/ares_fds.c',
'third_party/cares/cares/src/lib/ares_free_hostent.c',
'third_party/cares/cares/src/lib/ares_free_string.c',
'third_party/cares/cares/src/lib/ares_freeaddrinfo.c',
'third_party/cares/cares/src/lib/ares_getaddrinfo.c',
'third_party/cares/cares/src/lib/ares_getenv.c',
'third_party/cares/cares/src/lib/ares_gethostbyaddr.c',
'third_party/cares/cares/src/lib/ares_gethostbyname.c',
'third_party/cares/cares/src/lib/ares_getnameinfo.c',
'third_party/cares/cares/src/lib/ares_getsock.c',
'third_party/cares/cares/src/lib/ares_init.c',
'third_party/cares/cares/src/lib/ares_library_init.c',
'third_party/cares/cares/src/lib/ares_llist.c',
'third_party/cares/cares/src/lib/ares_mkquery.c',
'third_party/cares/cares/src/lib/ares_nowarn.c',
'third_party/cares/cares/src/lib/ares_options.c',
'third_party/cares/cares/src/lib/ares_parse_a_reply.c',
'third_party/cares/cares/src/lib/ares_parse_aaaa_reply.c',
'third_party/cares/cares/src/lib/ares_parse_caa_reply.c',
'third_party/cares/cares/src/lib/ares_parse_mx_reply.c',
'third_party/cares/cares/src/lib/ares_parse_naptr_reply.c',
'third_party/cares/cares/src/lib/ares_parse_ns_reply.c',
'third_party/cares/cares/src/lib/ares_parse_ptr_reply.c',
'third_party/cares/cares/src/lib/ares_parse_soa_reply.c',
'third_party/cares/cares/src/lib/ares_parse_srv_reply.c',
'third_party/cares/cares/src/lib/ares_parse_txt_reply.c',
'third_party/cares/cares/src/lib/ares_parse_uri_reply.c',
'third_party/cares/cares/src/lib/ares_platform.c',
'third_party/cares/cares/src/lib/ares_process.c',
'third_party/cares/cares/src/lib/ares_query.c',
'third_party/cares/cares/src/lib/ares_rand.c',
'third_party/cares/cares/src/lib/ares_search.c',
'third_party/cares/cares/src/lib/ares_send.c',
'third_party/cares/cares/src/lib/ares_strcasecmp.c',
'third_party/cares/cares/src/lib/ares_strdup.c',
'third_party/cares/cares/src/lib/ares_strerror.c',
'third_party/cares/cares/src/lib/ares_strsplit.c',
'third_party/cares/cares/src/lib/ares_timeout.c',
'third_party/cares/cares/src/lib/ares_version.c',
'third_party/cares/cares/src/lib/ares_writev.c',
'third_party/cares/cares/src/lib/bitncmp.c',
'third_party/cares/cares/src/lib/inet_net_pton.c',
'third_party/cares/cares/src/lib/inet_ntop.c',
'third_party/cares/cares/src/lib/windows_port.c',
'third_party/re2/re2/bitstate.cc',
'third_party/re2/re2/compile.cc',
'third_party/re2/re2/dfa.cc',
'third_party/re2/re2/filtered_re2.cc',
'third_party/re2/re2/mimics_pcre.cc',
'third_party/re2/re2/nfa.cc',
'third_party/re2/re2/onepass.cc',
'third_party/re2/re2/parse.cc',
'third_party/re2/re2/perl_groups.cc',
'third_party/re2/re2/prefilter.cc',
'third_party/re2/re2/prefilter_tree.cc',
'third_party/re2/re2/prog.cc',
'third_party/re2/re2/re2.cc',
'third_party/re2/re2/regexp.cc',
'third_party/re2/re2/set.cc',
'third_party/re2/re2/simplify.cc',
'third_party/re2/re2/stringpiece.cc',
'third_party/re2/re2/tostring.cc',
'third_party/re2/re2/unicode_casefold.cc',
'third_party/re2/re2/unicode_groups.cc',
'third_party/re2/util/pcre.cc',
'third_party/re2/util/rune.cc',
'third_party/re2/util/strutil.cc',
'third_party/upb/upb/base/status.c',
'third_party/upb/upb/collections/array.c',
'third_party/upb/upb/collections/map.c',
'third_party/upb/upb/collections/map_sorter.c',
'third_party/upb/upb/hash/common.c',
'third_party/upb/upb/json/decode.c',
'third_party/upb/upb/json/encode.c',
'third_party/upb/upb/lex/atoi.c',
'third_party/upb/upb/lex/round_trip.c',
'third_party/upb/upb/lex/strtod.c',
'third_party/upb/upb/lex/unicode.c',
'third_party/upb/upb/mem/alloc.c',
'third_party/upb/upb/mem/arena.c',
'third_party/upb/upb/message/accessors.c',
'third_party/upb/upb/message/message.c',
'third_party/upb/upb/mini_table/common.c',
'third_party/upb/upb/mini_table/decode.c',
'third_party/upb/upb/mini_table/encode.c',
'third_party/upb/upb/mini_table/extension_registry.c',
'third_party/upb/upb/reflection/def_builder.c',
'third_party/upb/upb/reflection/def_pool.c',
'third_party/upb/upb/reflection/def_type.c',
'third_party/upb/upb/reflection/desc_state.c',
'third_party/upb/upb/reflection/enum_def.c',
'third_party/upb/upb/reflection/enum_reserved_range.c',
'third_party/upb/upb/reflection/enum_value_def.c',
'third_party/upb/upb/reflection/extension_range.c',
'third_party/upb/upb/reflection/field_def.c',
'third_party/upb/upb/reflection/file_def.c',
'third_party/upb/upb/reflection/message.c',
'third_party/upb/upb/reflection/message_def.c',
'third_party/upb/upb/reflection/message_reserved_range.c',
'third_party/upb/upb/reflection/method_def.c',
'third_party/upb/upb/reflection/oneof_def.c',
'third_party/upb/upb/reflection/service_def.c',
'third_party/upb/upb/text/encode.c',
'third_party/upb/upb/wire/decode.c',
'third_party/upb/upb/wire/decode_fast.c',
'third_party/upb/upb/wire/encode.c',
'third_party/upb/upb/wire/eps_copy_input_stream.c',
'third_party/upb/upb/wire/reader.c',
'third_party/utf8_range/naive.c',
'third_party/utf8_range/range2-neon.c',
'third_party/utf8_range/range2-sse.c',
'third_party/zlib/adler32.c',
'third_party/zlib/compress.c',
'third_party/zlib/crc32.c',
'third_party/zlib/deflate.c',
'third_party/zlib/gzclose.c',
'third_party/zlib/gzlib.c',
'third_party/zlib/gzread.c',
'third_party/zlib/gzwrite.c',
'third_party/zlib/infback.c',
'third_party/zlib/inffast.c',
'third_party/zlib/inflate.c',
'third_party/zlib/inftrees.c',
'third_party/zlib/trees.c',
'third_party/zlib/uncompr.c',
'third_party/zlib/zutil.c',
]
ASM_SOURCE_FILES = {
'crypto_apple_aarch64': [
'third_party/boringssl-with-bazel/apple-aarch64/crypto/chacha/chacha-armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/cipher_extra/chacha20_poly1305_armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/aesv8-armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/aesv8-gcm-armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/armv8-mont-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/bn-armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/ghash-neon-armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/ghashv8-armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/p256-armv8-asm-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/p256_beeu-armv8-asm-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/sha1-armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/sha256-armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/sha512-armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/vpaes-armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/test/trampoline-armv8-apple.S',
],
'crypto_apple_arm': [
'third_party/boringssl-with-bazel/apple-arm/crypto/chacha/chacha-armv4-apple.S',
'third_party/boringssl-with-bazel/apple-arm/crypto/fipsmodule/aesv8-armv7-apple.S',
'third_party/boringssl-with-bazel/apple-arm/crypto/fipsmodule/armv4-mont-apple.S',
'third_party/boringssl-with-bazel/apple-arm/crypto/fipsmodule/bsaes-armv7-apple.S',
'third_party/boringssl-with-bazel/apple-arm/crypto/fipsmodule/ghash-armv4-apple.S',
'third_party/boringssl-with-bazel/apple-arm/crypto/fipsmodule/ghashv8-armv7-apple.S',
'third_party/boringssl-with-bazel/apple-arm/crypto/fipsmodule/sha1-armv4-large-apple.S',
'third_party/boringssl-with-bazel/apple-arm/crypto/fipsmodule/sha256-armv4-apple.S',
'third_party/boringssl-with-bazel/apple-arm/crypto/fipsmodule/sha512-armv4-apple.S',
'third_party/boringssl-with-bazel/apple-arm/crypto/fipsmodule/vpaes-armv7-apple.S',
'third_party/boringssl-with-bazel/apple-arm/crypto/test/trampoline-armv4-apple.S',
],
'crypto_apple_x86': [
'third_party/boringssl-with-bazel/apple-x86/crypto/chacha/chacha-x86-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/aesni-x86-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/bn-586-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/co-586-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/ghash-ssse3-x86-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/ghash-x86-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/md5-586-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/sha1-586-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/sha256-586-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/sha512-586-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/vpaes-x86-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/x86-mont-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/test/trampoline-x86-apple.S',
],
'crypto_apple_x86_64': [
'third_party/boringssl-with-bazel/apple-x86_64/crypto/chacha/chacha-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/cipher_extra/aes128gcmsiv-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/cipher_extra/chacha20_poly1305_x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/aesni-gcm-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/aesni-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/ghash-ssse3-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/ghash-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/md5-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/p256-x86_64-asm-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/p256_beeu-x86_64-asm-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/rdrand-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/rsaz-avx2-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/sha1-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/sha256-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/sha512-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/vpaes-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/x86_64-mont-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/x86_64-mont5-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/test/trampoline-x86_64-apple.S',
'third_party/boringssl-with-bazel/src/third_party/fiat/asm/fiat_curve25519_adx_mul.S',
'third_party/boringssl-with-bazel/src/third_party/fiat/asm/fiat_curve25519_adx_square.S',
],
'crypto_asm': [
'third_party/boringssl-with-bazel/apple-aarch64/crypto/chacha/chacha-armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/cipher_extra/chacha20_poly1305_armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/aesv8-armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/aesv8-gcm-armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/armv8-mont-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/bn-armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/ghash-neon-armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/ghashv8-armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/p256-armv8-asm-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/p256_beeu-armv8-asm-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/sha1-armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/sha256-armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/sha512-armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/fipsmodule/vpaes-armv8-apple.S',
'third_party/boringssl-with-bazel/apple-aarch64/crypto/test/trampoline-armv8-apple.S',
'third_party/boringssl-with-bazel/apple-arm/crypto/chacha/chacha-armv4-apple.S',
'third_party/boringssl-with-bazel/apple-arm/crypto/fipsmodule/aesv8-armv7-apple.S',
'third_party/boringssl-with-bazel/apple-arm/crypto/fipsmodule/armv4-mont-apple.S',
'third_party/boringssl-with-bazel/apple-arm/crypto/fipsmodule/bsaes-armv7-apple.S',
'third_party/boringssl-with-bazel/apple-arm/crypto/fipsmodule/ghash-armv4-apple.S',
'third_party/boringssl-with-bazel/apple-arm/crypto/fipsmodule/ghashv8-armv7-apple.S',
'third_party/boringssl-with-bazel/apple-arm/crypto/fipsmodule/sha1-armv4-large-apple.S',
'third_party/boringssl-with-bazel/apple-arm/crypto/fipsmodule/sha256-armv4-apple.S',
'third_party/boringssl-with-bazel/apple-arm/crypto/fipsmodule/sha512-armv4-apple.S',
'third_party/boringssl-with-bazel/apple-arm/crypto/fipsmodule/vpaes-armv7-apple.S',
'third_party/boringssl-with-bazel/apple-arm/crypto/test/trampoline-armv4-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/chacha/chacha-x86-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/aesni-x86-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/bn-586-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/co-586-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/ghash-ssse3-x86-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/ghash-x86-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/md5-586-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/sha1-586-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/sha256-586-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/sha512-586-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/vpaes-x86-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/fipsmodule/x86-mont-apple.S',
'third_party/boringssl-with-bazel/apple-x86/crypto/test/trampoline-x86-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/chacha/chacha-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/cipher_extra/aes128gcmsiv-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/cipher_extra/chacha20_poly1305_x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/aesni-gcm-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/aesni-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/ghash-ssse3-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/ghash-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/md5-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/p256-x86_64-asm-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/p256_beeu-x86_64-asm-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/rdrand-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/rsaz-avx2-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/sha1-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/sha256-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/sha512-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/vpaes-x86_64-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/x86_64-mont-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/fipsmodule/x86_64-mont5-apple.S',
'third_party/boringssl-with-bazel/apple-x86_64/crypto/test/trampoline-x86_64-apple.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/chacha/chacha-armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/cipher_extra/chacha20_poly1305_armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/aesv8-armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/aesv8-gcm-armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/armv8-mont-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/bn-armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/ghash-neon-armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/ghashv8-armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/p256-armv8-asm-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/p256_beeu-armv8-asm-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/sha1-armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/sha256-armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/sha512-armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/vpaes-armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/test/trampoline-armv8-linux.S',
'third_party/boringssl-with-bazel/linux-arm/crypto/chacha/chacha-armv4-linux.S',
'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/aesv8-armv7-linux.S',
'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/armv4-mont-linux.S',
'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/bsaes-armv7-linux.S',
'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/ghash-armv4-linux.S',
'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/ghashv8-armv7-linux.S',
'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/sha1-armv4-large-linux.S',
'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/sha256-armv4-linux.S',
'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/sha512-armv4-linux.S',
'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/vpaes-armv7-linux.S',
'third_party/boringssl-with-bazel/linux-arm/crypto/test/trampoline-armv4-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/chacha/chacha-x86-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/aesni-x86-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/bn-586-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/co-586-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/ghash-ssse3-x86-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/ghash-x86-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/md5-586-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/sha1-586-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/sha256-586-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/sha512-586-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/vpaes-x86-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/x86-mont-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/test/trampoline-x86-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/chacha/chacha-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/cipher_extra/aes128gcmsiv-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/cipher_extra/chacha20_poly1305_x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/aesni-gcm-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/aesni-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/ghash-ssse3-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/ghash-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/md5-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/p256-x86_64-asm-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/p256_beeu-x86_64-asm-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/rdrand-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/rsaz-avx2-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/sha1-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/sha256-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/sha512-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/vpaes-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/x86_64-mont-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/x86_64-mont5-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/test/trampoline-x86_64-linux.S',
'third_party/boringssl-with-bazel/src/crypto/curve25519/asm/x25519-asm-arm.S',
'third_party/boringssl-with-bazel/src/crypto/hrss/asm/poly_rq_mul.S',
'third_party/boringssl-with-bazel/src/crypto/poly1305/poly1305_arm_asm.S',
'third_party/boringssl-with-bazel/src/third_party/fiat/asm/fiat_curve25519_adx_mul.S',
'third_party/boringssl-with-bazel/src/third_party/fiat/asm/fiat_curve25519_adx_square.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/chacha/chacha-armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/cipher_extra/chacha20_poly1305_armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/aesv8-armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/aesv8-gcm-armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/armv8-mont-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/bn-armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/ghash-neon-armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/ghashv8-armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/p256-armv8-asm-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/p256_beeu-armv8-asm-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/sha1-armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/sha256-armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/sha512-armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/vpaes-armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/test/trampoline-armv8-win.S',
],
'crypto_linux_aarch64': [
'third_party/boringssl-with-bazel/linux-aarch64/crypto/chacha/chacha-armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/cipher_extra/chacha20_poly1305_armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/aesv8-armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/aesv8-gcm-armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/armv8-mont-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/bn-armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/ghash-neon-armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/ghashv8-armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/p256-armv8-asm-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/p256_beeu-armv8-asm-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/sha1-armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/sha256-armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/sha512-armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/fipsmodule/vpaes-armv8-linux.S',
'third_party/boringssl-with-bazel/linux-aarch64/crypto/test/trampoline-armv8-linux.S',
],
'crypto_linux_arm': [
'third_party/boringssl-with-bazel/linux-arm/crypto/chacha/chacha-armv4-linux.S',
'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/aesv8-armv7-linux.S',
'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/armv4-mont-linux.S',
'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/bsaes-armv7-linux.S',
'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/ghash-armv4-linux.S',
'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/ghashv8-armv7-linux.S',
'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/sha1-armv4-large-linux.S',
'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/sha256-armv4-linux.S',
'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/sha512-armv4-linux.S',
'third_party/boringssl-with-bazel/linux-arm/crypto/fipsmodule/vpaes-armv7-linux.S',
'third_party/boringssl-with-bazel/linux-arm/crypto/test/trampoline-armv4-linux.S',
'third_party/boringssl-with-bazel/src/crypto/curve25519/asm/x25519-asm-arm.S',
'third_party/boringssl-with-bazel/src/crypto/poly1305/poly1305_arm_asm.S',
],
'crypto_linux_x86': [
'third_party/boringssl-with-bazel/linux-x86/crypto/chacha/chacha-x86-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/aesni-x86-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/bn-586-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/co-586-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/ghash-ssse3-x86-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/ghash-x86-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/md5-586-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/sha1-586-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/sha256-586-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/sha512-586-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/vpaes-x86-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/fipsmodule/x86-mont-linux.S',
'third_party/boringssl-with-bazel/linux-x86/crypto/test/trampoline-x86-linux.S',
],
'crypto_linux_x86_64': [
'third_party/boringssl-with-bazel/linux-x86_64/crypto/chacha/chacha-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/cipher_extra/aes128gcmsiv-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/cipher_extra/chacha20_poly1305_x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/aesni-gcm-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/aesni-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/ghash-ssse3-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/ghash-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/md5-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/p256-x86_64-asm-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/p256_beeu-x86_64-asm-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/rdrand-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/rsaz-avx2-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/sha1-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/sha256-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/sha512-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/vpaes-x86_64-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/x86_64-mont-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/fipsmodule/x86_64-mont5-linux.S',
'third_party/boringssl-with-bazel/linux-x86_64/crypto/test/trampoline-x86_64-linux.S',
'third_party/boringssl-with-bazel/src/crypto/hrss/asm/poly_rq_mul.S',
'third_party/boringssl-with-bazel/src/third_party/fiat/asm/fiat_curve25519_adx_mul.S',
'third_party/boringssl-with-bazel/src/third_party/fiat/asm/fiat_curve25519_adx_square.S',
],
'crypto_nasm': [
'third_party/boringssl-with-bazel/win-x86/crypto/chacha/chacha-x86-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/aesni-x86-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/bn-586-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/co-586-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/ghash-ssse3-x86-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/ghash-x86-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/md5-586-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/sha1-586-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/sha256-586-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/sha512-586-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/vpaes-x86-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/x86-mont-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/test/trampoline-x86-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/chacha/chacha-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/cipher_extra/aes128gcmsiv-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/cipher_extra/chacha20_poly1305_x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/aesni-gcm-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/aesni-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/ghash-ssse3-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/ghash-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/md5-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/p256-x86_64-asm-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/p256_beeu-x86_64-asm-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/rdrand-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/rsaz-avx2-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/sha1-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/sha256-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/sha512-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/vpaes-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/x86_64-mont-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/x86_64-mont5-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/test/trampoline-x86_64-win.asm',
],
'crypto_win_aarch64': [
'third_party/boringssl-with-bazel/win-aarch64/crypto/chacha/chacha-armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/cipher_extra/chacha20_poly1305_armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/aesv8-armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/aesv8-gcm-armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/armv8-mont-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/bn-armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/ghash-neon-armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/ghashv8-armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/p256-armv8-asm-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/p256_beeu-armv8-asm-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/sha1-armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/sha256-armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/sha512-armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/fipsmodule/vpaes-armv8-win.S',
'third_party/boringssl-with-bazel/win-aarch64/crypto/test/trampoline-armv8-win.S',
],
'crypto_win_x86': [
'third_party/boringssl-with-bazel/win-x86/crypto/chacha/chacha-x86-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/aesni-x86-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/bn-586-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/co-586-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/ghash-ssse3-x86-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/ghash-x86-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/md5-586-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/sha1-586-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/sha256-586-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/sha512-586-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/vpaes-x86-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/fipsmodule/x86-mont-win.asm',
'third_party/boringssl-with-bazel/win-x86/crypto/test/trampoline-x86-win.asm',
],
'crypto_win_x86_64': [
'third_party/boringssl-with-bazel/win-x86_64/crypto/chacha/chacha-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/cipher_extra/aes128gcmsiv-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/cipher_extra/chacha20_poly1305_x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/aesni-gcm-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/aesni-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/ghash-ssse3-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/ghash-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/md5-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/p256-x86_64-asm-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/p256_beeu-x86_64-asm-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/rdrand-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/rsaz-avx2-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/sha1-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/sha256-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/sha512-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/vpaes-x86_64-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/x86_64-mont-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/fipsmodule/x86_64-mont5-win.asm',
'third_party/boringssl-with-bazel/win-x86_64/crypto/test/trampoline-x86_64-win.asm',
],
}
| 117,427
| 66.956019
| 149
|
py
|
grpc
|
grpc-master/src/python/grpcio/support.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from distutils import errors
import os
import os.path
import shutil
import sys
import tempfile
import commands
C_PYTHON_DEV = """
#include <Python.h>
int main(int argc, char **argv) { return 0; }
"""
C_PYTHON_DEV_ERROR_MESSAGE = """
Could not find <Python.h>. This could mean the following:
* You're on Ubuntu and haven't run `apt-get install <PY_REPR>-dev`.
* You're on RHEL/Fedora and haven't run `yum install <PY_REPR>-devel` or
`dnf install <PY_REPR>-devel` (make sure you also have redhat-rpm-config
installed)
* You're on Mac OS X and the usual Python framework was somehow corrupted
(check your environment variables or try re-installing?)
* You're on Windows and your Python installation was somehow corrupted
(check your environment variables or try re-installing?)
"""
if sys.version_info[0] == 2:
PYTHON_REPRESENTATION = "python"
elif sys.version_info[0] == 3:
PYTHON_REPRESENTATION = "python3"
else:
raise NotImplementedError("Unsupported Python version: %s" % sys.version)
C_CHECKS = {
C_PYTHON_DEV: C_PYTHON_DEV_ERROR_MESSAGE.replace(
"<PY_REPR>", PYTHON_REPRESENTATION
),
}
def _compile(compiler, source_string):
tempdir = tempfile.mkdtemp()
cpath = os.path.join(tempdir, "a.c")
with open(cpath, "w") as cfile:
cfile.write(source_string)
try:
compiler.compile([cpath])
except errors.CompileError as error:
return error
finally:
shutil.rmtree(tempdir)
def _expect_compile(compiler, source_string, error_message):
if _compile(compiler, source_string) is not None:
sys.stderr.write(error_message)
raise commands.CommandError(
"Diagnostics found a compilation environment issue:\n{}".format(
error_message
)
)
def diagnose_compile_error(build_ext, error):
"""Attempt to diagnose an error during compilation."""
for c_check, message in C_CHECKS.items():
_expect_compile(build_ext.compiler, c_check, message)
python_sources = [
source
for source in build_ext.get_source_files()
if source.startswith("./src/python") and source.endswith("c")
]
for source in python_sources:
if not os.path.isfile(source):
raise commands.CommandError(
(
"Diagnostics found a missing Python extension source"
" file:\n{}\n\nThis is usually because the Cython sources"
" haven't been transpiled into C yet and you're building"
" from source.\nTry setting the environment variable"
" `GRPC_PYTHON_BUILD_WITH_CYTHON=1` when invoking"
" `setup.py` or when using `pip`, e.g.:\n\npip install"
" -rrequirements.txt\nGRPC_PYTHON_BUILD_WITH_CYTHON=1 pip"
" install ."
).format(source)
)
def diagnose_attribute_error(build_ext, error):
if any("_needs_stub" in arg for arg in error.args):
raise commands.CommandError(
"We expect a missing `_needs_stub` attribute from older versions of"
" setuptools. Consider upgrading setuptools."
)
_ERROR_DIAGNOSES = {
errors.CompileError: diagnose_compile_error,
AttributeError: diagnose_attribute_error,
}
def diagnose_build_ext_error(build_ext, error, formatted):
diagnostic = _ERROR_DIAGNOSES.get(type(error))
if diagnostic is None:
raise commands.CommandError(
"\n\nWe could not diagnose your build failure. If you are unable to"
" proceed, please file an issue at http://www.github.com/grpc/grpc"
" with `[Python install]` in the title; please attach the whole log"
" (including everything that may have appeared above the Python"
" backtrace).\n\n{}".format(formatted)
)
else:
diagnostic(build_ext, error)
| 4,520
| 34.598425
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio/commands.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides distutils command classes for the GRPC Python setup process."""
# NOTE(https://github.com/grpc/grpc/issues/24028): allow setuptools to monkey
# patch distutils
import setuptools # isort:skip
import glob
import os
import os.path
import shutil
import subprocess
import sys
import sysconfig
import traceback
from setuptools.command import build_ext
from setuptools.command import build_py
import support
PYTHON_STEM = os.path.dirname(os.path.abspath(__file__))
GRPC_STEM = os.path.abspath(PYTHON_STEM + "../../../../")
PROTO_STEM = os.path.join(GRPC_STEM, "src", "proto")
PROTO_GEN_STEM = os.path.join(GRPC_STEM, "src", "python", "gens")
CYTHON_STEM = os.path.join(PYTHON_STEM, "grpc", "_cython")
class CommandError(Exception):
"""Simple exception class for GRPC custom commands."""
# TODO(atash): Remove this once PyPI has better Linux bdist support. See
# https://bitbucket.org/pypa/pypi/issues/120/binary-wheels-for-linux-are-not-supported
def _get_grpc_custom_bdist(decorated_basename, target_bdist_basename):
"""Returns a string path to a bdist file for Linux to install.
If we can retrieve a pre-compiled bdist from online, uses it. Else, emits a
warning and builds from source.
"""
# TODO(atash): somehow the name that's returned from `wheel` is different
# between different versions of 'wheel' (but from a compatibility standpoint,
# the names are compatible); we should have some way of determining name
# compatibility in the same way `wheel` does to avoid having to rename all of
# the custom wheels that we build/upload to GCS.
# Break import style to ensure that setup.py has had a chance to install the
# relevant package.
from urllib import request
decorated_path = decorated_basename + GRPC_CUSTOM_BDIST_EXT
try:
url = BINARIES_REPOSITORY + "/{target}".format(target=decorated_path)
bdist_data = request.urlopen(url).read()
except IOError as error:
raise CommandError(
"{}\n\nCould not find the bdist {}: {}".format(
traceback.format_exc(), decorated_path, error.message
)
)
# Our chosen local bdist path.
bdist_path = target_bdist_basename + GRPC_CUSTOM_BDIST_EXT
try:
with open(bdist_path, "w") as bdist_file:
bdist_file.write(bdist_data)
except IOError as error:
raise CommandError(
"{}\n\nCould not write grpcio bdist: {}".format(
traceback.format_exc(), error.message
)
)
return bdist_path
class SphinxDocumentation(setuptools.Command):
"""Command to generate documentation via sphinx."""
description = "generate sphinx documentation"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
# We import here to ensure that setup.py has had a chance to install the
# relevant package eggs first.
import sphinx.cmd.build
source_dir = os.path.join(GRPC_STEM, "doc", "python", "sphinx")
target_dir = os.path.join(GRPC_STEM, "doc", "build")
exit_code = sphinx.cmd.build.build_main(
["-b", "html", "-W", "--keep-going", source_dir, target_dir]
)
if exit_code != 0:
raise CommandError(
"Documentation generation has warnings or errors"
)
class BuildProjectMetadata(setuptools.Command):
"""Command to generate project metadata in a module."""
description = "build grpcio project metadata files"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
with open(
os.path.join(PYTHON_STEM, "grpc/_grpcio_metadata.py"), "w"
) as module_file:
module_file.write(
'__version__ = """{}"""'.format(self.distribution.get_version())
)
class BuildPy(build_py.build_py):
"""Custom project build command."""
def run(self):
self.run_command("build_project_metadata")
build_py.build_py.run(self)
def _poison_extensions(extensions, message):
"""Includes a file that will always fail to compile in all extensions."""
poison_filename = os.path.join(PYTHON_STEM, "poison.c")
with open(poison_filename, "w") as poison:
poison.write("#error {}".format(message))
for extension in extensions:
extension.sources = [poison_filename]
def check_and_update_cythonization(extensions):
"""Replace .pyx files with their generated counterparts and return whether or
not cythonization still needs to occur."""
for extension in extensions:
generated_pyx_sources = []
other_sources = []
for source in extension.sources:
base, file_ext = os.path.splitext(source)
if file_ext == ".pyx":
generated_pyx_source = next(
(
base + gen_ext
for gen_ext in (
".c",
".cpp",
)
if os.path.isfile(base + gen_ext)
),
None,
)
if generated_pyx_source:
generated_pyx_sources.append(generated_pyx_source)
else:
sys.stderr.write("Cython-generated files are missing...\n")
return False
else:
other_sources.append(source)
extension.sources = generated_pyx_sources + other_sources
sys.stderr.write("Found cython-generated files...\n")
return True
def try_cythonize(extensions, linetracing=False, mandatory=True):
"""Attempt to cythonize the extensions.
Args:
extensions: A list of `distutils.extension.Extension`.
linetracing: A bool indicating whether or not to enable linetracing.
mandatory: Whether or not having Cython-generated files is mandatory. If it
is, extensions will be poisoned when they can't be fully generated.
"""
try:
# Break import style to ensure we have access to Cython post-setup_requires
import Cython.Build
except ImportError:
if mandatory:
sys.stderr.write(
"This package needs to generate C files with Cython but it"
" cannot. Poisoning extension sources to disallow extension"
" commands..."
)
_poison_extensions(
extensions,
(
"Extensions have been poisoned due to missing"
" Cython-generated code."
),
)
return extensions
cython_compiler_directives = {}
if linetracing:
additional_define_macros = [("CYTHON_TRACE_NOGIL", "1")]
cython_compiler_directives["linetrace"] = True
return Cython.Build.cythonize(
extensions,
include_path=[
include_dir
for extension in extensions
for include_dir in extension.include_dirs
]
+ [CYTHON_STEM],
compiler_directives=cython_compiler_directives,
)
class BuildExt(build_ext.build_ext):
"""Custom build_ext command to enable compiler-specific flags."""
C_OPTIONS = {
"unix": ("-pthread",),
"msvc": (),
}
LINK_OPTIONS = {}
def get_ext_filename(self, ext_name):
# since python3.5, python extensions' shared libraries use a suffix that corresponds to the value
# of sysconfig.get_config_var('EXT_SUFFIX') and contains info about the architecture the library targets.
# E.g. on x64 linux the suffix is ".cpython-XYZ-x86_64-linux-gnu.so"
# When crosscompiling python wheels, we need to be able to override this suffix
# so that the resulting file name matches the target architecture and we end up with a well-formed
# wheel.
filename = build_ext.build_ext.get_ext_filename(self, ext_name)
orig_ext_suffix = sysconfig.get_config_var("EXT_SUFFIX")
new_ext_suffix = os.getenv("GRPC_PYTHON_OVERRIDE_EXT_SUFFIX")
if new_ext_suffix and filename.endswith(orig_ext_suffix):
filename = filename[: -len(orig_ext_suffix)] + new_ext_suffix
return filename
def build_extensions(self):
def compiler_ok_with_extra_std():
"""Test if default compiler is okay with specifying c++ version
when invoked in C mode. GCC is okay with this, while clang is not.
"""
try:
# TODO(lidiz) Remove the generated a.out for success tests.
cc = os.environ.get("CC", "cc")
cc_test = subprocess.Popen(
[cc, "-x", "c", "-std=c++14", "-"],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
_, cc_err = cc_test.communicate(input=b"int main(){return 0;}")
return not "invalid argument" in str(cc_err)
except:
sys.stderr.write(
"Non-fatal exception:" + traceback.format_exc() + "\n"
)
return False
# This special conditioning is here due to difference of compiler
# behavior in gcc and clang. The clang doesn't take --stdc++11
# flags but gcc does. Since the setuptools of Python only support
# all C or all C++ compilation, the mix of C and C++ will crash.
# *By default*, macOS and FreBSD use clang and Linux use gcc
#
# If we are not using a permissive compiler that's OK with being
# passed wrong std flags, swap out compile function by adding a filter
# for it.
if not compiler_ok_with_extra_std():
old_compile = self.compiler._compile
def new_compile(obj, src, ext, cc_args, extra_postargs, pp_opts):
if src.endswith(".c"):
extra_postargs = [
arg for arg in extra_postargs if not "-std=c++" in arg
]
elif src.endswith(".cc") or src.endswith(".cpp"):
extra_postargs = [
arg for arg in extra_postargs if not "-std=gnu99" in arg
]
return old_compile(
obj, src, ext, cc_args, extra_postargs, pp_opts
)
self.compiler._compile = new_compile
compiler = self.compiler.compiler_type
if compiler in BuildExt.C_OPTIONS:
for extension in self.extensions:
extension.extra_compile_args += list(
BuildExt.C_OPTIONS[compiler]
)
if compiler in BuildExt.LINK_OPTIONS:
for extension in self.extensions:
extension.extra_link_args += list(
BuildExt.LINK_OPTIONS[compiler]
)
if not check_and_update_cythonization(self.extensions):
self.extensions = try_cythonize(self.extensions)
try:
build_ext.build_ext.build_extensions(self)
except Exception as error:
formatted_exception = traceback.format_exc()
support.diagnose_build_ext_error(self, error, formatted_exception)
raise CommandError(
"Failed `build_ext` step:\n{}".format(formatted_exception)
)
class Gather(setuptools.Command):
"""Command to gather project dependencies."""
description = "gather dependencies for grpcio"
user_options = [
("test", "t", "flag indicating to gather test dependencies"),
("install", "i", "flag indicating to gather install dependencies"),
]
def initialize_options(self):
self.test = False
self.install = False
def finalize_options(self):
# distutils requires this override.
pass
def run(self):
if self.install and self.distribution.install_requires:
self.distribution.fetch_build_eggs(
self.distribution.install_requires
)
if self.test and self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)
class Clean(setuptools.Command):
"""Command to clean build artifacts."""
description = "Clean build artifacts."
user_options = [
("all", "a", "a phony flag to allow our script to continue"),
]
_FILE_PATTERNS = (
"python_build",
"src/python/grpcio/__pycache__/",
"src/python/grpcio/grpc/_cython/cygrpc.cpp",
"src/python/grpcio/grpc/_cython/*.so",
"src/python/grpcio/grpcio.egg-info/",
)
_CURRENT_DIRECTORY = os.path.normpath(
os.path.join(os.path.dirname(os.path.realpath(__file__)), "../../..")
)
def initialize_options(self):
self.all = False
def finalize_options(self):
pass
def run(self):
for path_spec in self._FILE_PATTERNS:
this_glob = os.path.normpath(
os.path.join(Clean._CURRENT_DIRECTORY, path_spec)
)
abs_paths = glob.glob(this_glob)
for path in abs_paths:
if not str(path).startswith(Clean._CURRENT_DIRECTORY):
raise ValueError(
"Cowardly refusing to delete {}.".format(path)
)
print("Removing {}".format(os.path.relpath(path)))
if os.path.isfile(path):
os.remove(str(path))
else:
shutil.rmtree(str(path))
| 14,375
| 35.956298
| 113
|
py
|
grpc
|
grpc-master/src/python/grpcio/_parallel_compile_patch.py
|
# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Patches the compile() to allow enable parallel compilation of C/C++.
build_ext has lots of C/C++ files and normally them one by one.
Enabling parallel build helps a lot.
"""
import distutils.ccompiler
import os
try:
BUILD_EXT_COMPILER_JOBS = int(
os.environ["GRPC_PYTHON_BUILD_EXT_COMPILER_JOBS"]
)
except KeyError:
import multiprocessing
BUILD_EXT_COMPILER_JOBS = multiprocessing.cpu_count()
except ValueError:
BUILD_EXT_COMPILER_JOBS = 1
# monkey-patch for parallel compilation
def _parallel_compile(
self,
sources,
output_dir=None,
macros=None,
include_dirs=None,
debug=0,
extra_preargs=None,
extra_postargs=None,
depends=None,
):
# setup the same way as distutils.ccompiler.CCompiler
# https://github.com/python/cpython/blob/31368a4f0e531c19affe2a1becd25fc316bc7501/Lib/distutils/ccompiler.py#L564
macros, objects, extra_postargs, pp_opts, build = self._setup_compile(
str(output_dir), macros, include_dirs, sources, depends, extra_postargs
)
cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)
def _compile_single_file(obj):
try:
src, ext = build[obj]
except KeyError:
return
self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
# run compilation of individual files in parallel
import multiprocessing.pool
multiprocessing.pool.ThreadPool(BUILD_EXT_COMPILER_JOBS).map(
_compile_single_file, objects
)
return objects
def monkeypatch_compile_maybe():
"""Monkeypatching is dumb, but the build speed gain is worth it."""
if BUILD_EXT_COMPILER_JOBS > 1:
distutils.ccompiler.CCompiler.compile = _parallel_compile
| 2,311
| 30.243243
| 117
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/_utilities.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Internal utilities for gRPC Python."""
import collections
import logging
import threading
import time
from typing import Callable, Dict, Optional, Sequence
import grpc # pytype: disable=pyi-error
from grpc import _common # pytype: disable=pyi-error
from grpc._typing import DoneCallbackType
_LOGGER = logging.getLogger(__name__)
_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE = (
'Exception calling connectivity future "done" callback!'
)
class RpcMethodHandler(
collections.namedtuple(
"_RpcMethodHandler",
(
"request_streaming",
"response_streaming",
"request_deserializer",
"response_serializer",
"unary_unary",
"unary_stream",
"stream_unary",
"stream_stream",
),
),
grpc.RpcMethodHandler,
):
pass
class DictionaryGenericHandler(grpc.ServiceRpcHandler):
_name: str
_method_handlers: Dict[str, grpc.RpcMethodHandler]
def __init__(
self, service: str, method_handlers: Dict[str, grpc.RpcMethodHandler]
):
self._name = service
self._method_handlers = {
_common.fully_qualified_method(service, method): method_handler
for method, method_handler in method_handlers.items()
}
def service_name(self) -> str:
return self._name
def service(
self, handler_call_details: grpc.HandlerCallDetails
) -> Optional[grpc.RpcMethodHandler]:
details_method = handler_call_details.method
return self._method_handlers.get(
details_method
) # pytype: disable=attribute-error
class _ChannelReadyFuture(grpc.Future):
_condition: threading.Condition
_channel: grpc.Channel
_matured: bool
_cancelled: bool
_done_callbacks: Sequence[Callable]
def __init__(self, channel: grpc.Channel):
self._condition = threading.Condition()
self._channel = channel
self._matured = False
self._cancelled = False
self._done_callbacks = []
def _block(self, timeout: Optional[float]) -> None:
until = None if timeout is None else time.time() + timeout
with self._condition:
while True:
if self._cancelled:
raise grpc.FutureCancelledError()
elif self._matured:
return
else:
if until is None:
self._condition.wait()
else:
remaining = until - time.time()
if remaining < 0:
raise grpc.FutureTimeoutError()
else:
self._condition.wait(timeout=remaining)
def _update(self, connectivity: Optional[grpc.ChannelConnectivity]) -> None:
with self._condition:
if (
not self._cancelled
and connectivity is grpc.ChannelConnectivity.READY
):
self._matured = True
self._channel.unsubscribe(self._update)
self._condition.notify_all()
done_callbacks = tuple(self._done_callbacks)
self._done_callbacks = None
else:
return
for done_callback in done_callbacks:
try:
done_callback(self)
except Exception: # pylint: disable=broad-except
_LOGGER.exception(_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE)
def cancel(self) -> bool:
with self._condition:
if not self._matured:
self._cancelled = True
self._channel.unsubscribe(self._update)
self._condition.notify_all()
done_callbacks = tuple(self._done_callbacks)
self._done_callbacks = None
else:
return False
for done_callback in done_callbacks:
try:
done_callback(self)
except Exception: # pylint: disable=broad-except
_LOGGER.exception(_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE)
return True
def cancelled(self) -> bool:
with self._condition:
return self._cancelled
def running(self) -> bool:
with self._condition:
return not self._cancelled and not self._matured
def done(self) -> bool:
with self._condition:
return self._cancelled or self._matured
def result(self, timeout: Optional[float] = None) -> None:
self._block(timeout)
def exception(self, timeout: Optional[float] = None) -> None:
self._block(timeout)
def traceback(self, timeout: Optional[float] = None) -> None:
self._block(timeout)
def add_done_callback(self, fn: DoneCallbackType):
with self._condition:
if not self._cancelled and not self._matured:
self._done_callbacks.append(fn)
return
fn(self)
def start(self):
with self._condition:
self._channel.subscribe(self._update, try_to_connect=True)
def __del__(self):
with self._condition:
if not self._cancelled and not self._matured:
self._channel.unsubscribe(self._update)
def channel_ready_future(channel: grpc.Channel) -> _ChannelReadyFuture:
ready_future = _ChannelReadyFuture(channel)
ready_future.start()
return ready_future
| 6,063
| 30.583333
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/_simple_stubs.py
|
# Copyright 2020 The gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions that obviate explicit stubs and explicit channels."""
import collections
import datetime
import logging
import os
import threading
from typing import (
Any,
AnyStr,
Callable,
Dict,
Iterator,
Optional,
Sequence,
Tuple,
TypeVar,
Union,
)
import grpc
from grpc.experimental import experimental_api
RequestType = TypeVar("RequestType")
ResponseType = TypeVar("ResponseType")
OptionsType = Sequence[Tuple[str, str]]
CacheKey = Tuple[
str,
OptionsType,
Optional[grpc.ChannelCredentials],
Optional[grpc.Compression],
]
_LOGGER = logging.getLogger(__name__)
_EVICTION_PERIOD_KEY = "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS"
if _EVICTION_PERIOD_KEY in os.environ:
_EVICTION_PERIOD = datetime.timedelta(
seconds=float(os.environ[_EVICTION_PERIOD_KEY])
)
_LOGGER.debug(
"Setting managed channel eviction period to %s", _EVICTION_PERIOD
)
else:
_EVICTION_PERIOD = datetime.timedelta(minutes=10)
_MAXIMUM_CHANNELS_KEY = "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM"
if _MAXIMUM_CHANNELS_KEY in os.environ:
_MAXIMUM_CHANNELS = int(os.environ[_MAXIMUM_CHANNELS_KEY])
_LOGGER.debug("Setting maximum managed channels to %d", _MAXIMUM_CHANNELS)
else:
_MAXIMUM_CHANNELS = 2**8
_DEFAULT_TIMEOUT_KEY = "GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS"
if _DEFAULT_TIMEOUT_KEY in os.environ:
_DEFAULT_TIMEOUT = float(os.environ[_DEFAULT_TIMEOUT_KEY])
_LOGGER.debug("Setting default timeout seconds to %f", _DEFAULT_TIMEOUT)
else:
_DEFAULT_TIMEOUT = 60.0
def _create_channel(
target: str,
options: Sequence[Tuple[str, str]],
channel_credentials: Optional[grpc.ChannelCredentials],
compression: Optional[grpc.Compression],
) -> grpc.Channel:
_LOGGER.debug(
f"Creating secure channel with credentials '{channel_credentials}', "
+ f"options '{options}' and compression '{compression}'"
)
return grpc.secure_channel(
target,
credentials=channel_credentials,
options=options,
compression=compression,
)
class ChannelCache:
# NOTE(rbellevi): Untyped due to reference cycle.
_singleton = None
_lock: threading.RLock = threading.RLock()
_condition: threading.Condition = threading.Condition(lock=_lock)
_eviction_ready: threading.Event = threading.Event()
_mapping: Dict[CacheKey, Tuple[grpc.Channel, datetime.datetime]]
_eviction_thread: threading.Thread
def __init__(self):
self._mapping = collections.OrderedDict()
self._eviction_thread = threading.Thread(
target=ChannelCache._perform_evictions, daemon=True
)
self._eviction_thread.start()
@staticmethod
def get():
with ChannelCache._lock:
if ChannelCache._singleton is None:
ChannelCache._singleton = ChannelCache()
ChannelCache._eviction_ready.wait()
return ChannelCache._singleton
def _evict_locked(self, key: CacheKey):
channel, _ = self._mapping.pop(key)
_LOGGER.debug(
"Evicting channel %s with configuration %s.", channel, key
)
channel.close()
del channel
@staticmethod
def _perform_evictions():
while True:
with ChannelCache._lock:
ChannelCache._eviction_ready.set()
if not ChannelCache._singleton._mapping:
ChannelCache._condition.wait()
elif len(ChannelCache._singleton._mapping) > _MAXIMUM_CHANNELS:
key = next(iter(ChannelCache._singleton._mapping.keys()))
ChannelCache._singleton._evict_locked(key)
# And immediately reevaluate.
else:
key, (_, eviction_time) = next(
iter(ChannelCache._singleton._mapping.items())
)
now = datetime.datetime.now()
if eviction_time <= now:
ChannelCache._singleton._evict_locked(key)
continue
else:
time_to_eviction = (eviction_time - now).total_seconds()
# NOTE: We aim to *eventually* coalesce to a state in
# which no overdue channels are in the cache and the
# length of the cache is longer than _MAXIMUM_CHANNELS.
# We tolerate momentary states in which these two
# criteria are not met.
ChannelCache._condition.wait(timeout=time_to_eviction)
def get_channel(
self,
target: str,
options: Sequence[Tuple[str, str]],
channel_credentials: Optional[grpc.ChannelCredentials],
insecure: bool,
compression: Optional[grpc.Compression],
) -> grpc.Channel:
if insecure and channel_credentials:
raise ValueError(
"The insecure option is mutually exclusive with "
+ "the channel_credentials option. Please use one "
+ "or the other."
)
if insecure:
channel_credentials = (
grpc.experimental.insecure_channel_credentials()
)
elif channel_credentials is None:
_LOGGER.debug("Defaulting to SSL channel credentials.")
channel_credentials = grpc.ssl_channel_credentials()
key = (target, options, channel_credentials, compression)
with self._lock:
channel_data = self._mapping.get(key, None)
if channel_data is not None:
channel = channel_data[0]
self._mapping.pop(key)
self._mapping[key] = (
channel,
datetime.datetime.now() + _EVICTION_PERIOD,
)
return channel
else:
channel = _create_channel(
target, options, channel_credentials, compression
)
self._mapping[key] = (
channel,
datetime.datetime.now() + _EVICTION_PERIOD,
)
if (
len(self._mapping) == 1
or len(self._mapping) >= _MAXIMUM_CHANNELS
):
self._condition.notify()
return channel
def _test_only_channel_count(self) -> int:
with self._lock:
return len(self._mapping)
@experimental_api
def unary_unary(
request: RequestType,
target: str,
method: str,
request_serializer: Optional[Callable[[Any], bytes]] = None,
response_deserializer: Optional[Callable[[bytes], Any]] = None,
options: Sequence[Tuple[AnyStr, AnyStr]] = (),
channel_credentials: Optional[grpc.ChannelCredentials] = None,
insecure: bool = False,
call_credentials: Optional[grpc.CallCredentials] = None,
compression: Optional[grpc.Compression] = None,
wait_for_ready: Optional[bool] = None,
timeout: Optional[float] = _DEFAULT_TIMEOUT,
metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None,
) -> ResponseType:
"""Invokes a unary-unary RPC without an explicitly specified channel.
THIS IS AN EXPERIMENTAL API.
This is backed by a per-process cache of channels. Channels are evicted
from the cache after a fixed period by a background. Channels will also be
evicted if more than a configured maximum accumulate.
The default eviction period is 10 minutes. One may set the environment
variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.
The default maximum number of channels is 256. One may set the
environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
this.
Args:
request: An iterator that yields request values for the RPC.
target: The server address.
method: The name of the RPC method.
request_serializer: Optional :term:`serializer` for serializing the request
message. Request goes unserialized in case None is passed.
response_deserializer: Optional :term:`deserializer` for deserializing the response
message. Response goes undeserialized in case None is passed.
options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC Core
runtime) to configure the channel.
channel_credentials: A credential applied to the whole channel, e.g. the
return value of grpc.ssl_channel_credentials() or
grpc.insecure_channel_credentials().
insecure: If True, specifies channel_credentials as
:term:`grpc.insecure_channel_credentials()`. This option is mutually
exclusive with the `channel_credentials` option.
call_credentials: A call credential applied to each call individually,
e.g. the output of grpc.metadata_call_credentials() or
grpc.access_token_call_credentials().
compression: An optional value indicating the compression method to be
used over the lifetime of the channel, e.g. grpc.Compression.Gzip.
wait_for_ready: An optional flag indicating whether the RPC should fail
immediately if the connection is not ready at the time the RPC is
invoked, or if it should wait until the connection to the server
becomes ready. When using this option, the user will likely also want
to set a timeout. Defaults to True.
timeout: An optional duration of time in seconds to allow for the RPC,
after which an exception will be raised. If timeout is unspecified,
defaults to a timeout controlled by the
GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS environment variable. If that is
unset, defaults to 60 seconds. Supply a value of None to indicate that
no timeout should be enforced.
metadata: Optional metadata to send to the server.
Returns:
The response to the RPC.
"""
channel = ChannelCache.get().get_channel(
target, options, channel_credentials, insecure, compression
)
multicallable = channel.unary_unary(
method, request_serializer, response_deserializer
)
wait_for_ready = wait_for_ready if wait_for_ready is not None else True
return multicallable(
request,
metadata=metadata,
wait_for_ready=wait_for_ready,
credentials=call_credentials,
timeout=timeout,
)
@experimental_api
def unary_stream(
request: RequestType,
target: str,
method: str,
request_serializer: Optional[Callable[[Any], bytes]] = None,
response_deserializer: Optional[Callable[[bytes], Any]] = None,
options: Sequence[Tuple[AnyStr, AnyStr]] = (),
channel_credentials: Optional[grpc.ChannelCredentials] = None,
insecure: bool = False,
call_credentials: Optional[grpc.CallCredentials] = None,
compression: Optional[grpc.Compression] = None,
wait_for_ready: Optional[bool] = None,
timeout: Optional[float] = _DEFAULT_TIMEOUT,
metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None,
) -> Iterator[ResponseType]:
"""Invokes a unary-stream RPC without an explicitly specified channel.
THIS IS AN EXPERIMENTAL API.
This is backed by a per-process cache of channels. Channels are evicted
from the cache after a fixed period by a background. Channels will also be
evicted if more than a configured maximum accumulate.
The default eviction period is 10 minutes. One may set the environment
variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.
The default maximum number of channels is 256. One may set the
environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
this.
Args:
request: An iterator that yields request values for the RPC.
target: The server address.
method: The name of the RPC method.
request_serializer: Optional :term:`serializer` for serializing the request
message. Request goes unserialized in case None is passed.
response_deserializer: Optional :term:`deserializer` for deserializing the response
message. Response goes undeserialized in case None is passed.
options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC Core
runtime) to configure the channel.
channel_credentials: A credential applied to the whole channel, e.g. the
return value of grpc.ssl_channel_credentials().
insecure: If True, specifies channel_credentials as
:term:`grpc.insecure_channel_credentials()`. This option is mutually
exclusive with the `channel_credentials` option.
call_credentials: A call credential applied to each call individually,
e.g. the output of grpc.metadata_call_credentials() or
grpc.access_token_call_credentials().
compression: An optional value indicating the compression method to be
used over the lifetime of the channel, e.g. grpc.Compression.Gzip.
wait_for_ready: An optional flag indicating whether the RPC should fail
immediately if the connection is not ready at the time the RPC is
invoked, or if it should wait until the connection to the server
becomes ready. When using this option, the user will likely also want
to set a timeout. Defaults to True.
timeout: An optional duration of time in seconds to allow for the RPC,
after which an exception will be raised. If timeout is unspecified,
defaults to a timeout controlled by the
GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS environment variable. If that is
unset, defaults to 60 seconds. Supply a value of None to indicate that
no timeout should be enforced.
metadata: Optional metadata to send to the server.
Returns:
An iterator of responses.
"""
channel = ChannelCache.get().get_channel(
target, options, channel_credentials, insecure, compression
)
multicallable = channel.unary_stream(
method, request_serializer, response_deserializer
)
wait_for_ready = wait_for_ready if wait_for_ready is not None else True
return multicallable(
request,
metadata=metadata,
wait_for_ready=wait_for_ready,
credentials=call_credentials,
timeout=timeout,
)
@experimental_api
def stream_unary(
request_iterator: Iterator[RequestType],
target: str,
method: str,
request_serializer: Optional[Callable[[Any], bytes]] = None,
response_deserializer: Optional[Callable[[bytes], Any]] = None,
options: Sequence[Tuple[AnyStr, AnyStr]] = (),
channel_credentials: Optional[grpc.ChannelCredentials] = None,
insecure: bool = False,
call_credentials: Optional[grpc.CallCredentials] = None,
compression: Optional[grpc.Compression] = None,
wait_for_ready: Optional[bool] = None,
timeout: Optional[float] = _DEFAULT_TIMEOUT,
metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None,
) -> ResponseType:
"""Invokes a stream-unary RPC without an explicitly specified channel.
THIS IS AN EXPERIMENTAL API.
This is backed by a per-process cache of channels. Channels are evicted
from the cache after a fixed period by a background. Channels will also be
evicted if more than a configured maximum accumulate.
The default eviction period is 10 minutes. One may set the environment
variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.
The default maximum number of channels is 256. One may set the
environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
this.
Args:
request_iterator: An iterator that yields request values for the RPC.
target: The server address.
method: The name of the RPC method.
request_serializer: Optional :term:`serializer` for serializing the request
message. Request goes unserialized in case None is passed.
response_deserializer: Optional :term:`deserializer` for deserializing the response
message. Response goes undeserialized in case None is passed.
options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC Core
runtime) to configure the channel.
channel_credentials: A credential applied to the whole channel, e.g. the
return value of grpc.ssl_channel_credentials().
call_credentials: A call credential applied to each call individually,
e.g. the output of grpc.metadata_call_credentials() or
grpc.access_token_call_credentials().
insecure: If True, specifies channel_credentials as
:term:`grpc.insecure_channel_credentials()`. This option is mutually
exclusive with the `channel_credentials` option.
compression: An optional value indicating the compression method to be
used over the lifetime of the channel, e.g. grpc.Compression.Gzip.
wait_for_ready: An optional flag indicating whether the RPC should fail
immediately if the connection is not ready at the time the RPC is
invoked, or if it should wait until the connection to the server
becomes ready. When using this option, the user will likely also want
to set a timeout. Defaults to True.
timeout: An optional duration of time in seconds to allow for the RPC,
after which an exception will be raised. If timeout is unspecified,
defaults to a timeout controlled by the
GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS environment variable. If that is
unset, defaults to 60 seconds. Supply a value of None to indicate that
no timeout should be enforced.
metadata: Optional metadata to send to the server.
Returns:
The response to the RPC.
"""
channel = ChannelCache.get().get_channel(
target, options, channel_credentials, insecure, compression
)
multicallable = channel.stream_unary(
method, request_serializer, response_deserializer
)
wait_for_ready = wait_for_ready if wait_for_ready is not None else True
return multicallable(
request_iterator,
metadata=metadata,
wait_for_ready=wait_for_ready,
credentials=call_credentials,
timeout=timeout,
)
@experimental_api
def stream_stream(
request_iterator: Iterator[RequestType],
target: str,
method: str,
request_serializer: Optional[Callable[[Any], bytes]] = None,
response_deserializer: Optional[Callable[[bytes], Any]] = None,
options: Sequence[Tuple[AnyStr, AnyStr]] = (),
channel_credentials: Optional[grpc.ChannelCredentials] = None,
insecure: bool = False,
call_credentials: Optional[grpc.CallCredentials] = None,
compression: Optional[grpc.Compression] = None,
wait_for_ready: Optional[bool] = None,
timeout: Optional[float] = _DEFAULT_TIMEOUT,
metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None,
) -> Iterator[ResponseType]:
"""Invokes a stream-stream RPC without an explicitly specified channel.
THIS IS AN EXPERIMENTAL API.
This is backed by a per-process cache of channels. Channels are evicted
from the cache after a fixed period by a background. Channels will also be
evicted if more than a configured maximum accumulate.
The default eviction period is 10 minutes. One may set the environment
variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.
The default maximum number of channels is 256. One may set the
environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
this.
Args:
request_iterator: An iterator that yields request values for the RPC.
target: The server address.
method: The name of the RPC method.
request_serializer: Optional :term:`serializer` for serializing the request
message. Request goes unserialized in case None is passed.
response_deserializer: Optional :term:`deserializer` for deserializing the response
message. Response goes undeserialized in case None is passed.
options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC Core
runtime) to configure the channel.
channel_credentials: A credential applied to the whole channel, e.g. the
return value of grpc.ssl_channel_credentials().
call_credentials: A call credential applied to each call individually,
e.g. the output of grpc.metadata_call_credentials() or
grpc.access_token_call_credentials().
insecure: If True, specifies channel_credentials as
:term:`grpc.insecure_channel_credentials()`. This option is mutually
exclusive with the `channel_credentials` option.
compression: An optional value indicating the compression method to be
used over the lifetime of the channel, e.g. grpc.Compression.Gzip.
wait_for_ready: An optional flag indicating whether the RPC should fail
immediately if the connection is not ready at the time the RPC is
invoked, or if it should wait until the connection to the server
becomes ready. When using this option, the user will likely also want
to set a timeout. Defaults to True.
timeout: An optional duration of time in seconds to allow for the RPC,
after which an exception will be raised. If timeout is unspecified,
defaults to a timeout controlled by the
GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS environment variable. If that is
unset, defaults to 60 seconds. Supply a value of None to indicate that
no timeout should be enforced.
metadata: Optional metadata to send to the server.
Returns:
An iterator of responses.
"""
channel = ChannelCache.get().get_channel(
target, options, channel_credentials, insecure, compression
)
multicallable = channel.stream_stream(
method, request_serializer, response_deserializer
)
wait_for_ready = wait_for_ready if wait_for_ready is not None else True
return multicallable(
request_iterator,
metadata=metadata,
wait_for_ready=wait_for_ready,
credentials=call_credentials,
timeout=timeout,
)
| 22,865
| 41.501859
| 90
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/_interceptor.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of gRPC Python interceptors."""
import collections
import sys
import types
from typing import Any, Callable, Optional, Sequence, Tuple, Union
import grpc
from ._typing import DeserializingFunction
from ._typing import DoneCallbackType
from ._typing import MetadataType
from ._typing import RequestIterableType
from ._typing import SerializingFunction
class _ServicePipeline(object):
interceptors: Tuple[grpc.ServerInterceptor]
def __init__(self, interceptors: Sequence[grpc.ServerInterceptor]):
self.interceptors = tuple(interceptors)
def _continuation(self, thunk: Callable, index: int) -> Callable:
return lambda context: self._intercept_at(thunk, index, context)
def _intercept_at(
self, thunk: Callable, index: int, context: grpc.HandlerCallDetails
) -> grpc.RpcMethodHandler:
if index < len(self.interceptors):
interceptor = self.interceptors[index]
thunk = self._continuation(thunk, index + 1)
return interceptor.intercept_service(thunk, context)
else:
return thunk(context)
def execute(
self, thunk: Callable, context: grpc.HandlerCallDetails
) -> grpc.RpcMethodHandler:
return self._intercept_at(thunk, 0, context)
def service_pipeline(
interceptors: Optional[Sequence[grpc.ServerInterceptor]],
) -> Optional[_ServicePipeline]:
return _ServicePipeline(interceptors) if interceptors else None
class _ClientCallDetails(
collections.namedtuple(
"_ClientCallDetails",
(
"method",
"timeout",
"metadata",
"credentials",
"wait_for_ready",
"compression",
),
),
grpc.ClientCallDetails,
):
pass
def _unwrap_client_call_details(
call_details: grpc.ClientCallDetails,
default_details: grpc.ClientCallDetails,
) -> Tuple[
str, float, MetadataType, grpc.CallCredentials, bool, grpc.Compression
]:
try:
method = call_details.method # pytype: disable=attribute-error
except AttributeError:
method = default_details.method # pytype: disable=attribute-error
try:
timeout = call_details.timeout # pytype: disable=attribute-error
except AttributeError:
timeout = default_details.timeout # pytype: disable=attribute-error
try:
metadata = call_details.metadata # pytype: disable=attribute-error
except AttributeError:
metadata = default_details.metadata # pytype: disable=attribute-error
try:
credentials = (
call_details.credentials
) # pytype: disable=attribute-error
except AttributeError:
credentials = (
default_details.credentials
) # pytype: disable=attribute-error
try:
wait_for_ready = (
call_details.wait_for_ready
) # pytype: disable=attribute-error
except AttributeError:
wait_for_ready = (
default_details.wait_for_ready
) # pytype: disable=attribute-error
try:
compression = (
call_details.compression
) # pytype: disable=attribute-error
except AttributeError:
compression = (
default_details.compression
) # pytype: disable=attribute-error
return method, timeout, metadata, credentials, wait_for_ready, compression
class _FailureOutcome(
grpc.RpcError, grpc.Future, grpc.Call
): # pylint: disable=too-many-ancestors
_exception: Exception
_traceback: types.TracebackType
def __init__(self, exception: Exception, traceback: types.TracebackType):
super(_FailureOutcome, self).__init__()
self._exception = exception
self._traceback = traceback
def initial_metadata(self) -> Optional[MetadataType]:
return None
def trailing_metadata(self) -> Optional[MetadataType]:
return None
def code(self) -> Optional[grpc.StatusCode]:
return grpc.StatusCode.INTERNAL
def details(self) -> Optional[str]:
return "Exception raised while intercepting the RPC"
def cancel(self) -> bool:
return False
def cancelled(self) -> bool:
return False
def is_active(self) -> bool:
return False
def time_remaining(self) -> Optional[float]:
return None
def running(self) -> bool:
return False
def done(self) -> bool:
return True
def result(self, ignored_timeout: Optional[float] = None):
raise self._exception
def exception(
self, ignored_timeout: Optional[float] = None
) -> Optional[Exception]:
return self._exception
def traceback(
self, ignored_timeout: Optional[float] = None
) -> Optional[types.TracebackType]:
return self._traceback
def add_callback(self, unused_callback) -> bool:
return False
def add_done_callback(self, fn: DoneCallbackType) -> None:
fn(self)
def __iter__(self):
return self
def __next__(self):
raise self._exception
def next(self):
return self.__next__()
class _UnaryOutcome(grpc.Call, grpc.Future):
_response: Any
_call: grpc.Call
def __init__(self, response: Any, call: grpc.Call):
self._response = response
self._call = call
def initial_metadata(self) -> Optional[MetadataType]:
return self._call.initial_metadata()
def trailing_metadata(self) -> Optional[MetadataType]:
return self._call.trailing_metadata()
def code(self) -> Optional[grpc.StatusCode]:
return self._call.code()
def details(self) -> Optional[str]:
return self._call.details()
def is_active(self) -> bool:
return self._call.is_active()
def time_remaining(self) -> Optional[float]:
return self._call.time_remaining()
def cancel(self) -> bool:
return self._call.cancel()
def add_callback(self, callback) -> bool:
return self._call.add_callback(callback)
def cancelled(self) -> bool:
return False
def running(self) -> bool:
return False
def done(self) -> bool:
return True
def result(self, ignored_timeout: Optional[float] = None):
return self._response
def exception(self, ignored_timeout: Optional[float] = None):
return None
def traceback(self, ignored_timeout: Optional[float] = None):
return None
def add_done_callback(self, fn: DoneCallbackType) -> None:
fn(self)
class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):
_thunk: Callable
_method: str
_interceptor: grpc.UnaryUnaryClientInterceptor
def __init__(
self,
thunk: Callable,
method: str,
interceptor: grpc.UnaryUnaryClientInterceptor,
):
self._thunk = thunk
self._method = method
self._interceptor = interceptor
def __call__(
self,
request: Any,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> Any:
response, ignored_call = self._with_call(
request,
timeout=timeout,
metadata=metadata,
credentials=credentials,
wait_for_ready=wait_for_ready,
compression=compression,
)
return response
def _with_call(
self,
request: Any,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> Tuple[Any, grpc.Call]:
client_call_details = _ClientCallDetails(
self._method,
timeout,
metadata,
credentials,
wait_for_ready,
compression,
)
def continuation(new_details, request):
(
new_method,
new_timeout,
new_metadata,
new_credentials,
new_wait_for_ready,
new_compression,
) = _unwrap_client_call_details(new_details, client_call_details)
try:
response, call = self._thunk(new_method).with_call(
request,
timeout=new_timeout,
metadata=new_metadata,
credentials=new_credentials,
wait_for_ready=new_wait_for_ready,
compression=new_compression,
)
return _UnaryOutcome(response, call)
except grpc.RpcError as rpc_error:
return rpc_error
except Exception as exception: # pylint:disable=broad-except
return _FailureOutcome(exception, sys.exc_info()[2])
call = self._interceptor.intercept_unary_unary(
continuation, client_call_details, request
)
return call.result(), call
def with_call(
self,
request: Any,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> Tuple[Any, grpc.Call]:
return self._with_call(
request,
timeout=timeout,
metadata=metadata,
credentials=credentials,
wait_for_ready=wait_for_ready,
compression=compression,
)
def future(
self,
request: Any,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> Any:
client_call_details = _ClientCallDetails(
self._method,
timeout,
metadata,
credentials,
wait_for_ready,
compression,
)
def continuation(new_details, request):
(
new_method,
new_timeout,
new_metadata,
new_credentials,
new_wait_for_ready,
new_compression,
) = _unwrap_client_call_details(new_details, client_call_details)
return self._thunk(new_method).future(
request,
timeout=new_timeout,
metadata=new_metadata,
credentials=new_credentials,
wait_for_ready=new_wait_for_ready,
compression=new_compression,
)
try:
return self._interceptor.intercept_unary_unary(
continuation, client_call_details, request
)
except Exception as exception: # pylint:disable=broad-except
return _FailureOutcome(exception, sys.exc_info()[2])
class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable):
_thunk: Callable
_method: str
_interceptor: grpc.UnaryStreamClientInterceptor
def __init__(
self,
thunk: Callable,
method: str,
interceptor: grpc.UnaryStreamClientInterceptor,
):
self._thunk = thunk
self._method = method
self._interceptor = interceptor
def __call__(
self,
request: Any,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
):
client_call_details = _ClientCallDetails(
self._method,
timeout,
metadata,
credentials,
wait_for_ready,
compression,
)
def continuation(new_details, request):
(
new_method,
new_timeout,
new_metadata,
new_credentials,
new_wait_for_ready,
new_compression,
) = _unwrap_client_call_details(new_details, client_call_details)
return self._thunk(new_method)(
request,
timeout=new_timeout,
metadata=new_metadata,
credentials=new_credentials,
wait_for_ready=new_wait_for_ready,
compression=new_compression,
)
try:
return self._interceptor.intercept_unary_stream(
continuation, client_call_details, request
)
except Exception as exception: # pylint:disable=broad-except
return _FailureOutcome(exception, sys.exc_info()[2])
class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable):
_thunk: Callable
_method: str
_interceptor: grpc.StreamUnaryClientInterceptor
def __init__(
self,
thunk: Callable,
method: str,
interceptor: grpc.StreamUnaryClientInterceptor,
):
self._thunk = thunk
self._method = method
self._interceptor = interceptor
def __call__(
self,
request_iterator: RequestIterableType,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> Any:
response, ignored_call = self._with_call(
request_iterator,
timeout=timeout,
metadata=metadata,
credentials=credentials,
wait_for_ready=wait_for_ready,
compression=compression,
)
return response
def _with_call(
self,
request_iterator: RequestIterableType,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> Tuple[Any, grpc.Call]:
client_call_details = _ClientCallDetails(
self._method,
timeout,
metadata,
credentials,
wait_for_ready,
compression,
)
def continuation(new_details, request_iterator):
(
new_method,
new_timeout,
new_metadata,
new_credentials,
new_wait_for_ready,
new_compression,
) = _unwrap_client_call_details(new_details, client_call_details)
try:
response, call = self._thunk(new_method).with_call(
request_iterator,
timeout=new_timeout,
metadata=new_metadata,
credentials=new_credentials,
wait_for_ready=new_wait_for_ready,
compression=new_compression,
)
return _UnaryOutcome(response, call)
except grpc.RpcError as rpc_error:
return rpc_error
except Exception as exception: # pylint:disable=broad-except
return _FailureOutcome(exception, sys.exc_info()[2])
call = self._interceptor.intercept_stream_unary(
continuation, client_call_details, request_iterator
)
return call.result(), call
def with_call(
self,
request_iterator: RequestIterableType,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> Tuple[Any, grpc.Call]:
return self._with_call(
request_iterator,
timeout=timeout,
metadata=metadata,
credentials=credentials,
wait_for_ready=wait_for_ready,
compression=compression,
)
def future(
self,
request_iterator: RequestIterableType,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> Any:
client_call_details = _ClientCallDetails(
self._method,
timeout,
metadata,
credentials,
wait_for_ready,
compression,
)
def continuation(new_details, request_iterator):
(
new_method,
new_timeout,
new_metadata,
new_credentials,
new_wait_for_ready,
new_compression,
) = _unwrap_client_call_details(new_details, client_call_details)
return self._thunk(new_method).future(
request_iterator,
timeout=new_timeout,
metadata=new_metadata,
credentials=new_credentials,
wait_for_ready=new_wait_for_ready,
compression=new_compression,
)
try:
return self._interceptor.intercept_stream_unary(
continuation, client_call_details, request_iterator
)
except Exception as exception: # pylint:disable=broad-except
return _FailureOutcome(exception, sys.exc_info()[2])
class _StreamStreamMultiCallable(grpc.StreamStreamMultiCallable):
_thunk: Callable
_method: str
_interceptor: grpc.StreamStreamClientInterceptor
def __init__(
self,
thunk: Callable,
method: str,
interceptor: grpc.StreamStreamClientInterceptor,
):
self._thunk = thunk
self._method = method
self._interceptor = interceptor
def __call__(
self,
request_iterator: RequestIterableType,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
):
client_call_details = _ClientCallDetails(
self._method,
timeout,
metadata,
credentials,
wait_for_ready,
compression,
)
def continuation(new_details, request_iterator):
(
new_method,
new_timeout,
new_metadata,
new_credentials,
new_wait_for_ready,
new_compression,
) = _unwrap_client_call_details(new_details, client_call_details)
return self._thunk(new_method)(
request_iterator,
timeout=new_timeout,
metadata=new_metadata,
credentials=new_credentials,
wait_for_ready=new_wait_for_ready,
compression=new_compression,
)
try:
return self._interceptor.intercept_stream_stream(
continuation, client_call_details, request_iterator
)
except Exception as exception: # pylint:disable=broad-except
return _FailureOutcome(exception, sys.exc_info()[2])
class _Channel(grpc.Channel):
_channel: grpc.Channel
_interceptor: Union[
grpc.UnaryUnaryClientInterceptor,
grpc.UnaryStreamClientInterceptor,
grpc.StreamStreamClientInterceptor,
grpc.StreamUnaryClientInterceptor,
]
def __init__(
self,
channel: grpc.Channel,
interceptor: Union[
grpc.UnaryUnaryClientInterceptor,
grpc.UnaryStreamClientInterceptor,
grpc.StreamStreamClientInterceptor,
grpc.StreamUnaryClientInterceptor,
],
):
self._channel = channel
self._interceptor = interceptor
def subscribe(
self, callback: Callable, try_to_connect: Optional[bool] = False
):
self._channel.subscribe(callback, try_to_connect=try_to_connect)
def unsubscribe(self, callback: Callable):
self._channel.unsubscribe(callback)
def unary_unary(
self,
method: str,
request_serializer: Optional[SerializingFunction] = None,
response_deserializer: Optional[DeserializingFunction] = None,
) -> grpc.UnaryUnaryMultiCallable:
thunk = lambda m: self._channel.unary_unary(
m, request_serializer, response_deserializer
)
if isinstance(self._interceptor, grpc.UnaryUnaryClientInterceptor):
return _UnaryUnaryMultiCallable(thunk, method, self._interceptor)
else:
return thunk(method)
def unary_stream(
self,
method: str,
request_serializer: Optional[SerializingFunction] = None,
response_deserializer: Optional[DeserializingFunction] = None,
) -> grpc.UnaryStreamMultiCallable:
thunk = lambda m: self._channel.unary_stream(
m, request_serializer, response_deserializer
)
if isinstance(self._interceptor, grpc.UnaryStreamClientInterceptor):
return _UnaryStreamMultiCallable(thunk, method, self._interceptor)
else:
return thunk(method)
def stream_unary(
self,
method: str,
request_serializer: Optional[SerializingFunction] = None,
response_deserializer: Optional[DeserializingFunction] = None,
) -> grpc.StreamUnaryMultiCallable:
thunk = lambda m: self._channel.stream_unary(
m, request_serializer, response_deserializer
)
if isinstance(self._interceptor, grpc.StreamUnaryClientInterceptor):
return _StreamUnaryMultiCallable(thunk, method, self._interceptor)
else:
return thunk(method)
def stream_stream(
self,
method: str,
request_serializer: Optional[SerializingFunction] = None,
response_deserializer: Optional[DeserializingFunction] = None,
) -> grpc.StreamStreamMultiCallable:
thunk = lambda m: self._channel.stream_stream(
m, request_serializer, response_deserializer
)
if isinstance(self._interceptor, grpc.StreamStreamClientInterceptor):
return _StreamStreamMultiCallable(thunk, method, self._interceptor)
else:
return thunk(method)
def _close(self):
self._channel.close()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._close()
return False
def close(self):
self._channel.close()
def intercept_channel(
channel: grpc.Channel,
*interceptors: Optional[
Sequence[
Union[
grpc.UnaryUnaryClientInterceptor,
grpc.UnaryStreamClientInterceptor,
grpc.StreamStreamClientInterceptor,
grpc.StreamUnaryClientInterceptor,
]
]
],
) -> grpc.Channel:
for interceptor in reversed(list(interceptors)):
if (
not isinstance(interceptor, grpc.UnaryUnaryClientInterceptor)
and not isinstance(interceptor, grpc.UnaryStreamClientInterceptor)
and not isinstance(interceptor, grpc.StreamUnaryClientInterceptor)
and not isinstance(interceptor, grpc.StreamStreamClientInterceptor)
):
raise TypeError(
"interceptor must be "
"grpc.UnaryUnaryClientInterceptor or "
"grpc.UnaryStreamClientInterceptor or "
"grpc.StreamUnaryClientInterceptor or "
"grpc.StreamStreamClientInterceptor or "
)
channel = _Channel(channel, interceptor)
return channel
| 24,938
| 30.729008
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/_grpcio_metadata.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio/grpc/_grpcio_metadata.py.template`!!!
__version__ = """1.57.0.dev0"""
| 711
| 38.555556
| 99
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/_runtime_protos.py
|
# Copyright 2020 The gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import types
from typing import Tuple, Union
_REQUIRED_SYMBOLS = ("_protos", "_services", "_protos_and_services")
_MINIMUM_VERSION = (3, 5, 0)
_UNINSTALLED_TEMPLATE = (
"Install the grpcio-tools package (1.32.0+) to use the {} function."
)
_VERSION_ERROR_TEMPLATE = (
"The {} function is only on available on Python 3.X interpreters."
)
def _has_runtime_proto_symbols(mod: types.ModuleType) -> bool:
return all(hasattr(mod, sym) for sym in _REQUIRED_SYMBOLS)
def _is_grpc_tools_importable() -> bool:
try:
import grpc_tools # pylint: disable=unused-import # pytype: disable=import-error
return True
except ImportError as e:
# NOTE: It's possible that we're encountering a transitive ImportError, so
# we check for that and re-raise if so.
if "grpc_tools" not in e.args[0]:
raise
return False
def _call_with_lazy_import(
fn_name: str, protobuf_path: str
) -> Union[types.ModuleType, Tuple[types.ModuleType, types.ModuleType]]:
"""Calls one of the three functions, lazily importing grpc_tools.
Args:
fn_name: The name of the function to import from grpc_tools.protoc.
protobuf_path: The path to import.
Returns:
The appropriate module object.
"""
if sys.version_info < _MINIMUM_VERSION:
raise NotImplementedError(_VERSION_ERROR_TEMPLATE.format(fn_name))
else:
if not _is_grpc_tools_importable():
raise NotImplementedError(_UNINSTALLED_TEMPLATE.format(fn_name))
import grpc_tools.protoc # pytype: disable=import-error
if _has_runtime_proto_symbols(grpc_tools.protoc):
fn = getattr(grpc_tools.protoc, "_" + fn_name)
return fn(protobuf_path)
else:
raise NotImplementedError(_UNINSTALLED_TEMPLATE.format(fn_name))
def protos(protobuf_path): # pylint: disable=unused-argument
"""Returns a module generated by the indicated .proto file.
THIS IS AN EXPERIMENTAL API.
Use this function to retrieve classes corresponding to message
definitions in the .proto file.
To inspect the contents of the returned module, use the dir function.
For example:
```
protos = grpc.protos("foo.proto")
print(dir(protos))
```
The returned module object corresponds to the _pb2.py file generated
by protoc. The path is expected to be relative to an entry on sys.path
and all transitive dependencies of the file should also be resolveable
from an entry on sys.path.
To completely disable the machinery behind this function, set the
GRPC_PYTHON_DISABLE_DYNAMIC_STUBS environment variable to "true".
Args:
protobuf_path: The path to the .proto file on the filesystem. This path
must be resolveable from an entry on sys.path and so must all of its
transitive dependencies.
Returns:
A module object corresponding to the message code for the indicated
.proto file. Equivalent to a generated _pb2.py file.
"""
return _call_with_lazy_import("protos", protobuf_path)
def services(protobuf_path): # pylint: disable=unused-argument
"""Returns a module generated by the indicated .proto file.
THIS IS AN EXPERIMENTAL API.
Use this function to retrieve classes and functions corresponding to
service definitions in the .proto file, including both stub and servicer
definitions.
To inspect the contents of the returned module, use the dir function.
For example:
```
services = grpc.services("foo.proto")
print(dir(services))
```
The returned module object corresponds to the _pb2_grpc.py file generated
by protoc. The path is expected to be relative to an entry on sys.path
and all transitive dependencies of the file should also be resolveable
from an entry on sys.path.
To completely disable the machinery behind this function, set the
GRPC_PYTHON_DISABLE_DYNAMIC_STUBS environment variable to "true".
Args:
protobuf_path: The path to the .proto file on the filesystem. This path
must be resolveable from an entry on sys.path and so must all of its
transitive dependencies.
Returns:
A module object corresponding to the stub/service code for the indicated
.proto file. Equivalent to a generated _pb2_grpc.py file.
"""
return _call_with_lazy_import("services", protobuf_path)
def protos_and_services(protobuf_path): # pylint: disable=unused-argument
"""Returns a 2-tuple of modules corresponding to protos and services.
THIS IS AN EXPERIMENTAL API.
The return value of this function is equivalent to a call to protos and a
call to services.
To completely disable the machinery behind this function, set the
GRPC_PYTHON_DISABLE_DYNAMIC_STUBS environment variable to "true".
Args:
protobuf_path: The path to the .proto file on the filesystem. This path
must be resolveable from an entry on sys.path and so must all of its
transitive dependencies.
Returns:
A 2-tuple of module objects corresponding to (protos(path), services(path)).
"""
return _call_with_lazy_import("protos_and_services", protobuf_path)
| 5,810
| 34.006024
| 89
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/_plugin_wrapping.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import logging
import threading
from typing import Callable, Optional, Type
import grpc
from grpc import _common
from grpc._cython import cygrpc
from grpc._typing import MetadataType
_LOGGER = logging.getLogger(__name__)
class _AuthMetadataContext(
collections.namedtuple(
"AuthMetadataContext",
(
"service_url",
"method_name",
),
),
grpc.AuthMetadataContext,
):
pass
class _CallbackState(object):
def __init__(self):
self.lock = threading.Lock()
self.called = False
self.exception = None
class _AuthMetadataPluginCallback(grpc.AuthMetadataPluginCallback):
_state: _CallbackState
_callback: Callable
def __init__(self, state: _CallbackState, callback: Callable):
self._state = state
self._callback = callback
def __call__(
self, metadata: MetadataType, error: Optional[Type[BaseException]]
):
with self._state.lock:
if self._state.exception is None:
if self._state.called:
raise RuntimeError(
"AuthMetadataPluginCallback invoked more than once!"
)
else:
self._state.called = True
else:
raise RuntimeError(
'AuthMetadataPluginCallback raised exception "{}"!'.format(
self._state.exception
)
)
if error is None:
self._callback(metadata, cygrpc.StatusCode.ok, None)
else:
self._callback(
None, cygrpc.StatusCode.internal, _common.encode(str(error))
)
class _Plugin(object):
_metadata_plugin: grpc.AuthMetadataPlugin
def __init__(self, metadata_plugin: grpc.AuthMetadataPlugin):
self._metadata_plugin = metadata_plugin
self._stored_ctx = None
try:
import contextvars # pylint: disable=wrong-import-position
# The plugin may be invoked on a thread created by Core, which will not
# have the context propagated. This context is stored and installed in
# the thread invoking the plugin.
self._stored_ctx = contextvars.copy_context()
except ImportError:
# Support versions predating contextvars.
pass
def __call__(self, service_url: str, method_name: str, callback: Callable):
context = _AuthMetadataContext(
_common.decode(service_url), _common.decode(method_name)
)
callback_state = _CallbackState()
try:
self._metadata_plugin(
context, _AuthMetadataPluginCallback(callback_state, callback)
)
except Exception as exception: # pylint: disable=broad-except
_LOGGER.exception(
'AuthMetadataPluginCallback "%s" raised exception!',
self._metadata_plugin,
)
with callback_state.lock:
callback_state.exception = exception
if callback_state.called:
return
callback(
None, cygrpc.StatusCode.internal, _common.encode(str(exception))
)
def metadata_plugin_call_credentials(
metadata_plugin: grpc.AuthMetadataPlugin, name: Optional[str]
) -> grpc.CallCredentials:
if name is None:
try:
effective_name = metadata_plugin.__name__
except AttributeError:
effective_name = metadata_plugin.__class__.__name__
else:
effective_name = name
return grpc.CallCredentials(
cygrpc.MetadataPluginCallCredentials(
_Plugin(metadata_plugin), _common.encode(effective_name)
)
)
| 4,382
| 30.992701
| 83
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/_observability.py
|
# Copyright 2023 The gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
import abc
import contextlib
import logging
import threading
from typing import Any, Generator, Generic, List, Optional, TypeVar
from grpc._cython import cygrpc as _cygrpc
_LOGGER = logging.getLogger(__name__)
_channel = Any # _channel.py imports this module.
ClientCallTracerCapsule = TypeVar("ClientCallTracerCapsule")
ServerCallTracerFactoryCapsule = TypeVar("ServerCallTracerFactoryCapsule")
_plugin_lock: threading.RLock = threading.RLock()
_OBSERVABILITY_PLUGIN: Optional["ObservabilityPlugin"] = None
_SERVICES_TO_EXCLUDE: List[bytes] = [
b"google.monitoring.v3.MetricService",
b"google.devtools.cloudtrace.v2.TraceService",
]
class ObservabilityPlugin(
Generic[ClientCallTracerCapsule, ServerCallTracerFactoryCapsule],
metaclass=abc.ABCMeta,
):
"""Abstract base class for observability plugin.
*This is a semi-private class that was intended for the exclusive use of
the gRPC team.*
The ClientCallTracerCapsule and ClientCallTracerCapsule created by this
plugin should be inject to gRPC core using observability_init at the
start of a program, before any channels/servers are built.
Any future methods added to this interface cannot have the
@abc.abstractmethod annotation.
Attributes:
_stats_enabled: A bool indicates whether tracing is enabled.
_tracing_enabled: A bool indicates whether stats(metrics) is enabled.
"""
_tracing_enabled: bool = False
_stats_enabled: bool = False
@abc.abstractmethod
def create_client_call_tracer(
self, method_name: bytes
) -> ClientCallTracerCapsule:
"""Creates a ClientCallTracerCapsule.
After register the plugin, if tracing or stats is enabled, this method
will be called after a call was created, the ClientCallTracer created
by this method will be saved to call context.
The ClientCallTracer is an object which implements `grpc_core::ClientCallTracer`
interface and wrapped in a PyCapsule using `client_call_tracer` as name.
Args:
method_name: The method name of the call in byte format.
Returns:
A PyCapsule which stores a ClientCallTracer object.
"""
raise NotImplementedError()
@abc.abstractmethod
def delete_client_call_tracer(
self, client_call_tracer: ClientCallTracerCapsule
) -> None:
"""Deletes the ClientCallTracer stored in ClientCallTracerCapsule.
After register the plugin, if tracing or stats is enabled, this method
will be called at the end of the call to destroy the ClientCallTracer.
The ClientCallTracer is an object which implements `grpc_core::ClientCallTracer`
interface and wrapped in a PyCapsule using `client_call_tracer` as name.
Args:
client_call_tracer: A PyCapsule which stores a ClientCallTracer object.
"""
raise NotImplementedError()
@abc.abstractmethod
def save_trace_context(
self, trace_id: str, span_id: str, is_sampled: bool
) -> None:
"""Saves the trace_id and span_id related to the current span.
After register the plugin, if tracing is enabled, this method will be
called after the server finished sending response.
This method can be used to propagate census context.
Args:
trace_id: The identifier for the trace associated with the span as a
32-character hexadecimal encoded string,
e.g. 26ed0036f2eff2b7317bccce3e28d01f
span_id: The identifier for the span as a 16-character hexadecimal encoded
string. e.g. 113ec879e62583bc
is_sampled: A bool indicates whether the span is sampled.
"""
raise NotImplementedError()
@abc.abstractmethod
def create_server_call_tracer_factory(
self,
) -> ServerCallTracerFactoryCapsule:
"""Creates a ServerCallTracerFactoryCapsule.
After register the plugin, if tracing or stats is enabled, this method
will be called by calling observability_init, the ServerCallTracerFactory
created by this method will be registered to gRPC core.
The ServerCallTracerFactory is an object which implements
`grpc_core::ServerCallTracerFactory` interface and wrapped in a PyCapsule
using `server_call_tracer_factory` as name.
Returns:
A PyCapsule which stores a ServerCallTracerFactory object.
"""
raise NotImplementedError()
@abc.abstractmethod
def record_rpc_latency(
self, method: str, rpc_latency: float, status_code: Any
) -> None:
"""Record the latency of the RPC.
After register the plugin, if stats is enabled, this method will be
called at the end of each RPC.
Args:
method: The fully-qualified name of the RPC method being invoked.
rpc_latency: The latency for the RPC, equals to the time between
when the client invokes the RPC and when the client receives the status.
status_code: An element of grpc.StatusCode in string format representing the
final status for the RPC.
"""
raise NotImplementedError()
def set_tracing(self, enable: bool) -> None:
"""Enable or disable tracing.
Args:
enable: A bool indicates whether tracing should be enabled.
"""
self._tracing_enabled = enable
def set_stats(self, enable: bool) -> None:
"""Enable or disable stats(metrics).
Args:
enable: A bool indicates whether stats should be enabled.
"""
self._stats_enabled = enable
@property
def tracing_enabled(self) -> bool:
return self._tracing_enabled
@property
def stats_enabled(self) -> bool:
return self._stats_enabled
@property
def observability_enabled(self) -> bool:
return self.tracing_enabled or self.stats_enabled
@contextlib.contextmanager
def get_plugin() -> Generator[Optional[ObservabilityPlugin], None, None]:
"""Get the ObservabilityPlugin in _observability module.
Returns:
The ObservabilityPlugin currently registered with the _observability
module. Or None if no plugin exists at the time of calling this method.
"""
with _plugin_lock:
yield _OBSERVABILITY_PLUGIN
def set_plugin(observability_plugin: Optional[ObservabilityPlugin]) -> None:
"""Save ObservabilityPlugin to _observability module.
Args:
observability_plugin: The ObservabilityPlugin to save.
Raises:
ValueError: If an ObservabilityPlugin was already registered at the
time of calling this method.
"""
global _OBSERVABILITY_PLUGIN # pylint: disable=global-statement
with _plugin_lock:
if observability_plugin and _OBSERVABILITY_PLUGIN:
raise ValueError("observability_plugin was already set!")
_OBSERVABILITY_PLUGIN = observability_plugin
def observability_init(observability_plugin: ObservabilityPlugin) -> None:
"""Initialize observability with provided ObservabilityPlugin.
This method have to be called at the start of a program, before any
channels/servers are built.
Args:
observability_plugin: The ObservabilityPlugin to use.
Raises:
ValueError: If an ObservabilityPlugin was already registered at the
time of calling this method.
"""
set_plugin(observability_plugin)
try:
_cygrpc.set_server_call_tracer_factory(observability_plugin)
except Exception: # pylint:disable=broad-except
_LOGGER.exception("Failed to set server call tracer factory!")
def observability_deinit() -> None:
"""Clear the observability context, including ObservabilityPlugin and
ServerCallTracerFactory
This method have to be called after exit observability context so that
it's possible to re-initialize again.
"""
set_plugin(None)
_cygrpc.clear_server_call_tracer_factory()
def delete_call_tracer(client_call_tracer_capsule: Any) -> None:
"""Deletes the ClientCallTracer stored in ClientCallTracerCapsule.
This method will be called at the end of the call to destroy the ClientCallTracer.
The ClientCallTracer is an object which implements `grpc_core::ClientCallTracer`
interface and wrapped in a PyCapsule using `client_call_tracer` as the name.
Args:
client_call_tracer_capsule: A PyCapsule which stores a ClientCallTracer object.
"""
with get_plugin() as plugin:
if not (plugin and plugin.observability_enabled):
return
plugin.delete_client_call_tracer(client_call_tracer_capsule)
def maybe_record_rpc_latency(state: "_channel._RPCState") -> None:
"""Record the latency of the RPC, if the plugin is registered and stats is enabled.
This method will be called at the end of each RPC.
Args:
state: a grpc._channel._RPCState object which contains the stats related to the
RPC.
"""
# TODO(xuanwn): use channel args to exclude those metrics.
for exclude_prefix in _SERVICES_TO_EXCLUDE:
if exclude_prefix in state.method.encode("utf8"):
return
with get_plugin() as plugin:
if not (plugin and plugin.stats_enabled):
return
rpc_latency = state.rpc_end_time - state.rpc_start_time
rpc_latency_ms = rpc_latency.total_seconds() * 1000
plugin.record_rpc_latency(state.method, rpc_latency_ms, state.code)
| 10,058
| 34.419014
| 88
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/_channel.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Invocation-side implementation of gRPC Python."""
import copy
from datetime import datetime
import functools
import logging
import os
import sys
import threading
import time
import types
from typing import (
Any,
Callable,
Iterator,
List,
Optional,
Sequence,
Set,
Tuple,
Union,
)
import grpc # pytype: disable=pyi-error
from grpc import _common # pytype: disable=pyi-error
from grpc import _compression # pytype: disable=pyi-error
from grpc import _grpcio_metadata # pytype: disable=pyi-error
from grpc import _observability # pytype: disable=pyi-error
from grpc._cython import cygrpc
from grpc._typing import ChannelArgumentType
from grpc._typing import DeserializingFunction
from grpc._typing import IntegratedCallFactory
from grpc._typing import MetadataType
from grpc._typing import NullaryCallbackType
from grpc._typing import ResponseType
from grpc._typing import SerializingFunction
from grpc._typing import UserTag
import grpc.experimental # pytype: disable=pyi-error
_LOGGER = logging.getLogger(__name__)
_USER_AGENT = "grpc-python/{}".format(_grpcio_metadata.__version__)
_EMPTY_FLAGS = 0
# NOTE(rbellevi): No guarantees are given about the maintenance of this
# environment variable.
_DEFAULT_SINGLE_THREADED_UNARY_STREAM = (
os.getenv("GRPC_SINGLE_THREADED_UNARY_STREAM") is not None
)
_UNARY_UNARY_INITIAL_DUE = (
cygrpc.OperationType.send_initial_metadata,
cygrpc.OperationType.send_message,
cygrpc.OperationType.send_close_from_client,
cygrpc.OperationType.receive_initial_metadata,
cygrpc.OperationType.receive_message,
cygrpc.OperationType.receive_status_on_client,
)
_UNARY_STREAM_INITIAL_DUE = (
cygrpc.OperationType.send_initial_metadata,
cygrpc.OperationType.send_message,
cygrpc.OperationType.send_close_from_client,
cygrpc.OperationType.receive_initial_metadata,
cygrpc.OperationType.receive_status_on_client,
)
_STREAM_UNARY_INITIAL_DUE = (
cygrpc.OperationType.send_initial_metadata,
cygrpc.OperationType.receive_initial_metadata,
cygrpc.OperationType.receive_message,
cygrpc.OperationType.receive_status_on_client,
)
_STREAM_STREAM_INITIAL_DUE = (
cygrpc.OperationType.send_initial_metadata,
cygrpc.OperationType.receive_initial_metadata,
cygrpc.OperationType.receive_status_on_client,
)
_CHANNEL_SUBSCRIPTION_CALLBACK_ERROR_LOG_MESSAGE = (
"Exception calling channel subscription callback!"
)
_OK_RENDEZVOUS_REPR_FORMAT = (
'<{} of RPC that terminated with:\n\tstatus = {}\n\tdetails = "{}"\n>'
)
_NON_OK_RENDEZVOUS_REPR_FORMAT = (
"<{} of RPC that terminated with:\n"
"\tstatus = {}\n"
'\tdetails = "{}"\n'
'\tdebug_error_string = "{}"\n'
">"
)
def _deadline(timeout: Optional[float]) -> Optional[float]:
return None if timeout is None else time.time() + timeout
def _unknown_code_details(
unknown_cygrpc_code: Optional[grpc.StatusCode], details: Optional[str]
) -> str:
return 'Server sent unknown code {} and details "{}"'.format(
unknown_cygrpc_code, details
)
class _RPCState(object):
condition: threading.Condition
due: Set[cygrpc.OperationType]
initial_metadata: Optional[MetadataType]
response: Any
trailing_metadata: Optional[MetadataType]
code: Optional[grpc.StatusCode]
details: Optional[str]
debug_error_string: Optional[str]
cancelled: bool
callbacks: List[NullaryCallbackType]
fork_epoch: Optional[int]
rpc_start_time: Optional[datetime]
rpc_end_time: Optional[datetime]
method: Optional[str]
def __init__(
self,
due: Sequence[cygrpc.OperationType],
initial_metadata: Optional[MetadataType],
trailing_metadata: Optional[MetadataType],
code: Optional[grpc.StatusCode],
details: Optional[str],
):
# `condition` guards all members of _RPCState. `notify_all` is called on
# `condition` when the state of the RPC has changed.
self.condition = threading.Condition()
# The cygrpc.OperationType objects representing events due from the RPC's
# completion queue. If an operation is in `due`, it is guaranteed that
# `operate()` has been called on a corresponding operation. But the
# converse is not true. That is, in the case of failed `operate()`
# calls, there may briefly be events in `due` that do not correspond to
# operations submitted to Core.
self.due = set(due)
self.initial_metadata = initial_metadata
self.response = None
self.trailing_metadata = trailing_metadata
self.code = code
self.details = details
self.debug_error_string = None
# The following three fields are used for observability.
# Updates to those fields do not trigger self.condition.
self.rpc_start_time = None
self.rpc_end_time = None
self.method = None
# The semantics of grpc.Future.cancel and grpc.Future.cancelled are
# slightly wonky, so they have to be tracked separately from the rest of the
# result of the RPC. This field tracks whether cancellation was requested
# prior to termination of the RPC.
self.cancelled = False
self.callbacks = []
self.fork_epoch = cygrpc.get_fork_epoch()
def reset_postfork_child(self):
self.condition = threading.Condition()
def _abort(state: _RPCState, code: grpc.StatusCode, details: str) -> None:
if state.code is None:
state.code = code
state.details = details
if state.initial_metadata is None:
state.initial_metadata = ()
state.trailing_metadata = ()
def _handle_event(
event: cygrpc.BaseEvent,
state: _RPCState,
response_deserializer: Optional[DeserializingFunction],
) -> List[NullaryCallbackType]:
callbacks = []
for batch_operation in event.batch_operations:
operation_type = batch_operation.type()
state.due.remove(operation_type)
if operation_type == cygrpc.OperationType.receive_initial_metadata:
state.initial_metadata = batch_operation.initial_metadata()
elif operation_type == cygrpc.OperationType.receive_message:
serialized_response = batch_operation.message()
if serialized_response is not None:
response = _common.deserialize(
serialized_response, response_deserializer
)
if response is None:
details = "Exception deserializing response!"
_abort(state, grpc.StatusCode.INTERNAL, details)
else:
state.response = response
elif operation_type == cygrpc.OperationType.receive_status_on_client:
state.trailing_metadata = batch_operation.trailing_metadata()
if state.code is None:
code = _common.CYGRPC_STATUS_CODE_TO_STATUS_CODE.get(
batch_operation.code()
)
if code is None:
state.code = grpc.StatusCode.UNKNOWN
state.details = _unknown_code_details(
code, batch_operation.details()
)
else:
state.code = code
state.details = batch_operation.details()
state.debug_error_string = batch_operation.error_string()
state.rpc_end_time = datetime.utcnow()
_observability.maybe_record_rpc_latency(state)
callbacks.extend(state.callbacks)
state.callbacks = None
return callbacks
def _event_handler(
state: _RPCState, response_deserializer: Optional[DeserializingFunction]
) -> UserTag:
def handle_event(event):
with state.condition:
callbacks = _handle_event(event, state, response_deserializer)
state.condition.notify_all()
done = not state.due
for callback in callbacks:
try:
callback()
except Exception as e: # pylint: disable=broad-except
# NOTE(rbellevi): We suppress but log errors here so as not to
# kill the channel spin thread.
logging.error(
"Exception in callback %s: %s", repr(callback.func), repr(e)
)
return done and state.fork_epoch >= cygrpc.get_fork_epoch()
return handle_event
# TODO(xuanwn): Create a base class for IntegratedCall and SegregatedCall.
# pylint: disable=too-many-statements
def _consume_request_iterator(
request_iterator: Iterator,
state: _RPCState,
call: Union[cygrpc.IntegratedCall, cygrpc.SegregatedCall],
request_serializer: SerializingFunction,
event_handler: Optional[UserTag],
) -> None:
"""Consume a request supplied by the user."""
def consume_request_iterator(): # pylint: disable=too-many-branches
# Iterate over the request iterator until it is exhausted or an error
# condition is encountered.
while True:
return_from_user_request_generator_invoked = False
try:
# The thread may die in user-code. Do not block fork for this.
cygrpc.enter_user_request_generator()
request = next(request_iterator)
except StopIteration:
break
except Exception: # pylint: disable=broad-except
cygrpc.return_from_user_request_generator()
return_from_user_request_generator_invoked = True
code = grpc.StatusCode.UNKNOWN
details = "Exception iterating requests!"
_LOGGER.exception(details)
call.cancel(
_common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[code], details
)
_abort(state, code, details)
return
finally:
if not return_from_user_request_generator_invoked:
cygrpc.return_from_user_request_generator()
serialized_request = _common.serialize(request, request_serializer)
with state.condition:
if state.code is None and not state.cancelled:
if serialized_request is None:
code = grpc.StatusCode.INTERNAL
details = "Exception serializing request!"
call.cancel(
_common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[code],
details,
)
_abort(state, code, details)
return
else:
state.due.add(cygrpc.OperationType.send_message)
operations = (
cygrpc.SendMessageOperation(
serialized_request, _EMPTY_FLAGS
),
)
operating = call.operate(operations, event_handler)
if not operating:
state.due.remove(cygrpc.OperationType.send_message)
return
def _done():
return (
state.code is not None
or cygrpc.OperationType.send_message
not in state.due
)
_common.wait(
state.condition.wait,
_done,
spin_cb=functools.partial(
cygrpc.block_if_fork_in_progress, state
),
)
if state.code is not None:
return
else:
return
with state.condition:
if state.code is None:
state.due.add(cygrpc.OperationType.send_close_from_client)
operations = (
cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS),
)
operating = call.operate(operations, event_handler)
if not operating:
state.due.remove(
cygrpc.OperationType.send_close_from_client
)
consumption_thread = cygrpc.ForkManagedThread(
target=consume_request_iterator
)
consumption_thread.setDaemon(True)
consumption_thread.start()
def _rpc_state_string(class_name: str, rpc_state: _RPCState) -> str:
"""Calculates error string for RPC."""
with rpc_state.condition:
if rpc_state.code is None:
return "<{} object>".format(class_name)
elif rpc_state.code is grpc.StatusCode.OK:
return _OK_RENDEZVOUS_REPR_FORMAT.format(
class_name, rpc_state.code, rpc_state.details
)
else:
return _NON_OK_RENDEZVOUS_REPR_FORMAT.format(
class_name,
rpc_state.code,
rpc_state.details,
rpc_state.debug_error_string,
)
class _InactiveRpcError(grpc.RpcError, grpc.Call, grpc.Future):
"""An RPC error not tied to the execution of a particular RPC.
The RPC represented by the state object must not be in-progress or
cancelled.
Attributes:
_state: An instance of _RPCState.
"""
_state: _RPCState
def __init__(self, state: _RPCState):
with state.condition:
self._state = _RPCState(
(),
copy.deepcopy(state.initial_metadata),
copy.deepcopy(state.trailing_metadata),
state.code,
copy.deepcopy(state.details),
)
self._state.response = copy.copy(state.response)
self._state.debug_error_string = copy.copy(state.debug_error_string)
def initial_metadata(self) -> Optional[MetadataType]:
return self._state.initial_metadata
def trailing_metadata(self) -> Optional[MetadataType]:
return self._state.trailing_metadata
def code(self) -> Optional[grpc.StatusCode]:
return self._state.code
def details(self) -> Optional[str]:
return _common.decode(self._state.details)
def debug_error_string(self) -> Optional[str]:
return _common.decode(self._state.debug_error_string)
def _repr(self) -> str:
return _rpc_state_string(self.__class__.__name__, self._state)
def __repr__(self) -> str:
return self._repr()
def __str__(self) -> str:
return self._repr()
def cancel(self) -> bool:
"""See grpc.Future.cancel."""
return False
def cancelled(self) -> bool:
"""See grpc.Future.cancelled."""
return False
def running(self) -> bool:
"""See grpc.Future.running."""
return False
def done(self) -> bool:
"""See grpc.Future.done."""
return True
def result(
self, timeout: Optional[float] = None
) -> Any: # pylint: disable=unused-argument
"""See grpc.Future.result."""
raise self
def exception(
self, timeout: Optional[float] = None # pylint: disable=unused-argument
) -> Optional[Exception]:
"""See grpc.Future.exception."""
return self
def traceback(
self, timeout: Optional[float] = None # pylint: disable=unused-argument
) -> Optional[types.TracebackType]:
"""See grpc.Future.traceback."""
try:
raise self
except grpc.RpcError:
return sys.exc_info()[2]
def add_done_callback(
self,
fn: Callable[[grpc.Future], None],
timeout: Optional[float] = None, # pylint: disable=unused-argument
) -> None:
"""See grpc.Future.add_done_callback."""
fn(self)
class _Rendezvous(grpc.RpcError, grpc.RpcContext):
"""An RPC iterator.
Attributes:
_state: An instance of _RPCState.
_call: An instance of SegregatedCall or IntegratedCall.
In either case, the _call object is expected to have operate, cancel,
and next_event methods.
_response_deserializer: A callable taking bytes and return a Python
object.
_deadline: A float representing the deadline of the RPC in seconds. Or
possibly None, to represent an RPC with no deadline at all.
"""
_state: _RPCState
_call: Union[cygrpc.SegregatedCall, cygrpc.IntegratedCall]
_response_deserializer: Optional[DeserializingFunction]
_deadline: Optional[float]
def __init__(
self,
state: _RPCState,
call: Union[cygrpc.SegregatedCall, cygrpc.IntegratedCall],
response_deserializer: Optional[DeserializingFunction],
deadline: Optional[float],
):
super(_Rendezvous, self).__init__()
self._state = state
self._call = call
self._response_deserializer = response_deserializer
self._deadline = deadline
def is_active(self) -> bool:
"""See grpc.RpcContext.is_active"""
with self._state.condition:
return self._state.code is None
def time_remaining(self) -> Optional[float]:
"""See grpc.RpcContext.time_remaining"""
with self._state.condition:
if self._deadline is None:
return None
else:
return max(self._deadline - time.time(), 0)
def cancel(self) -> bool:
"""See grpc.RpcContext.cancel"""
with self._state.condition:
if self._state.code is None:
code = grpc.StatusCode.CANCELLED
details = "Locally cancelled by application!"
self._call.cancel(
_common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[code], details
)
self._state.cancelled = True
_abort(self._state, code, details)
self._state.condition.notify_all()
return True
else:
return False
def add_callback(self, callback: NullaryCallbackType) -> bool:
"""See grpc.RpcContext.add_callback"""
with self._state.condition:
if self._state.callbacks is None:
return False
else:
self._state.callbacks.append(callback)
return True
def __iter__(self):
return self
def next(self):
return self._next()
def __next__(self):
return self._next()
def _next(self):
raise NotImplementedError()
def debug_error_string(self) -> Optional[str]:
raise NotImplementedError()
def _repr(self) -> str:
return _rpc_state_string(self.__class__.__name__, self._state)
def __repr__(self) -> str:
return self._repr()
def __str__(self) -> str:
return self._repr()
def __del__(self) -> None:
with self._state.condition:
if self._state.code is None:
self._state.code = grpc.StatusCode.CANCELLED
self._state.details = "Cancelled upon garbage collection!"
self._state.cancelled = True
self._call.cancel(
_common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[self._state.code],
self._state.details,
)
self._state.condition.notify_all()
class _SingleThreadedRendezvous(
_Rendezvous, grpc.Call, grpc.Future
): # pylint: disable=too-many-ancestors
"""An RPC iterator operating entirely on a single thread.
The __next__ method of _SingleThreadedRendezvous does not depend on the
existence of any other thread, including the "channel spin thread".
However, this means that its interface is entirely synchronous. So this
class cannot completely fulfill the grpc.Future interface. The result,
exception, and traceback methods will never block and will instead raise
an exception if calling the method would result in blocking.
This means that these methods are safe to call from add_done_callback
handlers.
"""
_state: _RPCState
def _is_complete(self) -> bool:
return self._state.code is not None
def cancelled(self) -> bool:
with self._state.condition:
return self._state.cancelled
def running(self) -> bool:
with self._state.condition:
return self._state.code is None
def done(self) -> bool:
with self._state.condition:
return self._state.code is not None
def result(self, timeout: Optional[float] = None) -> Any:
"""Returns the result of the computation or raises its exception.
This method will never block. Instead, it will raise an exception
if calling this method would otherwise result in blocking.
Since this method will never block, any `timeout` argument passed will
be ignored.
"""
del timeout
with self._state.condition:
if not self._is_complete():
raise grpc.experimental.UsageError(
"_SingleThreadedRendezvous only supports result() when the"
" RPC is complete."
)
if self._state.code is grpc.StatusCode.OK:
return self._state.response
elif self._state.cancelled:
raise grpc.FutureCancelledError()
else:
raise self
def exception(self, timeout: Optional[float] = None) -> Optional[Exception]:
"""Return the exception raised by the computation.
This method will never block. Instead, it will raise an exception
if calling this method would otherwise result in blocking.
Since this method will never block, any `timeout` argument passed will
be ignored.
"""
del timeout
with self._state.condition:
if not self._is_complete():
raise grpc.experimental.UsageError(
"_SingleThreadedRendezvous only supports exception() when"
" the RPC is complete."
)
if self._state.code is grpc.StatusCode.OK:
return None
elif self._state.cancelled:
raise grpc.FutureCancelledError()
else:
return self
def traceback(
self, timeout: Optional[float] = None
) -> Optional[types.TracebackType]:
"""Access the traceback of the exception raised by the computation.
This method will never block. Instead, it will raise an exception
if calling this method would otherwise result in blocking.
Since this method will never block, any `timeout` argument passed will
be ignored.
"""
del timeout
with self._state.condition:
if not self._is_complete():
raise grpc.experimental.UsageError(
"_SingleThreadedRendezvous only supports traceback() when"
" the RPC is complete."
)
if self._state.code is grpc.StatusCode.OK:
return None
elif self._state.cancelled:
raise grpc.FutureCancelledError()
else:
try:
raise self
except grpc.RpcError:
return sys.exc_info()[2]
def add_done_callback(self, fn: Callable[[grpc.Future], None]) -> None:
with self._state.condition:
if self._state.code is None:
self._state.callbacks.append(functools.partial(fn, self))
return
fn(self)
def initial_metadata(self) -> Optional[MetadataType]:
"""See grpc.Call.initial_metadata"""
with self._state.condition:
# NOTE(gnossen): Based on our initial call batch, we are guaranteed
# to receive initial metadata before any messages.
while self._state.initial_metadata is None:
self._consume_next_event()
return self._state.initial_metadata
def trailing_metadata(self) -> Optional[MetadataType]:
"""See grpc.Call.trailing_metadata"""
with self._state.condition:
if self._state.trailing_metadata is None:
raise grpc.experimental.UsageError(
"Cannot get trailing metadata until RPC is completed."
)
return self._state.trailing_metadata
def code(self) -> Optional[grpc.StatusCode]:
"""See grpc.Call.code"""
with self._state.condition:
if self._state.code is None:
raise grpc.experimental.UsageError(
"Cannot get code until RPC is completed."
)
return self._state.code
def details(self) -> Optional[str]:
"""See grpc.Call.details"""
with self._state.condition:
if self._state.details is None:
raise grpc.experimental.UsageError(
"Cannot get details until RPC is completed."
)
return _common.decode(self._state.details)
def _consume_next_event(self) -> Optional[cygrpc.BaseEvent]:
event = self._call.next_event()
with self._state.condition:
callbacks = _handle_event(
event, self._state, self._response_deserializer
)
for callback in callbacks:
# NOTE(gnossen): We intentionally allow exceptions to bubble up
# to the user when running on a single thread.
callback()
return event
def _next_response(self) -> Any:
while True:
self._consume_next_event()
with self._state.condition:
if self._state.response is not None:
response = self._state.response
self._state.response = None
return response
elif (
cygrpc.OperationType.receive_message not in self._state.due
):
if self._state.code is grpc.StatusCode.OK:
raise StopIteration()
elif self._state.code is not None:
raise self
def _next(self) -> Any:
with self._state.condition:
if self._state.code is None:
# We tentatively add the operation as expected and remove
# it if the enqueue operation fails. This allows us to guarantee that
# if an event has been submitted to the core completion queue,
# it is in `due`. If we waited until after a successful
# enqueue operation then a signal could interrupt this
# thread between the enqueue operation and the addition of the
# operation to `due`. This would cause an exception on the
# channel spin thread when the operation completes and no
# corresponding operation would be present in state.due.
# Note that, since `condition` is held through this block, there is
# no data race on `due`.
self._state.due.add(cygrpc.OperationType.receive_message)
operating = self._call.operate(
(cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),), None
)
if not operating:
self._state.due.remove(cygrpc.OperationType.receive_message)
elif self._state.code is grpc.StatusCode.OK:
raise StopIteration()
else:
raise self
return self._next_response()
def debug_error_string(self) -> Optional[str]:
with self._state.condition:
if self._state.debug_error_string is None:
raise grpc.experimental.UsageError(
"Cannot get debug error string until RPC is completed."
)
return _common.decode(self._state.debug_error_string)
class _MultiThreadedRendezvous(
_Rendezvous, grpc.Call, grpc.Future
): # pylint: disable=too-many-ancestors
"""An RPC iterator that depends on a channel spin thread.
This iterator relies upon a per-channel thread running in the background,
dequeueing events from the completion queue, and notifying threads waiting
on the threading.Condition object in the _RPCState object.
This extra thread allows _MultiThreadedRendezvous to fulfill the grpc.Future interface
and to mediate a bidirection streaming RPC.
"""
_state: _RPCState
def initial_metadata(self) -> Optional[MetadataType]:
"""See grpc.Call.initial_metadata"""
with self._state.condition:
def _done():
return self._state.initial_metadata is not None
_common.wait(self._state.condition.wait, _done)
return self._state.initial_metadata
def trailing_metadata(self) -> Optional[MetadataType]:
"""See grpc.Call.trailing_metadata"""
with self._state.condition:
def _done():
return self._state.trailing_metadata is not None
_common.wait(self._state.condition.wait, _done)
return self._state.trailing_metadata
def code(self) -> Optional[grpc.StatusCode]:
"""See grpc.Call.code"""
with self._state.condition:
def _done():
return self._state.code is not None
_common.wait(self._state.condition.wait, _done)
return self._state.code
def details(self) -> Optional[str]:
"""See grpc.Call.details"""
with self._state.condition:
def _done():
return self._state.details is not None
_common.wait(self._state.condition.wait, _done)
return _common.decode(self._state.details)
def debug_error_string(self) -> Optional[str]:
with self._state.condition:
def _done():
return self._state.debug_error_string is not None
_common.wait(self._state.condition.wait, _done)
return _common.decode(self._state.debug_error_string)
def cancelled(self) -> bool:
with self._state.condition:
return self._state.cancelled
def running(self) -> bool:
with self._state.condition:
return self._state.code is None
def done(self) -> bool:
with self._state.condition:
return self._state.code is not None
def _is_complete(self) -> bool:
return self._state.code is not None
def result(self, timeout: Optional[float] = None) -> Any:
"""Returns the result of the computation or raises its exception.
See grpc.Future.result for the full API contract.
"""
with self._state.condition:
timed_out = _common.wait(
self._state.condition.wait, self._is_complete, timeout=timeout
)
if timed_out:
raise grpc.FutureTimeoutError()
else:
if self._state.code is grpc.StatusCode.OK:
return self._state.response
elif self._state.cancelled:
raise grpc.FutureCancelledError()
else:
raise self
def exception(self, timeout: Optional[float] = None) -> Optional[Exception]:
"""Return the exception raised by the computation.
See grpc.Future.exception for the full API contract.
"""
with self._state.condition:
timed_out = _common.wait(
self._state.condition.wait, self._is_complete, timeout=timeout
)
if timed_out:
raise grpc.FutureTimeoutError()
else:
if self._state.code is grpc.StatusCode.OK:
return None
elif self._state.cancelled:
raise grpc.FutureCancelledError()
else:
return self
def traceback(
self, timeout: Optional[float] = None
) -> Optional[types.TracebackType]:
"""Access the traceback of the exception raised by the computation.
See grpc.future.traceback for the full API contract.
"""
with self._state.condition:
timed_out = _common.wait(
self._state.condition.wait, self._is_complete, timeout=timeout
)
if timed_out:
raise grpc.FutureTimeoutError()
else:
if self._state.code is grpc.StatusCode.OK:
return None
elif self._state.cancelled:
raise grpc.FutureCancelledError()
else:
try:
raise self
except grpc.RpcError:
return sys.exc_info()[2]
def add_done_callback(self, fn: Callable[[grpc.Future], None]) -> None:
with self._state.condition:
if self._state.code is None:
self._state.callbacks.append(functools.partial(fn, self))
return
fn(self)
def _next(self) -> Any:
with self._state.condition:
if self._state.code is None:
event_handler = _event_handler(
self._state, self._response_deserializer
)
self._state.due.add(cygrpc.OperationType.receive_message)
operating = self._call.operate(
(cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),),
event_handler,
)
if not operating:
self._state.due.remove(cygrpc.OperationType.receive_message)
elif self._state.code is grpc.StatusCode.OK:
raise StopIteration()
else:
raise self
def _response_ready():
return self._state.response is not None or (
cygrpc.OperationType.receive_message not in self._state.due
and self._state.code is not None
)
_common.wait(self._state.condition.wait, _response_ready)
if self._state.response is not None:
response = self._state.response
self._state.response = None
return response
elif cygrpc.OperationType.receive_message not in self._state.due:
if self._state.code is grpc.StatusCode.OK:
raise StopIteration()
elif self._state.code is not None:
raise self
def _start_unary_request(
request: Any,
timeout: Optional[float],
request_serializer: SerializingFunction,
) -> Tuple[Optional[float], Optional[bytes], Optional[grpc.RpcError]]:
deadline = _deadline(timeout)
serialized_request = _common.serialize(request, request_serializer)
if serialized_request is None:
state = _RPCState(
(),
(),
(),
grpc.StatusCode.INTERNAL,
"Exception serializing request!",
)
error = _InactiveRpcError(state)
return deadline, None, error
else:
return deadline, serialized_request, None
def _end_unary_response_blocking(
state: _RPCState,
call: cygrpc.SegregatedCall,
with_call: bool,
deadline: Optional[float],
) -> Union[ResponseType, Tuple[ResponseType, grpc.Call]]:
if state.code is grpc.StatusCode.OK:
if with_call:
rendezvous = _MultiThreadedRendezvous(state, call, None, deadline)
return state.response, rendezvous
else:
return state.response
else:
raise _InactiveRpcError(state) # pytype: disable=not-instantiable
def _stream_unary_invocation_operations(
metadata: Optional[MetadataType], initial_metadata_flags: int
) -> Sequence[Sequence[cygrpc.Operation]]:
return (
(
cygrpc.SendInitialMetadataOperation(
metadata, initial_metadata_flags
),
cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),
cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),
),
(cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),),
)
def _stream_unary_invocation_operations_and_tags(
metadata: Optional[MetadataType], initial_metadata_flags: int
) -> Sequence[Tuple[Sequence[cygrpc.Operation], Optional[UserTag]]]:
return tuple(
(
operations,
None,
)
for operations in _stream_unary_invocation_operations(
metadata, initial_metadata_flags
)
)
def _determine_deadline(user_deadline: Optional[float]) -> Optional[float]:
parent_deadline = cygrpc.get_deadline_from_context()
if parent_deadline is None and user_deadline is None:
return None
elif parent_deadline is not None and user_deadline is None:
return parent_deadline
elif user_deadline is not None and parent_deadline is None:
return user_deadline
else:
return min(parent_deadline, user_deadline)
class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):
_channel: cygrpc.Channel
_managed_call: IntegratedCallFactory
_method: bytes
_request_serializer: Optional[SerializingFunction]
_response_deserializer: Optional[DeserializingFunction]
_context: Any
# pylint: disable=too-many-arguments
def __init__(
self,
channel: cygrpc.Channel,
managed_call: IntegratedCallFactory,
method: bytes,
request_serializer: Optional[SerializingFunction],
response_deserializer: Optional[DeserializingFunction],
):
self._channel = channel
self._managed_call = managed_call
self._method = method
self._request_serializer = request_serializer
self._response_deserializer = response_deserializer
self._context = cygrpc.build_census_context()
def _prepare(
self,
request: Any,
timeout: Optional[float],
metadata: Optional[MetadataType],
wait_for_ready: Optional[bool],
compression: Optional[grpc.Compression],
) -> Tuple[
Optional[_RPCState],
Optional[Sequence[cygrpc.Operation]],
Optional[float],
Optional[grpc.RpcError],
]:
deadline, serialized_request, rendezvous = _start_unary_request(
request, timeout, self._request_serializer
)
initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
wait_for_ready
)
augmented_metadata = _compression.augment_metadata(
metadata, compression
)
if serialized_request is None:
return None, None, None, rendezvous
else:
state = _RPCState(_UNARY_UNARY_INITIAL_DUE, None, None, None, None)
operations = (
cygrpc.SendInitialMetadataOperation(
augmented_metadata, initial_metadata_flags
),
cygrpc.SendMessageOperation(serialized_request, _EMPTY_FLAGS),
cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS),
cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),
cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),
cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),
)
return state, operations, deadline, None
def _blocking(
self,
request: Any,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> Tuple[_RPCState, cygrpc.SegregatedCall]:
state, operations, deadline, rendezvous = self._prepare(
request, timeout, metadata, wait_for_ready, compression
)
if state is None:
raise rendezvous # pylint: disable-msg=raising-bad-type
else:
state.rpc_start_time = datetime.utcnow()
state.method = _common.decode(self._method)
call = self._channel.segregated_call(
cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS,
self._method,
None,
_determine_deadline(deadline),
metadata,
None if credentials is None else credentials._credentials,
(
(
operations,
None,
),
),
self._context,
)
event = call.next_event()
_handle_event(event, state, self._response_deserializer)
return state, call
def __call__(
self,
request: Any,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> Any:
(
state,
call,
) = self._blocking(
request, timeout, metadata, credentials, wait_for_ready, compression
)
return _end_unary_response_blocking(state, call, False, None)
def with_call(
self,
request: Any,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> Tuple[Any, grpc.Call]:
(
state,
call,
) = self._blocking(
request, timeout, metadata, credentials, wait_for_ready, compression
)
return _end_unary_response_blocking(state, call, True, None)
def future(
self,
request: Any,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> _MultiThreadedRendezvous:
state, operations, deadline, rendezvous = self._prepare(
request, timeout, metadata, wait_for_ready, compression
)
if state is None:
raise rendezvous # pylint: disable-msg=raising-bad-type
else:
event_handler = _event_handler(state, self._response_deserializer)
state.rpc_start_time = datetime.utcnow()
state.method = _common.decode(self._method)
call = self._managed_call(
cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS,
self._method,
None,
deadline,
metadata,
None if credentials is None else credentials._credentials,
(operations,),
event_handler,
self._context,
)
return _MultiThreadedRendezvous(
state, call, self._response_deserializer, deadline
)
class _SingleThreadedUnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable):
_channel: cygrpc.Channel
_method: bytes
_request_serializer: Optional[SerializingFunction]
_response_deserializer: Optional[DeserializingFunction]
_context: Any
# pylint: disable=too-many-arguments
def __init__(
self,
channel: cygrpc.Channel,
method: bytes,
request_serializer: SerializingFunction,
response_deserializer: DeserializingFunction,
):
self._channel = channel
self._method = method
self._request_serializer = request_serializer
self._response_deserializer = response_deserializer
self._context = cygrpc.build_census_context()
def __call__( # pylint: disable=too-many-locals
self,
request: Any,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> _SingleThreadedRendezvous:
deadline = _deadline(timeout)
serialized_request = _common.serialize(
request, self._request_serializer
)
if serialized_request is None:
state = _RPCState(
(),
(),
(),
grpc.StatusCode.INTERNAL,
"Exception serializing request!",
)
raise _InactiveRpcError(state)
state = _RPCState(_UNARY_STREAM_INITIAL_DUE, None, None, None, None)
call_credentials = (
None if credentials is None else credentials._credentials
)
initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
wait_for_ready
)
augmented_metadata = _compression.augment_metadata(
metadata, compression
)
operations = (
(
cygrpc.SendInitialMetadataOperation(
augmented_metadata, initial_metadata_flags
),
cygrpc.SendMessageOperation(serialized_request, _EMPTY_FLAGS),
cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS),
),
(cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),),
(cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),),
)
operations_and_tags = tuple((ops, None) for ops in operations)
state.rpc_start_time = datetime.utcnow()
state.method = _common.decode(self._method)
call = self._channel.segregated_call(
cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS,
self._method,
None,
_determine_deadline(deadline),
metadata,
call_credentials,
operations_and_tags,
self._context,
)
return _SingleThreadedRendezvous(
state, call, self._response_deserializer, deadline
)
class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable):
_channel: cygrpc.Channel
_managed_call: IntegratedCallFactory
_method: bytes
_request_serializer: Optional[SerializingFunction]
_response_deserializer: Optional[DeserializingFunction]
_context: Any
# pylint: disable=too-many-arguments
def __init__(
self,
channel: cygrpc.Channel,
managed_call: IntegratedCallFactory,
method: bytes,
request_serializer: SerializingFunction,
response_deserializer: DeserializingFunction,
):
self._channel = channel
self._managed_call = managed_call
self._method = method
self._request_serializer = request_serializer
self._response_deserializer = response_deserializer
self._context = cygrpc.build_census_context()
def __call__( # pylint: disable=too-many-locals
self,
request: Any,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> _MultiThreadedRendezvous:
deadline, serialized_request, rendezvous = _start_unary_request(
request, timeout, self._request_serializer
)
initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
wait_for_ready
)
if serialized_request is None:
raise rendezvous # pylint: disable-msg=raising-bad-type
else:
augmented_metadata = _compression.augment_metadata(
metadata, compression
)
state = _RPCState(_UNARY_STREAM_INITIAL_DUE, None, None, None, None)
operations = (
(
cygrpc.SendInitialMetadataOperation(
augmented_metadata, initial_metadata_flags
),
cygrpc.SendMessageOperation(
serialized_request, _EMPTY_FLAGS
),
cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS),
cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),
),
(cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),),
)
state.rpc_start_time = datetime.utcnow()
state.method = _common.decode(self._method)
call = self._managed_call(
cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS,
self._method,
None,
_determine_deadline(deadline),
metadata,
None if credentials is None else credentials._credentials,
operations,
_event_handler(state, self._response_deserializer),
self._context,
)
return _MultiThreadedRendezvous(
state, call, self._response_deserializer, deadline
)
class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable):
_channel: cygrpc.Channel
_managed_call: IntegratedCallFactory
_method: bytes
_request_serializer: Optional[SerializingFunction]
_response_deserializer: Optional[DeserializingFunction]
_context: Any
# pylint: disable=too-many-arguments
def __init__(
self,
channel: cygrpc.Channel,
managed_call: IntegratedCallFactory,
method: bytes,
request_serializer: Optional[SerializingFunction],
response_deserializer: Optional[DeserializingFunction],
):
self._channel = channel
self._managed_call = managed_call
self._method = method
self._request_serializer = request_serializer
self._response_deserializer = response_deserializer
self._context = cygrpc.build_census_context()
def _blocking(
self,
request_iterator: Iterator,
timeout: Optional[float],
metadata: Optional[MetadataType],
credentials: Optional[grpc.CallCredentials],
wait_for_ready: Optional[bool],
compression: Optional[grpc.Compression],
) -> Tuple[_RPCState, cygrpc.SegregatedCall]:
deadline = _deadline(timeout)
state = _RPCState(_STREAM_UNARY_INITIAL_DUE, None, None, None, None)
initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
wait_for_ready
)
augmented_metadata = _compression.augment_metadata(
metadata, compression
)
state.rpc_start_time = datetime.utcnow()
state.method = _common.decode(self._method)
call = self._channel.segregated_call(
cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS,
self._method,
None,
_determine_deadline(deadline),
augmented_metadata,
None if credentials is None else credentials._credentials,
_stream_unary_invocation_operations_and_tags(
augmented_metadata, initial_metadata_flags
),
self._context,
)
_consume_request_iterator(
request_iterator, state, call, self._request_serializer, None
)
while True:
event = call.next_event()
with state.condition:
_handle_event(event, state, self._response_deserializer)
state.condition.notify_all()
if not state.due:
break
return state, call
def __call__(
self,
request_iterator: Iterator,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> Any:
(
state,
call,
) = self._blocking(
request_iterator,
timeout,
metadata,
credentials,
wait_for_ready,
compression,
)
return _end_unary_response_blocking(state, call, False, None)
def with_call(
self,
request_iterator: Iterator,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> Tuple[Any, grpc.Call]:
(
state,
call,
) = self._blocking(
request_iterator,
timeout,
metadata,
credentials,
wait_for_ready,
compression,
)
return _end_unary_response_blocking(state, call, True, None)
def future(
self,
request_iterator: Iterator,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> _MultiThreadedRendezvous:
deadline = _deadline(timeout)
state = _RPCState(_STREAM_UNARY_INITIAL_DUE, None, None, None, None)
event_handler = _event_handler(state, self._response_deserializer)
initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
wait_for_ready
)
augmented_metadata = _compression.augment_metadata(
metadata, compression
)
state.rpc_start_time = datetime.utcnow()
state.method = _common.decode(self._method)
call = self._managed_call(
cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS,
self._method,
None,
deadline,
augmented_metadata,
None if credentials is None else credentials._credentials,
_stream_unary_invocation_operations(
metadata, initial_metadata_flags
),
event_handler,
self._context,
)
_consume_request_iterator(
request_iterator,
state,
call,
self._request_serializer,
event_handler,
)
return _MultiThreadedRendezvous(
state, call, self._response_deserializer, deadline
)
class _StreamStreamMultiCallable(grpc.StreamStreamMultiCallable):
_channel: cygrpc.Channel
_managed_call: IntegratedCallFactory
_method: bytes
_request_serializer: Optional[SerializingFunction]
_response_deserializer: Optional[DeserializingFunction]
_context: Any
# pylint: disable=too-many-arguments
def __init__(
self,
channel: cygrpc.Channel,
managed_call: IntegratedCallFactory,
method: bytes,
request_serializer: Optional[SerializingFunction] = None,
response_deserializer: Optional[DeserializingFunction] = None,
):
self._channel = channel
self._managed_call = managed_call
self._method = method
self._request_serializer = request_serializer
self._response_deserializer = response_deserializer
self._context = cygrpc.build_census_context()
def __call__(
self,
request_iterator: Iterator,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> _MultiThreadedRendezvous:
deadline = _deadline(timeout)
state = _RPCState(_STREAM_STREAM_INITIAL_DUE, None, None, None, None)
initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
wait_for_ready
)
augmented_metadata = _compression.augment_metadata(
metadata, compression
)
operations = (
(
cygrpc.SendInitialMetadataOperation(
augmented_metadata, initial_metadata_flags
),
cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),
),
(cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),),
)
event_handler = _event_handler(state, self._response_deserializer)
state.rpc_start_time = datetime.utcnow()
state.method = _common.decode(self._method)
call = self._managed_call(
cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS,
self._method,
None,
_determine_deadline(deadline),
augmented_metadata,
None if credentials is None else credentials._credentials,
operations,
event_handler,
self._context,
)
_consume_request_iterator(
request_iterator,
state,
call,
self._request_serializer,
event_handler,
)
return _MultiThreadedRendezvous(
state, call, self._response_deserializer, deadline
)
class _InitialMetadataFlags(int):
"""Stores immutable initial metadata flags"""
def __new__(cls, value: int = _EMPTY_FLAGS):
value &= cygrpc.InitialMetadataFlags.used_mask
return super(_InitialMetadataFlags, cls).__new__(cls, value)
def with_wait_for_ready(self, wait_for_ready: Optional[bool]) -> int:
if wait_for_ready is not None:
if wait_for_ready:
return self.__class__(
self
| cygrpc.InitialMetadataFlags.wait_for_ready
| cygrpc.InitialMetadataFlags.wait_for_ready_explicitly_set
)
elif not wait_for_ready:
return self.__class__(
self & ~cygrpc.InitialMetadataFlags.wait_for_ready
| cygrpc.InitialMetadataFlags.wait_for_ready_explicitly_set
)
return self
class _ChannelCallState(object):
channel: cygrpc.Channel
managed_calls: int
threading: bool
def __init__(self, channel: cygrpc.Channel):
self.lock = threading.Lock()
self.channel = channel
self.managed_calls = 0
self.threading = False
def reset_postfork_child(self) -> None:
self.managed_calls = 0
def __del__(self):
try:
self.channel.close(
cygrpc.StatusCode.cancelled, "Channel deallocated!"
)
except (TypeError, AttributeError):
pass
def _run_channel_spin_thread(state: _ChannelCallState) -> None:
def channel_spin():
while True:
cygrpc.block_if_fork_in_progress(state)
event = state.channel.next_call_event()
if event.completion_type == cygrpc.CompletionType.queue_timeout:
continue
call_completed = event.tag(event)
if call_completed:
with state.lock:
state.managed_calls -= 1
if state.managed_calls == 0:
return
channel_spin_thread = cygrpc.ForkManagedThread(target=channel_spin)
channel_spin_thread.setDaemon(True)
channel_spin_thread.start()
def _channel_managed_call_management(state: _ChannelCallState):
# pylint: disable=too-many-arguments
def create(
flags: int,
method: bytes,
host: Optional[str],
deadline: Optional[float],
metadata: Optional[MetadataType],
credentials: Optional[cygrpc.CallCredentials],
operations: Sequence[Sequence[cygrpc.Operation]],
event_handler: UserTag,
context,
) -> cygrpc.IntegratedCall:
"""Creates a cygrpc.IntegratedCall.
Args:
flags: An integer bitfield of call flags.
method: The RPC method.
host: A host string for the created call.
deadline: A float to be the deadline of the created call or None if
the call is to have an infinite deadline.
metadata: The metadata for the call or None.
credentials: A cygrpc.CallCredentials or None.
operations: A sequence of sequences of cygrpc.Operations to be
started on the call.
event_handler: A behavior to call to handle the events resultant from
the operations on the call.
context: Context object for distributed tracing.
Returns:
A cygrpc.IntegratedCall with which to conduct an RPC.
"""
operations_and_tags = tuple(
(
operation,
event_handler,
)
for operation in operations
)
with state.lock:
call = state.channel.integrated_call(
flags,
method,
host,
deadline,
metadata,
credentials,
operations_and_tags,
context,
)
if state.managed_calls == 0:
state.managed_calls = 1
_run_channel_spin_thread(state)
else:
state.managed_calls += 1
return call
return create
class _ChannelConnectivityState(object):
lock: threading.RLock
channel: grpc.Channel
polling: bool
connectivity: grpc.ChannelConnectivity
try_to_connect: bool
# TODO(xuanwn): Refactor this: https://github.com/grpc/grpc/issues/31704
callbacks_and_connectivities: List[
Sequence[
Union[
Callable[[grpc.ChannelConnectivity], None],
Optional[grpc.ChannelConnectivity],
]
]
]
delivering: bool
def __init__(self, channel: grpc.Channel):
self.lock = threading.RLock()
self.channel = channel
self.polling = False
self.connectivity = None
self.try_to_connect = False
self.callbacks_and_connectivities = []
self.delivering = False
def reset_postfork_child(self) -> None:
self.polling = False
self.connectivity = None
self.try_to_connect = False
self.callbacks_and_connectivities = []
self.delivering = False
def _deliveries(
state: _ChannelConnectivityState,
) -> List[Callable[[grpc.ChannelConnectivity], None]]:
callbacks_needing_update = []
for callback_and_connectivity in state.callbacks_and_connectivities:
(
callback,
callback_connectivity,
) = callback_and_connectivity
if callback_connectivity is not state.connectivity:
callbacks_needing_update.append(callback)
callback_and_connectivity[1] = state.connectivity
return callbacks_needing_update
def _deliver(
state: _ChannelConnectivityState,
initial_connectivity: grpc.ChannelConnectivity,
initial_callbacks: Sequence[Callable[[grpc.ChannelConnectivity], None]],
) -> None:
connectivity = initial_connectivity
callbacks = initial_callbacks
while True:
for callback in callbacks:
cygrpc.block_if_fork_in_progress(state)
try:
callback(connectivity)
except Exception: # pylint: disable=broad-except
_LOGGER.exception(
_CHANNEL_SUBSCRIPTION_CALLBACK_ERROR_LOG_MESSAGE
)
with state.lock:
callbacks = _deliveries(state)
if callbacks:
connectivity = state.connectivity
else:
state.delivering = False
return
def _spawn_delivery(
state: _ChannelConnectivityState,
callbacks: Sequence[Callable[[grpc.ChannelConnectivity], None]],
) -> None:
delivering_thread = cygrpc.ForkManagedThread(
target=_deliver,
args=(
state,
state.connectivity,
callbacks,
),
)
delivering_thread.setDaemon(True)
delivering_thread.start()
state.delivering = True
# NOTE(https://github.com/grpc/grpc/issues/3064): We'd rather not poll.
def _poll_connectivity(
state: _ChannelConnectivityState,
channel: grpc.Channel,
initial_try_to_connect: bool,
) -> None:
try_to_connect = initial_try_to_connect
connectivity = channel.check_connectivity_state(try_to_connect)
with state.lock:
state.connectivity = (
_common.CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[
connectivity
]
)
callbacks = tuple(
callback for callback, _ in state.callbacks_and_connectivities
)
for callback_and_connectivity in state.callbacks_and_connectivities:
callback_and_connectivity[1] = state.connectivity
if callbacks:
_spawn_delivery(state, callbacks)
while True:
event = channel.watch_connectivity_state(
connectivity, time.time() + 0.2
)
cygrpc.block_if_fork_in_progress(state)
with state.lock:
if (
not state.callbacks_and_connectivities
and not state.try_to_connect
):
state.polling = False
state.connectivity = None
break
try_to_connect = state.try_to_connect
state.try_to_connect = False
if event.success or try_to_connect:
connectivity = channel.check_connectivity_state(try_to_connect)
with state.lock:
state.connectivity = (
_common.CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[
connectivity
]
)
if not state.delivering:
callbacks = _deliveries(state)
if callbacks:
_spawn_delivery(state, callbacks)
def _subscribe(
state: _ChannelConnectivityState,
callback: Callable[[grpc.ChannelConnectivity], None],
try_to_connect: bool,
) -> None:
with state.lock:
if not state.callbacks_and_connectivities and not state.polling:
polling_thread = cygrpc.ForkManagedThread(
target=_poll_connectivity,
args=(state, state.channel, bool(try_to_connect)),
)
polling_thread.setDaemon(True)
polling_thread.start()
state.polling = True
state.callbacks_and_connectivities.append([callback, None])
elif not state.delivering and state.connectivity is not None:
_spawn_delivery(state, (callback,))
state.try_to_connect |= bool(try_to_connect)
state.callbacks_and_connectivities.append(
[callback, state.connectivity]
)
else:
state.try_to_connect |= bool(try_to_connect)
state.callbacks_and_connectivities.append([callback, None])
def _unsubscribe(
state: _ChannelConnectivityState,
callback: Callable[[grpc.ChannelConnectivity], None],
) -> None:
with state.lock:
for index, (subscribed_callback, unused_connectivity) in enumerate(
state.callbacks_and_connectivities
):
if callback == subscribed_callback:
state.callbacks_and_connectivities.pop(index)
break
def _augment_options(
base_options: Sequence[ChannelArgumentType],
compression: Optional[grpc.Compression],
) -> Sequence[ChannelArgumentType]:
compression_option = _compression.create_channel_option(compression)
return (
tuple(base_options)
+ compression_option
+ (
(
cygrpc.ChannelArgKey.primary_user_agent_string,
_USER_AGENT,
),
)
)
def _separate_channel_options(
options: Sequence[ChannelArgumentType],
) -> Tuple[Sequence[ChannelArgumentType], Sequence[ChannelArgumentType]]:
"""Separates core channel options from Python channel options."""
core_options = []
python_options = []
for pair in options:
if (
pair[0]
== grpc.experimental.ChannelOptions.SingleThreadedUnaryStream
):
python_options.append(pair)
else:
core_options.append(pair)
return python_options, core_options
class Channel(grpc.Channel):
"""A cygrpc.Channel-backed implementation of grpc.Channel."""
_single_threaded_unary_stream: bool
_channel: cygrpc.Channel
_call_state: _ChannelCallState
_connectivity_state: _ChannelConnectivityState
def __init__(
self,
target: str,
options: Sequence[ChannelArgumentType],
credentials: Optional[grpc.ChannelCredentials],
compression: Optional[grpc.Compression],
):
"""Constructor.
Args:
target: The target to which to connect.
options: Configuration options for the channel.
credentials: A cygrpc.ChannelCredentials or None.
compression: An optional value indicating the compression method to be
used over the lifetime of the channel.
"""
python_options, core_options = _separate_channel_options(options)
self._single_threaded_unary_stream = (
_DEFAULT_SINGLE_THREADED_UNARY_STREAM
)
self._process_python_options(python_options)
self._channel = cygrpc.Channel(
_common.encode(target),
_augment_options(core_options, compression),
credentials,
)
self._call_state = _ChannelCallState(self._channel)
self._connectivity_state = _ChannelConnectivityState(self._channel)
cygrpc.fork_register_channel(self)
if cygrpc.g_gevent_activated:
cygrpc.gevent_increment_channel_count()
def _process_python_options(
self, python_options: Sequence[ChannelArgumentType]
) -> None:
"""Sets channel attributes according to python-only channel options."""
for pair in python_options:
if (
pair[0]
== grpc.experimental.ChannelOptions.SingleThreadedUnaryStream
):
self._single_threaded_unary_stream = True
def subscribe(
self,
callback: Callable[[grpc.ChannelConnectivity], None],
try_to_connect: Optional[bool] = None,
) -> None:
_subscribe(self._connectivity_state, callback, try_to_connect)
def unsubscribe(
self, callback: Callable[[grpc.ChannelConnectivity], None]
) -> None:
_unsubscribe(self._connectivity_state, callback)
def unary_unary(
self,
method: str,
request_serializer: Optional[SerializingFunction] = None,
response_deserializer: Optional[DeserializingFunction] = None,
) -> grpc.UnaryUnaryMultiCallable:
return _UnaryUnaryMultiCallable(
self._channel,
_channel_managed_call_management(self._call_state),
_common.encode(method),
request_serializer,
response_deserializer,
)
def unary_stream(
self,
method: str,
request_serializer: Optional[SerializingFunction] = None,
response_deserializer: Optional[DeserializingFunction] = None,
) -> grpc.UnaryStreamMultiCallable:
# NOTE(rbellevi): Benchmarks have shown that running a unary-stream RPC
# on a single Python thread results in an appreciable speed-up. However,
# due to slight differences in capability, the multi-threaded variant
# remains the default.
if self._single_threaded_unary_stream:
return _SingleThreadedUnaryStreamMultiCallable(
self._channel,
_common.encode(method),
request_serializer,
response_deserializer,
)
else:
return _UnaryStreamMultiCallable(
self._channel,
_channel_managed_call_management(self._call_state),
_common.encode(method),
request_serializer,
response_deserializer,
)
def stream_unary(
self,
method: str,
request_serializer: Optional[SerializingFunction] = None,
response_deserializer: Optional[DeserializingFunction] = None,
) -> grpc.StreamUnaryMultiCallable:
return _StreamUnaryMultiCallable(
self._channel,
_channel_managed_call_management(self._call_state),
_common.encode(method),
request_serializer,
response_deserializer,
)
def stream_stream(
self,
method: str,
request_serializer: Optional[SerializingFunction] = None,
response_deserializer: Optional[DeserializingFunction] = None,
) -> grpc.StreamStreamMultiCallable:
return _StreamStreamMultiCallable(
self._channel,
_channel_managed_call_management(self._call_state),
_common.encode(method),
request_serializer,
response_deserializer,
)
def _unsubscribe_all(self) -> None:
state = self._connectivity_state
if state:
with state.lock:
del state.callbacks_and_connectivities[:]
def _close(self) -> None:
self._unsubscribe_all()
self._channel.close(cygrpc.StatusCode.cancelled, "Channel closed!")
cygrpc.fork_unregister_channel(self)
if cygrpc.g_gevent_activated:
cygrpc.gevent_decrement_channel_count()
def _close_on_fork(self) -> None:
self._unsubscribe_all()
self._channel.close_on_fork(
cygrpc.StatusCode.cancelled, "Channel closed due to fork"
)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._close()
return False
def close(self) -> None:
self._close()
def __del__(self):
# TODO(https://github.com/grpc/grpc/issues/12531): Several releases
# after 1.12 (1.16 or thereabouts?) add a "self._channel.close" call
# here (or more likely, call self._close() here). We don't do this today
# because many valid use cases today allow the channel to be deleted
# immediately after stubs are created. After a sufficient period of time
# has passed for all users to be trusted to freeze out to their channels
# for as long as they are in use and to close them after using them,
# then deletion of this grpc._channel.Channel instance can be made to
# effect closure of the underlying cygrpc.Channel instance.
try:
self._unsubscribe_all()
except: # pylint: disable=bare-except
# Exceptions in __del__ are ignored by Python anyway, but they can
# keep spamming logs. Just silence them.
pass
| 76,304
| 34.992925
| 90
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/__init__.py
|
# Copyright 2015-2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""gRPC's Python API."""
import abc
import contextlib
import enum
import logging
import sys
from grpc import _compression
from grpc._cython import cygrpc as _cygrpc
from grpc._runtime_protos import protos
from grpc._runtime_protos import protos_and_services
from grpc._runtime_protos import services
logging.getLogger(__name__).addHandler(logging.NullHandler())
try:
# pylint: disable=ungrouped-imports
from grpc._grpcio_metadata import __version__
except ImportError:
__version__ = "dev0"
############################## Future Interface ###############################
class FutureTimeoutError(Exception):
"""Indicates that a method call on a Future timed out."""
class FutureCancelledError(Exception):
"""Indicates that the computation underlying a Future was cancelled."""
class Future(abc.ABC):
"""A representation of a computation in another control flow.
Computations represented by a Future may be yet to be begun,
may be ongoing, or may have already completed.
"""
@abc.abstractmethod
def cancel(self):
"""Attempts to cancel the computation.
This method does not block.
Returns:
bool:
Returns True if the computation was canceled.
Returns False under all other circumstances, for example:
1. computation has begun and could not be canceled.
2. computation has finished
3. computation is scheduled for execution and it is impossible
to determine its state without blocking.
"""
raise NotImplementedError()
@abc.abstractmethod
def cancelled(self):
"""Describes whether the computation was cancelled.
This method does not block.
Returns:
bool:
Returns True if the computation was cancelled before its result became
available.
Returns False under all other circumstances, for example:
1. computation was not cancelled.
2. computation's result is available.
"""
raise NotImplementedError()
@abc.abstractmethod
def running(self):
"""Describes whether the computation is taking place.
This method does not block.
Returns:
Returns True if the computation is scheduled for execution or
currently executing.
Returns False if the computation already executed or was cancelled.
"""
raise NotImplementedError()
@abc.abstractmethod
def done(self):
"""Describes whether the computation has taken place.
This method does not block.
Returns:
bool:
Returns True if the computation already executed or was cancelled.
Returns False if the computation is scheduled for execution or
currently executing.
This is exactly opposite of the running() method's result.
"""
raise NotImplementedError()
@abc.abstractmethod
def result(self, timeout=None):
"""Returns the result of the computation or raises its exception.
This method may return immediately or may block.
Args:
timeout: The length of time in seconds to wait for the computation to
finish or be cancelled. If None, the call will block until the
computations's termination.
Returns:
The return value of the computation.
Raises:
FutureTimeoutError: If a timeout value is passed and the computation
does not terminate within the allotted time.
FutureCancelledError: If the computation was cancelled.
Exception: If the computation raised an exception, this call will
raise the same exception.
"""
raise NotImplementedError()
@abc.abstractmethod
def exception(self, timeout=None):
"""Return the exception raised by the computation.
This method may return immediately or may block.
Args:
timeout: The length of time in seconds to wait for the computation to
terminate or be cancelled. If None, the call will block until the
computations's termination.
Returns:
The exception raised by the computation, or None if the computation
did not raise an exception.
Raises:
FutureTimeoutError: If a timeout value is passed and the computation
does not terminate within the allotted time.
FutureCancelledError: If the computation was cancelled.
"""
raise NotImplementedError()
@abc.abstractmethod
def traceback(self, timeout=None):
"""Access the traceback of the exception raised by the computation.
This method may return immediately or may block.
Args:
timeout: The length of time in seconds to wait for the computation
to terminate or be cancelled. If None, the call will block until
the computation's termination.
Returns:
The traceback of the exception raised by the computation, or None
if the computation did not raise an exception.
Raises:
FutureTimeoutError: If a timeout value is passed and the computation
does not terminate within the allotted time.
FutureCancelledError: If the computation was cancelled.
"""
raise NotImplementedError()
@abc.abstractmethod
def add_done_callback(self, fn):
"""Adds a function to be called at completion of the computation.
The callback will be passed this Future object describing the outcome
of the computation. Callbacks will be invoked after the future is
terminated, whether successfully or not.
If the computation has already completed, the callback will be called
immediately.
Exceptions raised in the callback will be logged at ERROR level, but
will not terminate any threads of execution.
Args:
fn: A callable taking this Future object as its single parameter.
"""
raise NotImplementedError()
################################ gRPC Enums ##################################
@enum.unique
class ChannelConnectivity(enum.Enum):
"""Mirrors grpc_connectivity_state in the gRPC Core.
Attributes:
IDLE: The channel is idle.
CONNECTING: The channel is connecting.
READY: The channel is ready to conduct RPCs.
TRANSIENT_FAILURE: The channel has seen a failure from which it expects
to recover.
SHUTDOWN: The channel has seen a failure from which it cannot recover.
"""
IDLE = (_cygrpc.ConnectivityState.idle, "idle")
CONNECTING = (_cygrpc.ConnectivityState.connecting, "connecting")
READY = (_cygrpc.ConnectivityState.ready, "ready")
TRANSIENT_FAILURE = (
_cygrpc.ConnectivityState.transient_failure,
"transient failure",
)
SHUTDOWN = (_cygrpc.ConnectivityState.shutdown, "shutdown")
@enum.unique
class StatusCode(enum.Enum):
"""Mirrors grpc_status_code in the gRPC Core.
Attributes:
OK: Not an error; returned on success
CANCELLED: The operation was cancelled (typically by the caller).
UNKNOWN: Unknown error.
INVALID_ARGUMENT: Client specified an invalid argument.
DEADLINE_EXCEEDED: Deadline expired before operation could complete.
NOT_FOUND: Some requested entity (e.g., file or directory) was not found.
ALREADY_EXISTS: Some entity that we attempted to create (e.g., file or directory)
already exists.
PERMISSION_DENIED: The caller does not have permission to execute the specified
operation.
UNAUTHENTICATED: The request does not have valid authentication credentials for the
operation.
RESOURCE_EXHAUSTED: Some resource has been exhausted, perhaps a per-user quota, or
perhaps the entire file system is out of space.
FAILED_PRECONDITION: Operation was rejected because the system is not in a state
required for the operation's execution.
ABORTED: The operation was aborted, typically due to a concurrency issue
like sequencer check failures, transaction aborts, etc.
UNIMPLEMENTED: Operation is not implemented or not supported/enabled in this service.
INTERNAL: Internal errors. Means some invariants expected by underlying
system has been broken.
UNAVAILABLE: The service is currently unavailable.
DATA_LOSS: Unrecoverable data loss or corruption.
"""
OK = (_cygrpc.StatusCode.ok, "ok")
CANCELLED = (_cygrpc.StatusCode.cancelled, "cancelled")
UNKNOWN = (_cygrpc.StatusCode.unknown, "unknown")
INVALID_ARGUMENT = (_cygrpc.StatusCode.invalid_argument, "invalid argument")
DEADLINE_EXCEEDED = (
_cygrpc.StatusCode.deadline_exceeded,
"deadline exceeded",
)
NOT_FOUND = (_cygrpc.StatusCode.not_found, "not found")
ALREADY_EXISTS = (_cygrpc.StatusCode.already_exists, "already exists")
PERMISSION_DENIED = (
_cygrpc.StatusCode.permission_denied,
"permission denied",
)
RESOURCE_EXHAUSTED = (
_cygrpc.StatusCode.resource_exhausted,
"resource exhausted",
)
FAILED_PRECONDITION = (
_cygrpc.StatusCode.failed_precondition,
"failed precondition",
)
ABORTED = (_cygrpc.StatusCode.aborted, "aborted")
OUT_OF_RANGE = (_cygrpc.StatusCode.out_of_range, "out of range")
UNIMPLEMENTED = (_cygrpc.StatusCode.unimplemented, "unimplemented")
INTERNAL = (_cygrpc.StatusCode.internal, "internal")
UNAVAILABLE = (_cygrpc.StatusCode.unavailable, "unavailable")
DATA_LOSS = (_cygrpc.StatusCode.data_loss, "data loss")
UNAUTHENTICATED = (_cygrpc.StatusCode.unauthenticated, "unauthenticated")
############################# gRPC Status ################################
class Status(abc.ABC):
"""Describes the status of an RPC.
This is an EXPERIMENTAL API.
Attributes:
code: A StatusCode object to be sent to the client.
details: A UTF-8-encodable string to be sent to the client upon
termination of the RPC.
trailing_metadata: The trailing :term:`metadata` in the RPC.
"""
############################# gRPC Exceptions ################################
class RpcError(Exception):
"""Raised by the gRPC library to indicate non-OK-status RPC termination."""
############################## Shared Context ################################
class RpcContext(abc.ABC):
"""Provides RPC-related information and action."""
@abc.abstractmethod
def is_active(self):
"""Describes whether the RPC is active or has terminated.
Returns:
bool:
True if RPC is active, False otherwise.
"""
raise NotImplementedError()
@abc.abstractmethod
def time_remaining(self):
"""Describes the length of allowed time remaining for the RPC.
Returns:
A nonnegative float indicating the length of allowed time in seconds
remaining for the RPC to complete before it is considered to have
timed out, or None if no deadline was specified for the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def cancel(self):
"""Cancels the RPC.
Idempotent and has no effect if the RPC has already terminated.
"""
raise NotImplementedError()
@abc.abstractmethod
def add_callback(self, callback):
"""Registers a callback to be called on RPC termination.
Args:
callback: A no-parameter callable to be called on RPC termination.
Returns:
True if the callback was added and will be called later; False if
the callback was not added and will not be called (because the RPC
already terminated or some other reason).
"""
raise NotImplementedError()
######################### Invocation-Side Context ############################
class Call(RpcContext, metaclass=abc.ABCMeta):
"""Invocation-side utility object for an RPC."""
@abc.abstractmethod
def initial_metadata(self):
"""Accesses the initial metadata sent by the server.
This method blocks until the value is available.
Returns:
The initial :term:`metadata`.
"""
raise NotImplementedError()
@abc.abstractmethod
def trailing_metadata(self):
"""Accesses the trailing metadata sent by the server.
This method blocks until the value is available.
Returns:
The trailing :term:`metadata`.
"""
raise NotImplementedError()
@abc.abstractmethod
def code(self):
"""Accesses the status code sent by the server.
This method blocks until the value is available.
Returns:
The StatusCode value for the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def details(self):
"""Accesses the details sent by the server.
This method blocks until the value is available.
Returns:
The details string of the RPC.
"""
raise NotImplementedError()
############## Invocation-Side Interceptor Interfaces & Classes ##############
class ClientCallDetails(abc.ABC):
"""Describes an RPC to be invoked.
Attributes:
method: The method name of the RPC.
timeout: An optional duration of time in seconds to allow for the RPC.
metadata: Optional :term:`metadata` to be transmitted to
the service-side of the RPC.
credentials: An optional CallCredentials for the RPC.
wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip.
"""
class UnaryUnaryClientInterceptor(abc.ABC):
"""Affords intercepting unary-unary invocations."""
@abc.abstractmethod
def intercept_unary_unary(self, continuation, client_call_details, request):
"""Intercepts a unary-unary invocation asynchronously.
Args:
continuation: A function that proceeds with the invocation by
executing the next interceptor in chain or invoking the
actual RPC on the underlying Channel. It is the interceptor's
responsibility to call it if it decides to move the RPC forward.
The interceptor can use
`response_future = continuation(client_call_details, request)`
to continue with the RPC. `continuation` returns an object that is
both a Call for the RPC and a Future. In the event of RPC
completion, the return Call-Future's result value will be
the response message of the RPC. Should the event terminate
with non-OK status, the returned Call-Future's exception value
will be an RpcError.
client_call_details: A ClientCallDetails object describing the
outgoing RPC.
request: The request value for the RPC.
Returns:
An object that is both a Call for the RPC and a Future.
In the event of RPC completion, the return Call-Future's
result value will be the response message of the RPC.
Should the event terminate with non-OK status, the returned
Call-Future's exception value will be an RpcError.
"""
raise NotImplementedError()
class UnaryStreamClientInterceptor(abc.ABC):
"""Affords intercepting unary-stream invocations."""
@abc.abstractmethod
def intercept_unary_stream(
self, continuation, client_call_details, request
):
"""Intercepts a unary-stream invocation.
Args:
continuation: A function that proceeds with the invocation by
executing the next interceptor in chain or invoking the
actual RPC on the underlying Channel. It is the interceptor's
responsibility to call it if it decides to move the RPC forward.
The interceptor can use
`response_iterator = continuation(client_call_details, request)`
to continue with the RPC. `continuation` returns an object that is
both a Call for the RPC and an iterator for response values.
Drawing response values from the returned Call-iterator may
raise RpcError indicating termination of the RPC with non-OK
status.
client_call_details: A ClientCallDetails object describing the
outgoing RPC.
request: The request value for the RPC.
Returns:
An object that is both a Call for the RPC and an iterator of
response values. Drawing response values from the returned
Call-iterator may raise RpcError indicating termination of
the RPC with non-OK status. This object *should* also fulfill the
Future interface, though it may not.
"""
raise NotImplementedError()
class StreamUnaryClientInterceptor(abc.ABC):
"""Affords intercepting stream-unary invocations."""
@abc.abstractmethod
def intercept_stream_unary(
self, continuation, client_call_details, request_iterator
):
"""Intercepts a stream-unary invocation asynchronously.
Args:
continuation: A function that proceeds with the invocation by
executing the next interceptor in chain or invoking the
actual RPC on the underlying Channel. It is the interceptor's
responsibility to call it if it decides to move the RPC forward.
The interceptor can use
`response_future = continuation(client_call_details, request_iterator)`
to continue with the RPC. `continuation` returns an object that is
both a Call for the RPC and a Future. In the event of RPC completion,
the return Call-Future's result value will be the response message
of the RPC. Should the event terminate with non-OK status, the
returned Call-Future's exception value will be an RpcError.
client_call_details: A ClientCallDetails object describing the
outgoing RPC.
request_iterator: An iterator that yields request values for the RPC.
Returns:
An object that is both a Call for the RPC and a Future.
In the event of RPC completion, the return Call-Future's
result value will be the response message of the RPC.
Should the event terminate with non-OK status, the returned
Call-Future's exception value will be an RpcError.
"""
raise NotImplementedError()
class StreamStreamClientInterceptor(abc.ABC):
"""Affords intercepting stream-stream invocations."""
@abc.abstractmethod
def intercept_stream_stream(
self, continuation, client_call_details, request_iterator
):
"""Intercepts a stream-stream invocation.
Args:
continuation: A function that proceeds with the invocation by
executing the next interceptor in chain or invoking the
actual RPC on the underlying Channel. It is the interceptor's
responsibility to call it if it decides to move the RPC forward.
The interceptor can use
`response_iterator = continuation(client_call_details, request_iterator)`
to continue with the RPC. `continuation` returns an object that is
both a Call for the RPC and an iterator for response values.
Drawing response values from the returned Call-iterator may
raise RpcError indicating termination of the RPC with non-OK
status.
client_call_details: A ClientCallDetails object describing the
outgoing RPC.
request_iterator: An iterator that yields request values for the RPC.
Returns:
An object that is both a Call for the RPC and an iterator of
response values. Drawing response values from the returned
Call-iterator may raise RpcError indicating termination of
the RPC with non-OK status. This object *should* also fulfill the
Future interface, though it may not.
"""
raise NotImplementedError()
############ Authentication & Authorization Interfaces & Classes #############
class ChannelCredentials(object):
"""An encapsulation of the data required to create a secure Channel.
This class has no supported interface - it exists to define the type of its
instances and its instances exist to be passed to other functions. For
example, ssl_channel_credentials returns an instance of this class and
secure_channel requires an instance of this class.
"""
def __init__(self, credentials):
self._credentials = credentials
class CallCredentials(object):
"""An encapsulation of the data required to assert an identity over a call.
A CallCredentials has to be used with secure Channel, otherwise the
metadata will not be transmitted to the server.
A CallCredentials may be composed with ChannelCredentials to always assert
identity for every call over that Channel.
This class has no supported interface - it exists to define the type of its
instances and its instances exist to be passed to other functions.
"""
def __init__(self, credentials):
self._credentials = credentials
class AuthMetadataContext(abc.ABC):
"""Provides information to call credentials metadata plugins.
Attributes:
service_url: A string URL of the service being called into.
method_name: A string of the fully qualified method name being called.
"""
class AuthMetadataPluginCallback(abc.ABC):
"""Callback object received by a metadata plugin."""
def __call__(self, metadata, error):
"""Passes to the gRPC runtime authentication metadata for an RPC.
Args:
metadata: The :term:`metadata` used to construct the CallCredentials.
error: An Exception to indicate error or None to indicate success.
"""
raise NotImplementedError()
class AuthMetadataPlugin(abc.ABC):
"""A specification for custom authentication."""
def __call__(self, context, callback):
"""Implements authentication by passing metadata to a callback.
This method will be invoked asynchronously in a separate thread.
Args:
context: An AuthMetadataContext providing information on the RPC that
the plugin is being called to authenticate.
callback: An AuthMetadataPluginCallback to be invoked either
synchronously or asynchronously.
"""
raise NotImplementedError()
class ServerCredentials(object):
"""An encapsulation of the data required to open a secure port on a Server.
This class has no supported interface - it exists to define the type of its
instances and its instances exist to be passed to other functions.
"""
def __init__(self, credentials):
self._credentials = credentials
class ServerCertificateConfiguration(object):
"""A certificate configuration for use with an SSL-enabled Server.
Instances of this class can be returned in the certificate configuration
fetching callback.
This class has no supported interface -- it exists to define the
type of its instances and its instances exist to be passed to
other functions.
"""
def __init__(self, certificate_configuration):
self._certificate_configuration = certificate_configuration
######################## Multi-Callable Interfaces ###########################
class UnaryUnaryMultiCallable(abc.ABC):
"""Affords invoking a unary-unary RPC from client-side."""
@abc.abstractmethod
def __call__(
self,
request,
timeout=None,
metadata=None,
credentials=None,
wait_for_ready=None,
compression=None,
):
"""Synchronously invokes the underlying RPC.
Args:
request: The request value for the RPC.
timeout: An optional duration of time in seconds to allow
for the RPC.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC. Only valid for
secure Channel.
wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip.
Returns:
The response value for the RPC.
Raises:
RpcError: Indicating that the RPC terminated with non-OK status. The
raised RpcError will also be a Call for the RPC affording the RPC's
metadata, status code, and details.
"""
raise NotImplementedError()
@abc.abstractmethod
def with_call(
self,
request,
timeout=None,
metadata=None,
credentials=None,
wait_for_ready=None,
compression=None,
):
"""Synchronously invokes the underlying RPC.
Args:
request: The request value for the RPC.
timeout: An optional durating of time in seconds to allow for
the RPC.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC. Only valid for
secure Channel.
wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip.
Returns:
The response value for the RPC and a Call value for the RPC.
Raises:
RpcError: Indicating that the RPC terminated with non-OK status. The
raised RpcError will also be a Call for the RPC affording the RPC's
metadata, status code, and details.
"""
raise NotImplementedError()
@abc.abstractmethod
def future(
self,
request,
timeout=None,
metadata=None,
credentials=None,
wait_for_ready=None,
compression=None,
):
"""Asynchronously invokes the underlying RPC.
Args:
request: The request value for the RPC.
timeout: An optional duration of time in seconds to allow for
the RPC.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC. Only valid for
secure Channel.
wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip.
Returns:
An object that is both a Call for the RPC and a Future.
In the event of RPC completion, the return Call-Future's result
value will be the response message of the RPC.
Should the event terminate with non-OK status,
the returned Call-Future's exception value will be an RpcError.
"""
raise NotImplementedError()
class UnaryStreamMultiCallable(abc.ABC):
"""Affords invoking a unary-stream RPC from client-side."""
@abc.abstractmethod
def __call__(
self,
request,
timeout=None,
metadata=None,
credentials=None,
wait_for_ready=None,
compression=None,
):
"""Invokes the underlying RPC.
Args:
request: The request value for the RPC.
timeout: An optional duration of time in seconds to allow for
the RPC. If None, the timeout is considered infinite.
metadata: An optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC. Only valid for
secure Channel.
wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip.
Returns:
An object that is a Call for the RPC, an iterator of response
values, and a Future for the RPC. Drawing response values from the
returned Call-iterator may raise RpcError indicating termination of
the RPC with non-OK status.
"""
raise NotImplementedError()
class StreamUnaryMultiCallable(abc.ABC):
"""Affords invoking a stream-unary RPC from client-side."""
@abc.abstractmethod
def __call__(
self,
request_iterator,
timeout=None,
metadata=None,
credentials=None,
wait_for_ready=None,
compression=None,
):
"""Synchronously invokes the underlying RPC.
Args:
request_iterator: An iterator that yields request values for
the RPC.
timeout: An optional duration of time in seconds to allow for
the RPC. If None, the timeout is considered infinite.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC. Only valid for
secure Channel.
wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip.
Returns:
The response value for the RPC.
Raises:
RpcError: Indicating that the RPC terminated with non-OK status. The
raised RpcError will also implement grpc.Call, affording methods
such as metadata, code, and details.
"""
raise NotImplementedError()
@abc.abstractmethod
def with_call(
self,
request_iterator,
timeout=None,
metadata=None,
credentials=None,
wait_for_ready=None,
compression=None,
):
"""Synchronously invokes the underlying RPC on the client.
Args:
request_iterator: An iterator that yields request values for
the RPC.
timeout: An optional duration of time in seconds to allow for
the RPC. If None, the timeout is considered infinite.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC. Only valid for
secure Channel.
wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip.
Returns:
The response value for the RPC and a Call object for the RPC.
Raises:
RpcError: Indicating that the RPC terminated with non-OK status. The
raised RpcError will also be a Call for the RPC affording the RPC's
metadata, status code, and details.
"""
raise NotImplementedError()
@abc.abstractmethod
def future(
self,
request_iterator,
timeout=None,
metadata=None,
credentials=None,
wait_for_ready=None,
compression=None,
):
"""Asynchronously invokes the underlying RPC on the client.
Args:
request_iterator: An iterator that yields request values for the RPC.
timeout: An optional duration of time in seconds to allow for
the RPC. If None, the timeout is considered infinite.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC. Only valid for
secure Channel.
wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip.
Returns:
An object that is both a Call for the RPC and a Future.
In the event of RPC completion, the return Call-Future's result value
will be the response message of the RPC. Should the event terminate
with non-OK status, the returned Call-Future's exception value will
be an RpcError.
"""
raise NotImplementedError()
class StreamStreamMultiCallable(abc.ABC):
"""Affords invoking a stream-stream RPC on client-side."""
@abc.abstractmethod
def __call__(
self,
request_iterator,
timeout=None,
metadata=None,
credentials=None,
wait_for_ready=None,
compression=None,
):
"""Invokes the underlying RPC on the client.
Args:
request_iterator: An iterator that yields request values for the RPC.
timeout: An optional duration of time in seconds to allow for
the RPC. If not specified, the timeout is considered infinite.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC. Only valid for
secure Channel.
wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip.
Returns:
An object that is a Call for the RPC, an iterator of response
values, and a Future for the RPC. Drawing response values from the
returned Call-iterator may raise RpcError indicating termination of
the RPC with non-OK status.
"""
raise NotImplementedError()
############################# Channel Interface ##############################
class Channel(abc.ABC):
"""Affords RPC invocation via generic methods on client-side.
Channel objects implement the Context Manager type, although they need not
support being entered and exited multiple times.
"""
@abc.abstractmethod
def subscribe(self, callback, try_to_connect=False):
"""Subscribe to this Channel's connectivity state machine.
A Channel may be in any of the states described by ChannelConnectivity.
This method allows application to monitor the state transitions.
The typical use case is to debug or gain better visibility into gRPC
runtime's state.
Args:
callback: A callable to be invoked with ChannelConnectivity argument.
ChannelConnectivity describes current state of the channel.
The callable will be invoked immediately upon subscription
and again for every change to ChannelConnectivity until it
is unsubscribed or this Channel object goes out of scope.
try_to_connect: A boolean indicating whether or not this Channel
should attempt to connect immediately. If set to False, gRPC
runtime decides when to connect.
"""
raise NotImplementedError()
@abc.abstractmethod
def unsubscribe(self, callback):
"""Unsubscribes a subscribed callback from this Channel's connectivity.
Args:
callback: A callable previously registered with this Channel from
having been passed to its "subscribe" method.
"""
raise NotImplementedError()
@abc.abstractmethod
def unary_unary(
self, method, request_serializer=None, response_deserializer=None
):
"""Creates a UnaryUnaryMultiCallable for a unary-unary method.
Args:
method: The name of the RPC method.
request_serializer: Optional :term:`serializer` for serializing the request
message. Request goes unserialized in case None is passed.
response_deserializer: Optional :term:`deserializer` for deserializing the
response message. Response goes undeserialized in case None
is passed.
Returns:
A UnaryUnaryMultiCallable value for the named unary-unary method.
"""
raise NotImplementedError()
@abc.abstractmethod
def unary_stream(
self, method, request_serializer=None, response_deserializer=None
):
"""Creates a UnaryStreamMultiCallable for a unary-stream method.
Args:
method: The name of the RPC method.
request_serializer: Optional :term:`serializer` for serializing the request
message. Request goes unserialized in case None is passed.
response_deserializer: Optional :term:`deserializer` for deserializing the
response message. Response goes undeserialized in case None is
passed.
Returns:
A UnaryStreamMultiCallable value for the name unary-stream method.
"""
raise NotImplementedError()
@abc.abstractmethod
def stream_unary(
self, method, request_serializer=None, response_deserializer=None
):
"""Creates a StreamUnaryMultiCallable for a stream-unary method.
Args:
method: The name of the RPC method.
request_serializer: Optional :term:`serializer` for serializing the request
message. Request goes unserialized in case None is passed.
response_deserializer: Optional :term:`deserializer` for deserializing the
response message. Response goes undeserialized in case None is
passed.
Returns:
A StreamUnaryMultiCallable value for the named stream-unary method.
"""
raise NotImplementedError()
@abc.abstractmethod
def stream_stream(
self, method, request_serializer=None, response_deserializer=None
):
"""Creates a StreamStreamMultiCallable for a stream-stream method.
Args:
method: The name of the RPC method.
request_serializer: Optional :term:`serializer` for serializing the request
message. Request goes unserialized in case None is passed.
response_deserializer: Optional :term:`deserializer` for deserializing the
response message. Response goes undeserialized in case None
is passed.
Returns:
A StreamStreamMultiCallable value for the named stream-stream method.
"""
raise NotImplementedError()
@abc.abstractmethod
def close(self):
"""Closes this Channel and releases all resources held by it.
Closing the Channel will immediately terminate all RPCs active with the
Channel and it is not valid to invoke new RPCs with the Channel.
This method is idempotent.
"""
raise NotImplementedError()
def __enter__(self):
"""Enters the runtime context related to the channel object."""
raise NotImplementedError()
def __exit__(self, exc_type, exc_val, exc_tb):
"""Exits the runtime context related to the channel object."""
raise NotImplementedError()
########################## Service-Side Context ##############################
class ServicerContext(RpcContext, metaclass=abc.ABCMeta):
"""A context object passed to method implementations."""
@abc.abstractmethod
def invocation_metadata(self):
"""Accesses the metadata sent by the client.
Returns:
The invocation :term:`metadata`.
"""
raise NotImplementedError()
@abc.abstractmethod
def peer(self):
"""Identifies the peer that invoked the RPC being serviced.
Returns:
A string identifying the peer that invoked the RPC being serviced.
The string format is determined by gRPC runtime.
"""
raise NotImplementedError()
@abc.abstractmethod
def peer_identities(self):
"""Gets one or more peer identity(s).
Equivalent to
servicer_context.auth_context().get(servicer_context.peer_identity_key())
Returns:
An iterable of the identities, or None if the call is not
authenticated. Each identity is returned as a raw bytes type.
"""
raise NotImplementedError()
@abc.abstractmethod
def peer_identity_key(self):
"""The auth property used to identify the peer.
For example, "x509_common_name" or "x509_subject_alternative_name" are
used to identify an SSL peer.
Returns:
The auth property (string) that indicates the
peer identity, or None if the call is not authenticated.
"""
raise NotImplementedError()
@abc.abstractmethod
def auth_context(self):
"""Gets the auth context for the call.
Returns:
A map of strings to an iterable of bytes for each auth property.
"""
raise NotImplementedError()
def set_compression(self, compression):
"""Set the compression algorithm to be used for the entire call.
Args:
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip.
"""
raise NotImplementedError()
@abc.abstractmethod
def send_initial_metadata(self, initial_metadata):
"""Sends the initial metadata value to the client.
This method need not be called by implementations if they have no
metadata to add to what the gRPC runtime will transmit.
Args:
initial_metadata: The initial :term:`metadata`.
"""
raise NotImplementedError()
@abc.abstractmethod
def set_trailing_metadata(self, trailing_metadata):
"""Sets the trailing metadata for the RPC.
Sets the trailing metadata to be sent upon completion of the RPC.
If this method is invoked multiple times throughout the lifetime of an
RPC, the value supplied in the final invocation will be the value sent
over the wire.
This method need not be called by implementations if they have no
metadata to add to what the gRPC runtime will transmit.
Args:
trailing_metadata: The trailing :term:`metadata`.
"""
raise NotImplementedError()
def trailing_metadata(self):
"""Access value to be used as trailing metadata upon RPC completion.
This is an EXPERIMENTAL API.
Returns:
The trailing :term:`metadata` for the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def abort(self, code, details):
"""Raises an exception to terminate the RPC with a non-OK status.
The code and details passed as arguments will supercede any existing
ones.
Args:
code: A StatusCode object to be sent to the client.
It must not be StatusCode.OK.
details: A UTF-8-encodable string to be sent to the client upon
termination of the RPC.
Raises:
Exception: An exception is always raised to signal the abortion the
RPC to the gRPC runtime.
"""
raise NotImplementedError()
@abc.abstractmethod
def abort_with_status(self, status):
"""Raises an exception to terminate the RPC with a non-OK status.
The status passed as argument will supercede any existing status code,
status message and trailing metadata.
This is an EXPERIMENTAL API.
Args:
status: A grpc.Status object. The status code in it must not be
StatusCode.OK.
Raises:
Exception: An exception is always raised to signal the abortion the
RPC to the gRPC runtime.
"""
raise NotImplementedError()
@abc.abstractmethod
def set_code(self, code):
"""Sets the value to be used as status code upon RPC completion.
This method need not be called by method implementations if they wish
the gRPC runtime to determine the status code of the RPC.
Args:
code: A StatusCode object to be sent to the client.
"""
raise NotImplementedError()
@abc.abstractmethod
def set_details(self, details):
"""Sets the value to be used as detail string upon RPC completion.
This method need not be called by method implementations if they have
no details to transmit.
Args:
details: A UTF-8-encodable string to be sent to the client upon
termination of the RPC.
"""
raise NotImplementedError()
def code(self):
"""Accesses the value to be used as status code upon RPC completion.
This is an EXPERIMENTAL API.
Returns:
The StatusCode value for the RPC.
"""
raise NotImplementedError()
def details(self):
"""Accesses the value to be used as detail string upon RPC completion.
This is an EXPERIMENTAL API.
Returns:
The details string of the RPC.
"""
raise NotImplementedError()
def disable_next_message_compression(self):
"""Disables compression for the next response message.
This method will override any compression configuration set during
server creation or set on the call.
"""
raise NotImplementedError()
##################### Service-Side Handler Interfaces ########################
class RpcMethodHandler(abc.ABC):
"""An implementation of a single RPC method.
Attributes:
request_streaming: Whether the RPC supports exactly one request message
or any arbitrary number of request messages.
response_streaming: Whether the RPC supports exactly one response message
or any arbitrary number of response messages.
request_deserializer: A callable :term:`deserializer` that accepts a byte string and
returns an object suitable to be passed to this object's business
logic, or None to indicate that this object's business logic should be
passed the raw request bytes.
response_serializer: A callable :term:`serializer` that accepts an object produced
by this object's business logic and returns a byte string, or None to
indicate that the byte strings produced by this object's business logic
should be transmitted on the wire as they are.
unary_unary: This object's application-specific business logic as a
callable value that takes a request value and a ServicerContext object
and returns a response value. Only non-None if both request_streaming
and response_streaming are False.
unary_stream: This object's application-specific business logic as a
callable value that takes a request value and a ServicerContext object
and returns an iterator of response values. Only non-None if
request_streaming is False and response_streaming is True.
stream_unary: This object's application-specific business logic as a
callable value that takes an iterator of request values and a
ServicerContext object and returns a response value. Only non-None if
request_streaming is True and response_streaming is False.
stream_stream: This object's application-specific business logic as a
callable value that takes an iterator of request values and a
ServicerContext object and returns an iterator of response values.
Only non-None if request_streaming and response_streaming are both
True.
"""
class HandlerCallDetails(abc.ABC):
"""Describes an RPC that has just arrived for service.
Attributes:
method: The method name of the RPC.
invocation_metadata: The :term:`metadata` sent by the client.
"""
class GenericRpcHandler(abc.ABC):
"""An implementation of arbitrarily many RPC methods."""
@abc.abstractmethod
def service(self, handler_call_details):
"""Returns the handler for servicing the RPC.
Args:
handler_call_details: A HandlerCallDetails describing the RPC.
Returns:
An RpcMethodHandler with which the RPC may be serviced if the
implementation chooses to service this RPC, or None otherwise.
"""
raise NotImplementedError()
class ServiceRpcHandler(GenericRpcHandler, metaclass=abc.ABCMeta):
"""An implementation of RPC methods belonging to a service.
A service handles RPC methods with structured names of the form
'/Service.Name/Service.Method', where 'Service.Name' is the value
returned by service_name(), and 'Service.Method' is the method
name. A service can have multiple method names, but only a single
service name.
"""
@abc.abstractmethod
def service_name(self):
"""Returns this service's name.
Returns:
The service name.
"""
raise NotImplementedError()
#################### Service-Side Interceptor Interfaces #####################
class ServerInterceptor(abc.ABC):
"""Affords intercepting incoming RPCs on the service-side."""
@abc.abstractmethod
def intercept_service(self, continuation, handler_call_details):
"""Intercepts incoming RPCs before handing them over to a handler.
Args:
continuation: A function that takes a HandlerCallDetails and
proceeds to invoke the next interceptor in the chain, if any,
or the RPC handler lookup logic, with the call details passed
as an argument, and returns an RpcMethodHandler instance if
the RPC is considered serviced, or None otherwise.
handler_call_details: A HandlerCallDetails describing the RPC.
Returns:
An RpcMethodHandler with which the RPC may be serviced if the
interceptor chooses to service this RPC, or None otherwise.
"""
raise NotImplementedError()
############################# Server Interface ###############################
class Server(abc.ABC):
"""Services RPCs."""
@abc.abstractmethod
def add_generic_rpc_handlers(self, generic_rpc_handlers):
"""Registers GenericRpcHandlers with this Server.
This method is only safe to call before the server is started.
Args:
generic_rpc_handlers: An iterable of GenericRpcHandlers that will be
used to service RPCs.
"""
raise NotImplementedError()
@abc.abstractmethod
def add_insecure_port(self, address):
"""Opens an insecure port for accepting RPCs.
This method may only be called before starting the server.
Args:
address: The address for which to open a port. If the port is 0,
or not specified in the address, then gRPC runtime will choose a port.
Returns:
An integer port on which server will accept RPC requests.
"""
raise NotImplementedError()
@abc.abstractmethod
def add_secure_port(self, address, server_credentials):
"""Opens a secure port for accepting RPCs.
This method may only be called before starting the server.
Args:
address: The address for which to open a port.
if the port is 0, or not specified in the address, then gRPC
runtime will choose a port.
server_credentials: A ServerCredentials object.
Returns:
An integer port on which server will accept RPC requests.
"""
raise NotImplementedError()
@abc.abstractmethod
def start(self):
"""Starts this Server.
This method may only be called once. (i.e. it is not idempotent).
"""
raise NotImplementedError()
@abc.abstractmethod
def stop(self, grace):
"""Stops this Server.
This method immediately stop service of new RPCs in all cases.
If a grace period is specified, this method returns immediately
and all RPCs active at the end of the grace period are aborted.
If a grace period is not specified (by passing None for `grace`),
all existing RPCs are aborted immediately and this method
blocks until the last RPC handler terminates.
This method is idempotent and may be called at any time.
Passing a smaller grace value in a subsequent call will have
the effect of stopping the Server sooner (passing None will
have the effect of stopping the server immediately). Passing
a larger grace value in a subsequent call *will not* have the
effect of stopping the server later (i.e. the most restrictive
grace value is used).
Args:
grace: A duration of time in seconds or None.
Returns:
A threading.Event that will be set when this Server has completely
stopped, i.e. when running RPCs either complete or are aborted and
all handlers have terminated.
"""
raise NotImplementedError()
def wait_for_termination(self, timeout=None):
"""Block current thread until the server stops.
This is an EXPERIMENTAL API.
The wait will not consume computational resources during blocking, and
it will block until one of the two following conditions are met:
1) The server is stopped or terminated;
2) A timeout occurs if timeout is not `None`.
The timeout argument works in the same way as `threading.Event.wait()`.
https://docs.python.org/3/library/threading.html#threading.Event.wait
Args:
timeout: A floating point number specifying a timeout for the
operation in seconds.
Returns:
A bool indicates if the operation times out.
"""
raise NotImplementedError()
################################# Functions ################################
def unary_unary_rpc_method_handler(
behavior, request_deserializer=None, response_serializer=None
):
"""Creates an RpcMethodHandler for a unary-unary RPC method.
Args:
behavior: The implementation of an RPC that accepts one request
and returns one response.
request_deserializer: An optional :term:`deserializer` for request deserialization.
response_serializer: An optional :term:`serializer` for response serialization.
Returns:
An RpcMethodHandler object that is typically used by grpc.Server.
"""
from grpc import _utilities # pylint: disable=cyclic-import
return _utilities.RpcMethodHandler(
False,
False,
request_deserializer,
response_serializer,
behavior,
None,
None,
None,
)
def unary_stream_rpc_method_handler(
behavior, request_deserializer=None, response_serializer=None
):
"""Creates an RpcMethodHandler for a unary-stream RPC method.
Args:
behavior: The implementation of an RPC that accepts one request
and returns an iterator of response values.
request_deserializer: An optional :term:`deserializer` for request deserialization.
response_serializer: An optional :term:`serializer` for response serialization.
Returns:
An RpcMethodHandler object that is typically used by grpc.Server.
"""
from grpc import _utilities # pylint: disable=cyclic-import
return _utilities.RpcMethodHandler(
False,
True,
request_deserializer,
response_serializer,
None,
behavior,
None,
None,
)
def stream_unary_rpc_method_handler(
behavior, request_deserializer=None, response_serializer=None
):
"""Creates an RpcMethodHandler for a stream-unary RPC method.
Args:
behavior: The implementation of an RPC that accepts an iterator of
request values and returns a single response value.
request_deserializer: An optional :term:`deserializer` for request deserialization.
response_serializer: An optional :term:`serializer` for response serialization.
Returns:
An RpcMethodHandler object that is typically used by grpc.Server.
"""
from grpc import _utilities # pylint: disable=cyclic-import
return _utilities.RpcMethodHandler(
True,
False,
request_deserializer,
response_serializer,
None,
None,
behavior,
None,
)
def stream_stream_rpc_method_handler(
behavior, request_deserializer=None, response_serializer=None
):
"""Creates an RpcMethodHandler for a stream-stream RPC method.
Args:
behavior: The implementation of an RPC that accepts an iterator of
request values and returns an iterator of response values.
request_deserializer: An optional :term:`deserializer` for request deserialization.
response_serializer: An optional :term:`serializer` for response serialization.
Returns:
An RpcMethodHandler object that is typically used by grpc.Server.
"""
from grpc import _utilities # pylint: disable=cyclic-import
return _utilities.RpcMethodHandler(
True,
True,
request_deserializer,
response_serializer,
None,
None,
None,
behavior,
)
def method_handlers_generic_handler(service, method_handlers):
"""Creates a GenericRpcHandler from RpcMethodHandlers.
Args:
service: The name of the service that is implemented by the
method_handlers.
method_handlers: A dictionary that maps method names to corresponding
RpcMethodHandler.
Returns:
A GenericRpcHandler. This is typically added to the grpc.Server object
with add_generic_rpc_handlers() before starting the server.
"""
from grpc import _utilities # pylint: disable=cyclic-import
return _utilities.DictionaryGenericHandler(service, method_handlers)
def ssl_channel_credentials(
root_certificates=None, private_key=None, certificate_chain=None
):
"""Creates a ChannelCredentials for use with an SSL-enabled Channel.
Args:
root_certificates: The PEM-encoded root certificates as a byte string,
or None to retrieve them from a default location chosen by gRPC
runtime.
private_key: The PEM-encoded private key as a byte string, or None if no
private key should be used.
certificate_chain: The PEM-encoded certificate chain as a byte string
to use or None if no certificate chain should be used.
Returns:
A ChannelCredentials for use with an SSL-enabled Channel.
"""
return ChannelCredentials(
_cygrpc.SSLChannelCredentials(
root_certificates, private_key, certificate_chain
)
)
def xds_channel_credentials(fallback_credentials=None):
"""Creates a ChannelCredentials for use with xDS. This is an EXPERIMENTAL
API.
Args:
fallback_credentials: Credentials to use in case it is not possible to
establish a secure connection via xDS. If no fallback_credentials
argument is supplied, a default SSLChannelCredentials is used.
"""
fallback_credentials = (
ssl_channel_credentials()
if fallback_credentials is None
else fallback_credentials
)
return ChannelCredentials(
_cygrpc.XDSChannelCredentials(fallback_credentials._credentials)
)
def metadata_call_credentials(metadata_plugin, name=None):
"""Construct CallCredentials from an AuthMetadataPlugin.
Args:
metadata_plugin: An AuthMetadataPlugin to use for authentication.
name: An optional name for the plugin.
Returns:
A CallCredentials.
"""
from grpc import _plugin_wrapping # pylint: disable=cyclic-import
return _plugin_wrapping.metadata_plugin_call_credentials(
metadata_plugin, name
)
def access_token_call_credentials(access_token):
"""Construct CallCredentials from an access token.
Args:
access_token: A string to place directly in the http request
authorization header, for example
"authorization: Bearer <access_token>".
Returns:
A CallCredentials.
"""
from grpc import _auth # pylint: disable=cyclic-import
from grpc import _plugin_wrapping # pylint: disable=cyclic-import
return _plugin_wrapping.metadata_plugin_call_credentials(
_auth.AccessTokenAuthMetadataPlugin(access_token), None
)
def composite_call_credentials(*call_credentials):
"""Compose multiple CallCredentials to make a new CallCredentials.
Args:
*call_credentials: At least two CallCredentials objects.
Returns:
A CallCredentials object composed of the given CallCredentials objects.
"""
return CallCredentials(
_cygrpc.CompositeCallCredentials(
tuple(
single_call_credentials._credentials
for single_call_credentials in call_credentials
)
)
)
def composite_channel_credentials(channel_credentials, *call_credentials):
"""Compose a ChannelCredentials and one or more CallCredentials objects.
Args:
channel_credentials: A ChannelCredentials object.
*call_credentials: One or more CallCredentials objects.
Returns:
A ChannelCredentials composed of the given ChannelCredentials and
CallCredentials objects.
"""
return ChannelCredentials(
_cygrpc.CompositeChannelCredentials(
tuple(
single_call_credentials._credentials
for single_call_credentials in call_credentials
),
channel_credentials._credentials,
)
)
def ssl_server_credentials(
private_key_certificate_chain_pairs,
root_certificates=None,
require_client_auth=False,
):
"""Creates a ServerCredentials for use with an SSL-enabled Server.
Args:
private_key_certificate_chain_pairs: A list of pairs of the form
[PEM-encoded private key, PEM-encoded certificate chain].
root_certificates: An optional byte string of PEM-encoded client root
certificates that the server will use to verify client authentication.
If omitted, require_client_auth must also be False.
require_client_auth: A boolean indicating whether or not to require
clients to be authenticated. May only be True if root_certificates
is not None.
Returns:
A ServerCredentials for use with an SSL-enabled Server. Typically, this
object is an argument to add_secure_port() method during server setup.
"""
if not private_key_certificate_chain_pairs:
raise ValueError(
"At least one private key-certificate chain pair is required!"
)
elif require_client_auth and root_certificates is None:
raise ValueError(
"Illegal to require client auth without providing root"
" certificates!"
)
else:
return ServerCredentials(
_cygrpc.server_credentials_ssl(
root_certificates,
[
_cygrpc.SslPemKeyCertPair(key, pem)
for key, pem in private_key_certificate_chain_pairs
],
require_client_auth,
)
)
def xds_server_credentials(fallback_credentials):
"""Creates a ServerCredentials for use with xDS. This is an EXPERIMENTAL
API.
Args:
fallback_credentials: Credentials to use in case it is not possible to
establish a secure connection via xDS. No default value is provided.
"""
return ServerCredentials(
_cygrpc.xds_server_credentials(fallback_credentials._credentials)
)
def insecure_server_credentials():
"""Creates a credentials object directing the server to use no credentials.
This is an EXPERIMENTAL API.
This object cannot be used directly in a call to `add_secure_port`.
Instead, it should be used to construct other credentials objects, e.g.
with xds_server_credentials.
"""
return ServerCredentials(_cygrpc.insecure_server_credentials())
def ssl_server_certificate_configuration(
private_key_certificate_chain_pairs, root_certificates=None
):
"""Creates a ServerCertificateConfiguration for use with a Server.
Args:
private_key_certificate_chain_pairs: A collection of pairs of
the form [PEM-encoded private key, PEM-encoded certificate
chain].
root_certificates: An optional byte string of PEM-encoded client root
certificates that the server will use to verify client authentication.
Returns:
A ServerCertificateConfiguration that can be returned in the certificate
configuration fetching callback.
"""
if private_key_certificate_chain_pairs:
return ServerCertificateConfiguration(
_cygrpc.server_certificate_config_ssl(
root_certificates,
[
_cygrpc.SslPemKeyCertPair(key, pem)
for key, pem in private_key_certificate_chain_pairs
],
)
)
else:
raise ValueError(
"At least one private key-certificate chain pair is required!"
)
def dynamic_ssl_server_credentials(
initial_certificate_configuration,
certificate_configuration_fetcher,
require_client_authentication=False,
):
"""Creates a ServerCredentials for use with an SSL-enabled Server.
Args:
initial_certificate_configuration (ServerCertificateConfiguration): The
certificate configuration with which the server will be initialized.
certificate_configuration_fetcher (callable): A callable that takes no
arguments and should return a ServerCertificateConfiguration to
replace the server's current certificate, or None for no change
(i.e., the server will continue its current certificate
config). The library will call this callback on *every* new
client connection before starting the TLS handshake with the
client, thus allowing the user application to optionally
return a new ServerCertificateConfiguration that the server will then
use for the handshake.
require_client_authentication: A boolean indicating whether or not to
require clients to be authenticated.
Returns:
A ServerCredentials.
"""
return ServerCredentials(
_cygrpc.server_credentials_ssl_dynamic_cert_config(
initial_certificate_configuration,
certificate_configuration_fetcher,
require_client_authentication,
)
)
@enum.unique
class LocalConnectionType(enum.Enum):
"""Types of local connection for local credential creation.
Attributes:
UDS: Unix domain socket connections
LOCAL_TCP: Local TCP connections.
"""
UDS = _cygrpc.LocalConnectionType.uds
LOCAL_TCP = _cygrpc.LocalConnectionType.local_tcp
def local_channel_credentials(local_connect_type=LocalConnectionType.LOCAL_TCP):
"""Creates a local ChannelCredentials used for local connections.
This is an EXPERIMENTAL API.
Local credentials are used by local TCP endpoints (e.g. localhost:10000)
also UDS connections.
The connections created by local channel credentials are not
encrypted, but will be checked if they are local or not.
The UDS connections are considered secure by providing peer authentication
and data confidentiality while TCP connections are considered insecure.
It is allowed to transmit call credentials over connections created by
local channel credentials.
Local channel credentials are useful for 1) eliminating insecure_channel usage;
2) enable unit testing for call credentials without setting up secrets.
Args:
local_connect_type: Local connection type (either
grpc.LocalConnectionType.UDS or grpc.LocalConnectionType.LOCAL_TCP)
Returns:
A ChannelCredentials for use with a local Channel
"""
return ChannelCredentials(
_cygrpc.channel_credentials_local(local_connect_type.value)
)
def local_server_credentials(local_connect_type=LocalConnectionType.LOCAL_TCP):
"""Creates a local ServerCredentials used for local connections.
This is an EXPERIMENTAL API.
Local credentials are used by local TCP endpoints (e.g. localhost:10000)
also UDS connections.
The connections created by local server credentials are not
encrypted, but will be checked if they are local or not.
The UDS connections are considered secure by providing peer authentication
and data confidentiality while TCP connections are considered insecure.
It is allowed to transmit call credentials over connections created by local
server credentials.
Local server credentials are useful for 1) eliminating insecure_channel usage;
2) enable unit testing for call credentials without setting up secrets.
Args:
local_connect_type: Local connection type (either
grpc.LocalConnectionType.UDS or grpc.LocalConnectionType.LOCAL_TCP)
Returns:
A ServerCredentials for use with a local Server
"""
return ServerCredentials(
_cygrpc.server_credentials_local(local_connect_type.value)
)
def alts_channel_credentials(service_accounts=None):
"""Creates a ChannelCredentials for use with an ALTS-enabled Channel.
This is an EXPERIMENTAL API.
ALTS credentials API can only be used in GCP environment as it relies on
handshaker service being available. For more info about ALTS see
https://cloud.google.com/security/encryption-in-transit/application-layer-transport-security
Args:
service_accounts: A list of server identities accepted by the client.
If target service accounts are provided and none of them matches the
peer identity of the server, handshake will fail. The arg can be empty
if the client does not have any information about trusted server
identity.
Returns:
A ChannelCredentials for use with an ALTS-enabled Channel
"""
return ChannelCredentials(
_cygrpc.channel_credentials_alts(service_accounts or [])
)
def alts_server_credentials():
"""Creates a ServerCredentials for use with an ALTS-enabled connection.
This is an EXPERIMENTAL API.
ALTS credentials API can only be used in GCP environment as it relies on
handshaker service being available. For more info about ALTS see
https://cloud.google.com/security/encryption-in-transit/application-layer-transport-security
Returns:
A ServerCredentials for use with an ALTS-enabled Server
"""
return ServerCredentials(_cygrpc.server_credentials_alts())
def compute_engine_channel_credentials(call_credentials):
"""Creates a compute engine channel credential.
This credential can only be used in a GCP environment as it relies on
a handshaker service. For more info about ALTS, see
https://cloud.google.com/security/encryption-in-transit/application-layer-transport-security
This channel credential is expected to be used as part of a composite
credential in conjunction with a call credentials that authenticates the
VM's default service account. If used with any other sort of call
credential, the connection may suddenly and unexpectedly begin failing RPCs.
"""
return ChannelCredentials(
_cygrpc.channel_credentials_compute_engine(
call_credentials._credentials
)
)
def channel_ready_future(channel):
"""Creates a Future that tracks when a Channel is ready.
Cancelling the Future does not affect the channel's state machine.
It merely decouples the Future from channel state machine.
Args:
channel: A Channel object.
Returns:
A Future object that matures when the channel connectivity is
ChannelConnectivity.READY.
"""
from grpc import _utilities # pylint: disable=cyclic-import
return _utilities.channel_ready_future(channel)
def insecure_channel(target, options=None, compression=None):
"""Creates an insecure Channel to a server.
The returned Channel is thread-safe.
Args:
target: The server address
options: An optional list of key-value pairs (:term:`channel_arguments`
in gRPC Core runtime) to configure the channel.
compression: An optional value indicating the compression method to be
used over the lifetime of the channel.
Returns:
A Channel.
"""
from grpc import _channel # pylint: disable=cyclic-import
return _channel.Channel(
target, () if options is None else options, None, compression
)
def secure_channel(target, credentials, options=None, compression=None):
"""Creates a secure Channel to a server.
The returned Channel is thread-safe.
Args:
target: The server address.
credentials: A ChannelCredentials instance.
options: An optional list of key-value pairs (:term:`channel_arguments`
in gRPC Core runtime) to configure the channel.
compression: An optional value indicating the compression method to be
used over the lifetime of the channel.
Returns:
A Channel.
"""
from grpc import _channel # pylint: disable=cyclic-import
from grpc.experimental import _insecure_channel_credentials
if credentials._credentials is _insecure_channel_credentials:
raise ValueError(
"secure_channel cannot be called with insecure credentials."
+ " Call insecure_channel instead."
)
return _channel.Channel(
target,
() if options is None else options,
credentials._credentials,
compression,
)
def intercept_channel(channel, *interceptors):
"""Intercepts a channel through a set of interceptors.
Args:
channel: A Channel.
interceptors: Zero or more objects of type
UnaryUnaryClientInterceptor,
UnaryStreamClientInterceptor,
StreamUnaryClientInterceptor, or
StreamStreamClientInterceptor.
Interceptors are given control in the order they are listed.
Returns:
A Channel that intercepts each invocation via the provided interceptors.
Raises:
TypeError: If interceptor does not derive from any of
UnaryUnaryClientInterceptor,
UnaryStreamClientInterceptor,
StreamUnaryClientInterceptor, or
StreamStreamClientInterceptor.
"""
from grpc import _interceptor # pylint: disable=cyclic-import
return _interceptor.intercept_channel(channel, *interceptors)
def server(
thread_pool,
handlers=None,
interceptors=None,
options=None,
maximum_concurrent_rpcs=None,
compression=None,
xds=False,
):
"""Creates a Server with which RPCs can be serviced.
Args:
thread_pool: A futures.ThreadPoolExecutor to be used by the Server
to execute RPC handlers.
handlers: An optional list of GenericRpcHandlers used for executing RPCs.
More handlers may be added by calling add_generic_rpc_handlers any time
before the server is started.
interceptors: An optional list of ServerInterceptor objects that observe
and optionally manipulate the incoming RPCs before handing them over to
handlers. The interceptors are given control in the order they are
specified. This is an EXPERIMENTAL API.
options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC runtime)
to configure the channel.
maximum_concurrent_rpcs: The maximum number of concurrent RPCs this server
will service before returning RESOURCE_EXHAUSTED status, or None to
indicate no limit.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip. This compression algorithm will be used for the
lifetime of the server unless overridden.
xds: If set to true, retrieves server configuration via xDS. This is an
EXPERIMENTAL option.
Returns:
A Server object.
"""
from grpc import _server # pylint: disable=cyclic-import
return _server.create_server(
thread_pool,
() if handlers is None else handlers,
() if interceptors is None else interceptors,
() if options is None else options,
maximum_concurrent_rpcs,
compression,
xds,
)
@contextlib.contextmanager
def _create_servicer_context(rpc_event, state, request_deserializer):
from grpc import _server # pylint: disable=cyclic-import
context = _server._Context(rpc_event, state, request_deserializer)
yield context
context._finalize_state() # pylint: disable=protected-access
@enum.unique
class Compression(enum.IntEnum):
"""Indicates the compression method to be used for an RPC.
Attributes:
NoCompression: Do not use compression algorithm.
Deflate: Use "Deflate" compression algorithm.
Gzip: Use "Gzip" compression algorithm.
"""
NoCompression = _compression.NoCompression
Deflate = _compression.Deflate
Gzip = _compression.Gzip
################################### __all__ #################################
__all__ = (
"FutureTimeoutError",
"FutureCancelledError",
"Future",
"ChannelConnectivity",
"StatusCode",
"Status",
"RpcError",
"RpcContext",
"Call",
"ChannelCredentials",
"CallCredentials",
"AuthMetadataContext",
"AuthMetadataPluginCallback",
"AuthMetadataPlugin",
"Compression",
"ClientCallDetails",
"ServerCertificateConfiguration",
"ServerCredentials",
"LocalConnectionType",
"UnaryUnaryMultiCallable",
"UnaryStreamMultiCallable",
"StreamUnaryMultiCallable",
"StreamStreamMultiCallable",
"UnaryUnaryClientInterceptor",
"UnaryStreamClientInterceptor",
"StreamUnaryClientInterceptor",
"StreamStreamClientInterceptor",
"Channel",
"ServicerContext",
"RpcMethodHandler",
"HandlerCallDetails",
"GenericRpcHandler",
"ServiceRpcHandler",
"Server",
"ServerInterceptor",
"unary_unary_rpc_method_handler",
"unary_stream_rpc_method_handler",
"stream_unary_rpc_method_handler",
"stream_stream_rpc_method_handler",
"method_handlers_generic_handler",
"ssl_channel_credentials",
"metadata_call_credentials",
"access_token_call_credentials",
"composite_call_credentials",
"composite_channel_credentials",
"compute_engine_channel_credentials",
"local_channel_credentials",
"local_server_credentials",
"alts_channel_credentials",
"alts_server_credentials",
"ssl_server_credentials",
"ssl_server_certificate_configuration",
"dynamic_ssl_server_credentials",
"channel_ready_future",
"insecure_channel",
"secure_channel",
"intercept_channel",
"server",
"protos",
"services",
"protos_and_services",
"xds_channel_credentials",
"xds_server_credentials",
"insecure_server_credentials",
)
############################### Extension Shims ################################
# Here to maintain backwards compatibility; avoid using these in new code!
try:
import grpc_tools
sys.modules.update({"grpc.tools": grpc_tools})
except ImportError:
pass
try:
import grpc_health
sys.modules.update({"grpc.health": grpc_health})
except ImportError:
pass
try:
import grpc_reflection
sys.modules.update({"grpc.reflection": grpc_reflection})
except ImportError:
pass
# Prevents import order issue in the case of renamed path.
if sys.version_info >= (3, 6) and __name__ == "grpc":
from grpc import aio # pylint: disable=ungrouped-imports
sys.modules.update({"grpc.aio": aio})
| 80,646
| 34.018237
| 96
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/_typing.py
|
# Copyright 2022 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common types for gRPC Sync API"""
from typing import (
TYPE_CHECKING,
Any,
Callable,
Iterable,
Iterator,
Optional,
Sequence,
Tuple,
TypeVar,
Union,
)
from grpc._cython import cygrpc
if TYPE_CHECKING:
from grpc import ServicerContext
from grpc._server import _RPCState
RequestType = TypeVar("RequestType")
ResponseType = TypeVar("ResponseType")
SerializingFunction = Callable[[Any], bytes]
DeserializingFunction = Callable[[bytes], Any]
MetadataType = Sequence[Tuple[str, Union[str, bytes]]]
ChannelArgumentType = Tuple[str, Any]
DoneCallbackType = Callable[[Any], None]
NullaryCallbackType = Callable[[], None]
RequestIterableType = Iterable[Any]
ResponseIterableType = Iterable[Any]
UserTag = Callable[[cygrpc.BaseEvent], bool]
IntegratedCallFactory = Callable[
[
int,
bytes,
None,
Optional[float],
Optional[MetadataType],
Optional[cygrpc.CallCredentials],
Sequence[Sequence[cygrpc.Operation]],
UserTag,
Any,
],
cygrpc.IntegratedCall,
]
ServerTagCallbackType = Tuple[
Optional["_RPCState"], Sequence[NullaryCallbackType]
]
ServerCallbackTag = Callable[[cygrpc.BaseEvent], ServerTagCallbackType]
ArityAgnosticMethodHandler = Union[
Callable[
[RequestType, "ServicerContext", Callable[[ResponseType], None]],
ResponseType,
],
Callable[
[RequestType, "ServicerContext", Callable[[ResponseType], None]],
Iterator[ResponseType],
],
Callable[
[
Iterator[RequestType],
"ServicerContext",
Callable[[ResponseType], None],
],
ResponseType,
],
Callable[
[
Iterator[RequestType],
"ServicerContext",
Callable[[ResponseType], None],
],
Iterator[ResponseType],
],
Callable[[RequestType, "ServicerContext"], ResponseType],
Callable[[RequestType, "ServicerContext"], Iterator[ResponseType]],
Callable[[Iterator[RequestType], "ServicerContext"], ResponseType],
Callable[
[Iterator[RequestType], "ServicerContext"], Iterator[ResponseType]
],
]
| 2,758
| 27.739583
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/_auth.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GRPCAuthMetadataPlugins for standard authentication."""
import inspect
from typing import Any, Optional
import grpc
def _sign_request(
callback: grpc.AuthMetadataPluginCallback,
token: Optional[str],
error: Optional[Exception],
):
metadata = (("authorization", "Bearer {}".format(token)),)
callback(metadata, error)
class GoogleCallCredentials(grpc.AuthMetadataPlugin):
"""Metadata wrapper for GoogleCredentials from the oauth2client library."""
_is_jwt: bool
_credentials: Any
# TODO(xuanwn): Give credentials an actual type.
def __init__(self, credentials: Any):
self._credentials = credentials
# Hack to determine if these are JWT creds and we need to pass
# additional_claims when getting a token
self._is_jwt = (
"additional_claims"
in inspect.getfullargspec(credentials.get_access_token).args
)
def __call__(
self,
context: grpc.AuthMetadataContext,
callback: grpc.AuthMetadataPluginCallback,
):
try:
if self._is_jwt:
access_token = self._credentials.get_access_token(
additional_claims={
"aud": context.service_url # pytype: disable=attribute-error
}
).access_token
else:
access_token = self._credentials.get_access_token().access_token
except Exception as exception: # pylint: disable=broad-except
_sign_request(callback, None, exception)
else:
_sign_request(callback, access_token, None)
class AccessTokenAuthMetadataPlugin(grpc.AuthMetadataPlugin):
"""Metadata wrapper for raw access token credentials."""
_access_token: str
def __init__(self, access_token: str):
self._access_token = access_token
def __call__(
self,
context: grpc.AuthMetadataContext,
callback: grpc.AuthMetadataPluginCallback,
):
_sign_request(callback, self._access_token, None)
| 2,635
| 31.54321
| 85
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/_common.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared implementation."""
import logging
import time
from typing import Any, AnyStr, Callable, Optional, Union
import grpc
from grpc._cython import cygrpc
from grpc._typing import DeserializingFunction
from grpc._typing import SerializingFunction
_LOGGER = logging.getLogger(__name__)
CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY = {
cygrpc.ConnectivityState.idle: grpc.ChannelConnectivity.IDLE,
cygrpc.ConnectivityState.connecting: grpc.ChannelConnectivity.CONNECTING,
cygrpc.ConnectivityState.ready: grpc.ChannelConnectivity.READY,
cygrpc.ConnectivityState.transient_failure: grpc.ChannelConnectivity.TRANSIENT_FAILURE,
cygrpc.ConnectivityState.shutdown: grpc.ChannelConnectivity.SHUTDOWN,
}
CYGRPC_STATUS_CODE_TO_STATUS_CODE = {
cygrpc.StatusCode.ok: grpc.StatusCode.OK,
cygrpc.StatusCode.cancelled: grpc.StatusCode.CANCELLED,
cygrpc.StatusCode.unknown: grpc.StatusCode.UNKNOWN,
cygrpc.StatusCode.invalid_argument: grpc.StatusCode.INVALID_ARGUMENT,
cygrpc.StatusCode.deadline_exceeded: grpc.StatusCode.DEADLINE_EXCEEDED,
cygrpc.StatusCode.not_found: grpc.StatusCode.NOT_FOUND,
cygrpc.StatusCode.already_exists: grpc.StatusCode.ALREADY_EXISTS,
cygrpc.StatusCode.permission_denied: grpc.StatusCode.PERMISSION_DENIED,
cygrpc.StatusCode.unauthenticated: grpc.StatusCode.UNAUTHENTICATED,
cygrpc.StatusCode.resource_exhausted: grpc.StatusCode.RESOURCE_EXHAUSTED,
cygrpc.StatusCode.failed_precondition: grpc.StatusCode.FAILED_PRECONDITION,
cygrpc.StatusCode.aborted: grpc.StatusCode.ABORTED,
cygrpc.StatusCode.out_of_range: grpc.StatusCode.OUT_OF_RANGE,
cygrpc.StatusCode.unimplemented: grpc.StatusCode.UNIMPLEMENTED,
cygrpc.StatusCode.internal: grpc.StatusCode.INTERNAL,
cygrpc.StatusCode.unavailable: grpc.StatusCode.UNAVAILABLE,
cygrpc.StatusCode.data_loss: grpc.StatusCode.DATA_LOSS,
}
STATUS_CODE_TO_CYGRPC_STATUS_CODE = {
grpc_code: cygrpc_code
for cygrpc_code, grpc_code in CYGRPC_STATUS_CODE_TO_STATUS_CODE.items()
}
MAXIMUM_WAIT_TIMEOUT = 0.1
_ERROR_MESSAGE_PORT_BINDING_FAILED = (
"Failed to bind to address %s; set "
"GRPC_VERBOSITY=debug environment variable to see detailed error message."
)
def encode(s: AnyStr) -> bytes:
if isinstance(s, bytes):
return s
else:
return s.encode("utf8")
def decode(b: AnyStr) -> str:
if isinstance(b, bytes):
return b.decode("utf-8", "replace")
return b
def _transform(
message: Any,
transformer: Union[SerializingFunction, DeserializingFunction, None],
exception_message: str,
) -> Any:
if transformer is None:
return message
else:
try:
return transformer(message)
except Exception: # pylint: disable=broad-except
_LOGGER.exception(exception_message)
return None
def serialize(message: Any, serializer: Optional[SerializingFunction]) -> bytes:
return _transform(message, serializer, "Exception serializing message!")
def deserialize(
serialized_message: bytes, deserializer: Optional[DeserializingFunction]
) -> Any:
return _transform(
serialized_message, deserializer, "Exception deserializing message!"
)
def fully_qualified_method(group: str, method: str) -> str:
return "/{}/{}".format(group, method)
def _wait_once(
wait_fn: Callable[..., bool],
timeout: float,
spin_cb: Optional[Callable[[], None]],
):
wait_fn(timeout=timeout)
if spin_cb is not None:
spin_cb()
def wait(
wait_fn: Callable[..., bool],
wait_complete_fn: Callable[[], bool],
timeout: Optional[float] = None,
spin_cb: Optional[Callable[[], None]] = None,
) -> bool:
"""Blocks waiting for an event without blocking the thread indefinitely.
See https://github.com/grpc/grpc/issues/19464 for full context. CPython's
`threading.Event.wait` and `threading.Condition.wait` methods, if invoked
without a timeout kwarg, may block the calling thread indefinitely. If the
call is made from the main thread, this means that signal handlers may not
run for an arbitrarily long period of time.
This wrapper calls the supplied wait function with an arbitrary short
timeout to ensure that no signal handler has to wait longer than
MAXIMUM_WAIT_TIMEOUT before executing.
Args:
wait_fn: A callable acceptable a single float-valued kwarg named
`timeout`. This function is expected to be one of `threading.Event.wait`
or `threading.Condition.wait`.
wait_complete_fn: A callable taking no arguments and returning a bool.
When this function returns true, it indicates that waiting should cease.
timeout: An optional float-valued number of seconds after which the wait
should cease.
spin_cb: An optional Callable taking no arguments and returning nothing.
This callback will be called on each iteration of the spin. This may be
used for, e.g. work related to forking.
Returns:
True if a timeout was supplied and it was reached. False otherwise.
"""
if timeout is None:
while not wait_complete_fn():
_wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
else:
end = time.time() + timeout
while not wait_complete_fn():
remaining = min(end - time.time(), MAXIMUM_WAIT_TIMEOUT)
if remaining < 0:
return True
_wait_once(wait_fn, remaining, spin_cb)
return False
def validate_port_binding_result(address: str, port: int) -> int:
"""Validates if the port binding succeed.
If the port returned by Core is 0, the binding is failed. However, in that
case, the Core API doesn't return a detailed failing reason. The best we
can do is raising an exception to prevent further confusion.
Args:
address: The address string to be bound.
port: An int returned by core
"""
if port == 0:
# The Core API doesn't return a failure message. The best we can do
# is raising an exception to prevent further confusion.
raise RuntimeError(_ERROR_MESSAGE_PORT_BINDING_FAILED % address)
else:
return port
| 6,784
| 35.875
| 91
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/_server.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Service-side implementation of gRPC Python."""
from __future__ import annotations
import collections
from concurrent import futures
import enum
import logging
import threading
import time
import traceback
from typing import (
Any,
Callable,
Iterable,
Iterator,
List,
Mapping,
Optional,
Sequence,
Set,
Tuple,
Union,
)
import grpc # pytype: disable=pyi-error
from grpc import _common # pytype: disable=pyi-error
from grpc import _compression # pytype: disable=pyi-error
from grpc import _interceptor # pytype: disable=pyi-error
from grpc._cython import cygrpc
from grpc._typing import ArityAgnosticMethodHandler
from grpc._typing import ChannelArgumentType
from grpc._typing import DeserializingFunction
from grpc._typing import MetadataType
from grpc._typing import NullaryCallbackType
from grpc._typing import ResponseType
from grpc._typing import SerializingFunction
from grpc._typing import ServerCallbackTag
from grpc._typing import ServerTagCallbackType
_LOGGER = logging.getLogger(__name__)
_SHUTDOWN_TAG = "shutdown"
_REQUEST_CALL_TAG = "request_call"
_RECEIVE_CLOSE_ON_SERVER_TOKEN = "receive_close_on_server"
_SEND_INITIAL_METADATA_TOKEN = "send_initial_metadata"
_RECEIVE_MESSAGE_TOKEN = "receive_message"
_SEND_MESSAGE_TOKEN = "send_message"
_SEND_INITIAL_METADATA_AND_SEND_MESSAGE_TOKEN = (
"send_initial_metadata * send_message"
)
_SEND_STATUS_FROM_SERVER_TOKEN = "send_status_from_server"
_SEND_INITIAL_METADATA_AND_SEND_STATUS_FROM_SERVER_TOKEN = (
"send_initial_metadata * send_status_from_server"
)
_OPEN = "open"
_CLOSED = "closed"
_CANCELLED = "cancelled"
_EMPTY_FLAGS = 0
_DEALLOCATED_SERVER_CHECK_PERIOD_S = 1.0
_INF_TIMEOUT = 1e9
def _serialized_request(request_event: cygrpc.BaseEvent) -> bytes:
return request_event.batch_operations[0].message()
def _application_code(code: grpc.StatusCode) -> cygrpc.StatusCode:
cygrpc_code = _common.STATUS_CODE_TO_CYGRPC_STATUS_CODE.get(code)
return cygrpc.StatusCode.unknown if cygrpc_code is None else cygrpc_code
def _completion_code(state: _RPCState) -> cygrpc.StatusCode:
if state.code is None:
return cygrpc.StatusCode.ok
else:
return _application_code(state.code)
def _abortion_code(
state: _RPCState, code: cygrpc.StatusCode
) -> cygrpc.StatusCode:
if state.code is None:
return code
else:
return _application_code(state.code)
def _details(state: _RPCState) -> bytes:
return b"" if state.details is None else state.details
class _HandlerCallDetails(
collections.namedtuple(
"_HandlerCallDetails",
(
"method",
"invocation_metadata",
),
),
grpc.HandlerCallDetails,
):
pass
class _RPCState(object):
condition: threading.Condition
due = Set[str]
request: Any
client: str
initial_metadata_allowed: bool
compression_algorithm: Optional[grpc.Compression]
disable_next_compression: bool
trailing_metadata: Optional[MetadataType]
code: Optional[grpc.StatusCode]
details: Optional[bytes]
statused: bool
rpc_errors: List[Exception]
callbacks: Optional[List[NullaryCallbackType]]
aborted: bool
def __init__(self):
self.condition = threading.Condition()
self.due = set()
self.request = None
self.client = _OPEN
self.initial_metadata_allowed = True
self.compression_algorithm = None
self.disable_next_compression = False
self.trailing_metadata = None
self.code = None
self.details = None
self.statused = False
self.rpc_errors = []
self.callbacks = []
self.aborted = False
def _raise_rpc_error(state: _RPCState) -> None:
rpc_error = grpc.RpcError()
state.rpc_errors.append(rpc_error)
raise rpc_error
def _possibly_finish_call(
state: _RPCState, token: str
) -> ServerTagCallbackType:
state.due.remove(token)
if not _is_rpc_state_active(state) and not state.due:
callbacks = state.callbacks
state.callbacks = None
return state, callbacks
else:
return None, ()
def _send_status_from_server(state: _RPCState, token: str) -> ServerCallbackTag:
def send_status_from_server(unused_send_status_from_server_event):
with state.condition:
return _possibly_finish_call(state, token)
return send_status_from_server
def _get_initial_metadata(
state: _RPCState, metadata: Optional[MetadataType]
) -> Optional[MetadataType]:
with state.condition:
if state.compression_algorithm:
compression_metadata = (
_compression.compression_algorithm_to_metadata(
state.compression_algorithm
),
)
if metadata is None:
return compression_metadata
else:
return compression_metadata + tuple(metadata)
else:
return metadata
def _get_initial_metadata_operation(
state: _RPCState, metadata: Optional[MetadataType]
) -> cygrpc.Operation:
operation = cygrpc.SendInitialMetadataOperation(
_get_initial_metadata(state, metadata), _EMPTY_FLAGS
)
return operation
def _abort(
state: _RPCState, call: cygrpc.Call, code: cygrpc.StatusCode, details: bytes
) -> None:
if state.client is not _CANCELLED:
effective_code = _abortion_code(state, code)
effective_details = details if state.details is None else state.details
if state.initial_metadata_allowed:
operations = (
_get_initial_metadata_operation(state, None),
cygrpc.SendStatusFromServerOperation(
state.trailing_metadata,
effective_code,
effective_details,
_EMPTY_FLAGS,
),
)
token = _SEND_INITIAL_METADATA_AND_SEND_STATUS_FROM_SERVER_TOKEN
else:
operations = (
cygrpc.SendStatusFromServerOperation(
state.trailing_metadata,
effective_code,
effective_details,
_EMPTY_FLAGS,
),
)
token = _SEND_STATUS_FROM_SERVER_TOKEN
call.start_server_batch(
operations, _send_status_from_server(state, token)
)
state.statused = True
state.due.add(token)
def _receive_close_on_server(state: _RPCState) -> ServerCallbackTag:
def receive_close_on_server(receive_close_on_server_event):
with state.condition:
if receive_close_on_server_event.batch_operations[0].cancelled():
state.client = _CANCELLED
elif state.client is _OPEN:
state.client = _CLOSED
state.condition.notify_all()
return _possibly_finish_call(state, _RECEIVE_CLOSE_ON_SERVER_TOKEN)
return receive_close_on_server
def _receive_message(
state: _RPCState,
call: cygrpc.Call,
request_deserializer: Optional[DeserializingFunction],
) -> ServerCallbackTag:
def receive_message(receive_message_event):
serialized_request = _serialized_request(receive_message_event)
if serialized_request is None:
with state.condition:
if state.client is _OPEN:
state.client = _CLOSED
state.condition.notify_all()
return _possibly_finish_call(state, _RECEIVE_MESSAGE_TOKEN)
else:
request = _common.deserialize(
serialized_request, request_deserializer
)
with state.condition:
if request is None:
_abort(
state,
call,
cygrpc.StatusCode.internal,
b"Exception deserializing request!",
)
else:
state.request = request
state.condition.notify_all()
return _possibly_finish_call(state, _RECEIVE_MESSAGE_TOKEN)
return receive_message
def _send_initial_metadata(state: _RPCState) -> ServerCallbackTag:
def send_initial_metadata(unused_send_initial_metadata_event):
with state.condition:
return _possibly_finish_call(state, _SEND_INITIAL_METADATA_TOKEN)
return send_initial_metadata
def _send_message(state: _RPCState, token: str) -> ServerCallbackTag:
def send_message(unused_send_message_event):
with state.condition:
state.condition.notify_all()
return _possibly_finish_call(state, token)
return send_message
class _Context(grpc.ServicerContext):
_rpc_event: cygrpc.BaseEvent
_state: _RPCState
request_deserializer: Optional[DeserializingFunction]
def __init__(
self,
rpc_event: cygrpc.BaseEvent,
state: _RPCState,
request_deserializer: Optional[DeserializingFunction],
):
self._rpc_event = rpc_event
self._state = state
self._request_deserializer = request_deserializer
def is_active(self) -> bool:
with self._state.condition:
return _is_rpc_state_active(self._state)
def time_remaining(self) -> float:
return max(self._rpc_event.call_details.deadline - time.time(), 0)
def cancel(self) -> None:
self._rpc_event.call.cancel()
def add_callback(self, callback: NullaryCallbackType) -> bool:
with self._state.condition:
if self._state.callbacks is None:
return False
else:
self._state.callbacks.append(callback)
return True
def disable_next_message_compression(self) -> None:
with self._state.condition:
self._state.disable_next_compression = True
def invocation_metadata(self) -> Optional[MetadataType]:
return self._rpc_event.invocation_metadata
def peer(self) -> str:
return _common.decode(self._rpc_event.call.peer())
def peer_identities(self) -> Optional[Sequence[bytes]]:
return cygrpc.peer_identities(self._rpc_event.call)
def peer_identity_key(self) -> Optional[str]:
id_key = cygrpc.peer_identity_key(self._rpc_event.call)
return id_key if id_key is None else _common.decode(id_key)
def auth_context(self) -> Mapping[str, Sequence[bytes]]:
auth_context = cygrpc.auth_context(self._rpc_event.call)
auth_context_dict = {} if auth_context is None else auth_context
return {
_common.decode(key): value
for key, value in auth_context_dict.items()
}
def set_compression(self, compression: grpc.Compression) -> None:
with self._state.condition:
self._state.compression_algorithm = compression
def send_initial_metadata(self, initial_metadata: MetadataType) -> None:
with self._state.condition:
if self._state.client is _CANCELLED:
_raise_rpc_error(self._state)
else:
if self._state.initial_metadata_allowed:
operation = _get_initial_metadata_operation(
self._state, initial_metadata
)
self._rpc_event.call.start_server_batch(
(operation,), _send_initial_metadata(self._state)
)
self._state.initial_metadata_allowed = False
self._state.due.add(_SEND_INITIAL_METADATA_TOKEN)
else:
raise ValueError("Initial metadata no longer allowed!")
def set_trailing_metadata(self, trailing_metadata: MetadataType) -> None:
with self._state.condition:
self._state.trailing_metadata = trailing_metadata
def trailing_metadata(self) -> Optional[MetadataType]:
return self._state.trailing_metadata
def abort(self, code: grpc.StatusCode, details: str) -> None:
# treat OK like other invalid arguments: fail the RPC
if code == grpc.StatusCode.OK:
_LOGGER.error(
"abort() called with StatusCode.OK; returning UNKNOWN"
)
code = grpc.StatusCode.UNKNOWN
details = ""
with self._state.condition:
self._state.code = code
self._state.details = _common.encode(details)
self._state.aborted = True
raise Exception()
def abort_with_status(self, status: grpc.Status) -> None:
self._state.trailing_metadata = status.trailing_metadata
self.abort(status.code, status.details)
def set_code(self, code: grpc.StatusCode) -> None:
with self._state.condition:
self._state.code = code
def code(self) -> grpc.StatusCode:
return self._state.code
def set_details(self, details: str) -> None:
with self._state.condition:
self._state.details = _common.encode(details)
def details(self) -> bytes:
return self._state.details
def _finalize_state(self) -> None:
pass
class _RequestIterator(object):
_state: _RPCState
_call: cygrpc.Call
_request_deserializer: Optional[DeserializingFunction]
def __init__(
self,
state: _RPCState,
call: cygrpc.Call,
request_deserializer: Optional[DeserializingFunction],
):
self._state = state
self._call = call
self._request_deserializer = request_deserializer
def _raise_or_start_receive_message(self) -> None:
if self._state.client is _CANCELLED:
_raise_rpc_error(self._state)
elif not _is_rpc_state_active(self._state):
raise StopIteration()
else:
self._call.start_server_batch(
(cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),),
_receive_message(
self._state, self._call, self._request_deserializer
),
)
self._state.due.add(_RECEIVE_MESSAGE_TOKEN)
def _look_for_request(self) -> Any:
if self._state.client is _CANCELLED:
_raise_rpc_error(self._state)
elif (
self._state.request is None
and _RECEIVE_MESSAGE_TOKEN not in self._state.due
):
raise StopIteration()
else:
request = self._state.request
self._state.request = None
return request
raise AssertionError() # should never run
def _next(self) -> Any:
with self._state.condition:
self._raise_or_start_receive_message()
while True:
self._state.condition.wait()
request = self._look_for_request()
if request is not None:
return request
def __iter__(self) -> _RequestIterator:
return self
def __next__(self) -> Any:
return self._next()
def next(self) -> Any:
return self._next()
def _unary_request(
rpc_event: cygrpc.BaseEvent,
state: _RPCState,
request_deserializer: Optional[DeserializingFunction],
) -> Callable[[], Any]:
def unary_request():
with state.condition:
if not _is_rpc_state_active(state):
return None
else:
rpc_event.call.start_server_batch(
(cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),),
_receive_message(
state, rpc_event.call, request_deserializer
),
)
state.due.add(_RECEIVE_MESSAGE_TOKEN)
while True:
state.condition.wait()
if state.request is None:
if state.client is _CLOSED:
details = '"{}" requires exactly one request message.'.format(
rpc_event.call_details.method
)
_abort(
state,
rpc_event.call,
cygrpc.StatusCode.unimplemented,
_common.encode(details),
)
return None
elif state.client is _CANCELLED:
return None
else:
request = state.request
state.request = None
return request
return unary_request
def _call_behavior(
rpc_event: cygrpc.BaseEvent,
state: _RPCState,
behavior: ArityAgnosticMethodHandler,
argument: Any,
request_deserializer: Optional[DeserializingFunction],
send_response_callback: Optional[Callable[[ResponseType], None]] = None,
) -> Tuple[Union[ResponseType, Iterator[ResponseType]], bool]:
from grpc import _create_servicer_context # pytype: disable=pyi-error
with _create_servicer_context(
rpc_event, state, request_deserializer
) as context:
try:
response_or_iterator = None
if send_response_callback is not None:
response_or_iterator = behavior(
argument, context, send_response_callback
)
else:
response_or_iterator = behavior(argument, context)
return response_or_iterator, True
except Exception as exception: # pylint: disable=broad-except
with state.condition:
if state.aborted:
_abort(
state,
rpc_event.call,
cygrpc.StatusCode.unknown,
b"RPC Aborted",
)
elif exception not in state.rpc_errors:
try:
details = "Exception calling application: {}".format(
exception
)
except Exception: # pylint: disable=broad-except
details = (
"Calling application raised unprintable Exception!"
)
traceback.print_exc()
_LOGGER.exception(details)
_abort(
state,
rpc_event.call,
cygrpc.StatusCode.unknown,
_common.encode(details),
)
return None, False
def _take_response_from_response_iterator(
rpc_event: cygrpc.BaseEvent,
state: _RPCState,
response_iterator: Iterator[ResponseType],
) -> Tuple[ResponseType, bool]:
try:
return next(response_iterator), True
except StopIteration:
return None, True
except Exception as exception: # pylint: disable=broad-except
with state.condition:
if state.aborted:
_abort(
state,
rpc_event.call,
cygrpc.StatusCode.unknown,
b"RPC Aborted",
)
elif exception not in state.rpc_errors:
details = "Exception iterating responses: {}".format(exception)
_LOGGER.exception(details)
_abort(
state,
rpc_event.call,
cygrpc.StatusCode.unknown,
_common.encode(details),
)
return None, False
def _serialize_response(
rpc_event: cygrpc.BaseEvent,
state: _RPCState,
response: Any,
response_serializer: Optional[SerializingFunction],
) -> Optional[bytes]:
serialized_response = _common.serialize(response, response_serializer)
if serialized_response is None:
with state.condition:
_abort(
state,
rpc_event.call,
cygrpc.StatusCode.internal,
b"Failed to serialize response!",
)
return None
else:
return serialized_response
def _get_send_message_op_flags_from_state(
state: _RPCState,
) -> Union[int, cygrpc.WriteFlag]:
if state.disable_next_compression:
return cygrpc.WriteFlag.no_compress
else:
return _EMPTY_FLAGS
def _reset_per_message_state(state: _RPCState) -> None:
with state.condition:
state.disable_next_compression = False
def _send_response(
rpc_event: cygrpc.BaseEvent, state: _RPCState, serialized_response: bytes
) -> bool:
with state.condition:
if not _is_rpc_state_active(state):
return False
else:
if state.initial_metadata_allowed:
operations = (
_get_initial_metadata_operation(state, None),
cygrpc.SendMessageOperation(
serialized_response,
_get_send_message_op_flags_from_state(state),
),
)
state.initial_metadata_allowed = False
token = _SEND_INITIAL_METADATA_AND_SEND_MESSAGE_TOKEN
else:
operations = (
cygrpc.SendMessageOperation(
serialized_response,
_get_send_message_op_flags_from_state(state),
),
)
token = _SEND_MESSAGE_TOKEN
rpc_event.call.start_server_batch(
operations, _send_message(state, token)
)
state.due.add(token)
_reset_per_message_state(state)
while True:
state.condition.wait()
if token not in state.due:
return _is_rpc_state_active(state)
def _status(
rpc_event: cygrpc.BaseEvent,
state: _RPCState,
serialized_response: Optional[bytes],
) -> None:
with state.condition:
if state.client is not _CANCELLED:
code = _completion_code(state)
details = _details(state)
operations = [
cygrpc.SendStatusFromServerOperation(
state.trailing_metadata, code, details, _EMPTY_FLAGS
),
]
if state.initial_metadata_allowed:
operations.append(_get_initial_metadata_operation(state, None))
if serialized_response is not None:
operations.append(
cygrpc.SendMessageOperation(
serialized_response,
_get_send_message_op_flags_from_state(state),
)
)
rpc_event.call.start_server_batch(
operations,
_send_status_from_server(state, _SEND_STATUS_FROM_SERVER_TOKEN),
)
state.statused = True
_reset_per_message_state(state)
state.due.add(_SEND_STATUS_FROM_SERVER_TOKEN)
def _unary_response_in_pool(
rpc_event: cygrpc.BaseEvent,
state: _RPCState,
behavior: ArityAgnosticMethodHandler,
argument_thunk: Callable[[], Any],
request_deserializer: Optional[SerializingFunction],
response_serializer: Optional[SerializingFunction],
) -> None:
cygrpc.install_context_from_request_call_event(rpc_event)
try:
argument = argument_thunk()
if argument is not None:
response, proceed = _call_behavior(
rpc_event, state, behavior, argument, request_deserializer
)
if proceed:
serialized_response = _serialize_response(
rpc_event, state, response, response_serializer
)
if serialized_response is not None:
_status(rpc_event, state, serialized_response)
except Exception: # pylint: disable=broad-except
traceback.print_exc()
finally:
cygrpc.uninstall_context()
def _stream_response_in_pool(
rpc_event: cygrpc.BaseEvent,
state: _RPCState,
behavior: ArityAgnosticMethodHandler,
argument_thunk: Callable[[], Any],
request_deserializer: Optional[DeserializingFunction],
response_serializer: Optional[SerializingFunction],
) -> None:
cygrpc.install_context_from_request_call_event(rpc_event)
def send_response(response: Any) -> None:
if response is None:
_status(rpc_event, state, None)
else:
serialized_response = _serialize_response(
rpc_event, state, response, response_serializer
)
if serialized_response is not None:
_send_response(rpc_event, state, serialized_response)
try:
argument = argument_thunk()
if argument is not None:
if (
hasattr(behavior, "experimental_non_blocking")
and behavior.experimental_non_blocking
):
_call_behavior(
rpc_event,
state,
behavior,
argument,
request_deserializer,
send_response_callback=send_response,
)
else:
response_iterator, proceed = _call_behavior(
rpc_event, state, behavior, argument, request_deserializer
)
if proceed:
_send_message_callback_to_blocking_iterator_adapter(
rpc_event, state, send_response, response_iterator
)
except Exception: # pylint: disable=broad-except
traceback.print_exc()
finally:
cygrpc.uninstall_context()
def _is_rpc_state_active(state: _RPCState) -> bool:
return state.client is not _CANCELLED and not state.statused
def _send_message_callback_to_blocking_iterator_adapter(
rpc_event: cygrpc.BaseEvent,
state: _RPCState,
send_response_callback: Callable[[ResponseType], None],
response_iterator: Iterator[ResponseType],
) -> None:
while True:
response, proceed = _take_response_from_response_iterator(
rpc_event, state, response_iterator
)
if proceed:
send_response_callback(response)
if not _is_rpc_state_active(state):
break
else:
break
def _select_thread_pool_for_behavior(
behavior: ArityAgnosticMethodHandler,
default_thread_pool: futures.ThreadPoolExecutor,
) -> futures.ThreadPoolExecutor:
if hasattr(behavior, "experimental_thread_pool") and isinstance(
behavior.experimental_thread_pool, futures.ThreadPoolExecutor
):
return behavior.experimental_thread_pool
else:
return default_thread_pool
def _handle_unary_unary(
rpc_event: cygrpc.BaseEvent,
state: _RPCState,
method_handler: grpc.RpcMethodHandler,
default_thread_pool: futures.ThreadPoolExecutor,
) -> futures.Future:
unary_request = _unary_request(
rpc_event, state, method_handler.request_deserializer
)
thread_pool = _select_thread_pool_for_behavior(
method_handler.unary_unary, default_thread_pool
)
return thread_pool.submit(
_unary_response_in_pool,
rpc_event,
state,
method_handler.unary_unary,
unary_request,
method_handler.request_deserializer,
method_handler.response_serializer,
)
def _handle_unary_stream(
rpc_event: cygrpc.BaseEvent,
state: _RPCState,
method_handler: grpc.RpcMethodHandler,
default_thread_pool: futures.ThreadPoolExecutor,
) -> futures.Future:
unary_request = _unary_request(
rpc_event, state, method_handler.request_deserializer
)
thread_pool = _select_thread_pool_for_behavior(
method_handler.unary_stream, default_thread_pool
)
return thread_pool.submit(
_stream_response_in_pool,
rpc_event,
state,
method_handler.unary_stream,
unary_request,
method_handler.request_deserializer,
method_handler.response_serializer,
)
def _handle_stream_unary(
rpc_event: cygrpc.BaseEvent,
state: _RPCState,
method_handler: grpc.RpcMethodHandler,
default_thread_pool: futures.ThreadPoolExecutor,
) -> futures.Future:
request_iterator = _RequestIterator(
state, rpc_event.call, method_handler.request_deserializer
)
thread_pool = _select_thread_pool_for_behavior(
method_handler.stream_unary, default_thread_pool
)
return thread_pool.submit(
_unary_response_in_pool,
rpc_event,
state,
method_handler.stream_unary,
lambda: request_iterator,
method_handler.request_deserializer,
method_handler.response_serializer,
)
def _handle_stream_stream(
rpc_event: cygrpc.BaseEvent,
state: _RPCState,
method_handler: grpc.RpcMethodHandler,
default_thread_pool: futures.ThreadPoolExecutor,
) -> futures.Future:
request_iterator = _RequestIterator(
state, rpc_event.call, method_handler.request_deserializer
)
thread_pool = _select_thread_pool_for_behavior(
method_handler.stream_stream, default_thread_pool
)
return thread_pool.submit(
_stream_response_in_pool,
rpc_event,
state,
method_handler.stream_stream,
lambda: request_iterator,
method_handler.request_deserializer,
method_handler.response_serializer,
)
def _find_method_handler(
rpc_event: cygrpc.BaseEvent,
generic_handlers: List[grpc.GenericRpcHandler],
interceptor_pipeline: Optional[_interceptor._ServicePipeline],
) -> Optional[grpc.RpcMethodHandler]:
def query_handlers(
handler_call_details: _HandlerCallDetails,
) -> Optional[grpc.RpcMethodHandler]:
for generic_handler in generic_handlers:
method_handler = generic_handler.service(handler_call_details)
if method_handler is not None:
return method_handler
return None
handler_call_details = _HandlerCallDetails(
_common.decode(rpc_event.call_details.method),
rpc_event.invocation_metadata,
)
if interceptor_pipeline is not None:
return interceptor_pipeline.execute(
query_handlers, handler_call_details
)
else:
return query_handlers(handler_call_details)
def _reject_rpc(
rpc_event: cygrpc.BaseEvent, status: cygrpc.StatusCode, details: bytes
) -> _RPCState:
rpc_state = _RPCState()
operations = (
_get_initial_metadata_operation(rpc_state, None),
cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS),
cygrpc.SendStatusFromServerOperation(
None, status, details, _EMPTY_FLAGS
),
)
rpc_event.call.start_server_batch(
operations,
lambda ignored_event: (
rpc_state,
(),
),
)
return rpc_state
def _handle_with_method_handler(
rpc_event: cygrpc.BaseEvent,
method_handler: grpc.RpcMethodHandler,
thread_pool: futures.ThreadPoolExecutor,
) -> Tuple[_RPCState, futures.Future]:
state = _RPCState()
with state.condition:
rpc_event.call.start_server_batch(
(cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS),),
_receive_close_on_server(state),
)
state.due.add(_RECEIVE_CLOSE_ON_SERVER_TOKEN)
if method_handler.request_streaming:
if method_handler.response_streaming:
return state, _handle_stream_stream(
rpc_event, state, method_handler, thread_pool
)
else:
return state, _handle_stream_unary(
rpc_event, state, method_handler, thread_pool
)
else:
if method_handler.response_streaming:
return state, _handle_unary_stream(
rpc_event, state, method_handler, thread_pool
)
else:
return state, _handle_unary_unary(
rpc_event, state, method_handler, thread_pool
)
def _handle_call(
rpc_event: cygrpc.BaseEvent,
generic_handlers: List[grpc.GenericRpcHandler],
interceptor_pipeline: Optional[_interceptor._ServicePipeline],
thread_pool: futures.ThreadPoolExecutor,
concurrency_exceeded: bool,
) -> Tuple[Optional[_RPCState], Optional[futures.Future]]:
if not rpc_event.success:
return None, None
if rpc_event.call_details.method is not None:
try:
method_handler = _find_method_handler(
rpc_event, generic_handlers, interceptor_pipeline
)
except Exception as exception: # pylint: disable=broad-except
details = "Exception servicing handler: {}".format(exception)
_LOGGER.exception(details)
return (
_reject_rpc(
rpc_event,
cygrpc.StatusCode.unknown,
b"Error in service handler!",
),
None,
)
if method_handler is None:
return (
_reject_rpc(
rpc_event,
cygrpc.StatusCode.unimplemented,
b"Method not found!",
),
None,
)
elif concurrency_exceeded:
return (
_reject_rpc(
rpc_event,
cygrpc.StatusCode.resource_exhausted,
b"Concurrent RPC limit exceeded!",
),
None,
)
else:
return _handle_with_method_handler(
rpc_event, method_handler, thread_pool
)
else:
return None, None
@enum.unique
class _ServerStage(enum.Enum):
STOPPED = "stopped"
STARTED = "started"
GRACE = "grace"
class _ServerState(object):
lock: threading.RLock
completion_queue: cygrpc.CompletionQueue
server: cygrpc.Server
generic_handlers: List[grpc.GenericRpcHandler]
interceptor_pipeline: Optional[_interceptor._ServicePipeline]
thread_pool: futures.ThreadPoolExecutor
stage: _ServerStage
termination_event: threading.Event
shutdown_events: List[threading.Event]
maximum_concurrent_rpcs: Optional[int]
active_rpc_count: int
rpc_states: Set[_RPCState]
due: Set[str]
server_deallocated: bool
# pylint: disable=too-many-arguments
def __init__(
self,
completion_queue: cygrpc.CompletionQueue,
server: cygrpc.Server,
generic_handlers: Sequence[grpc.GenericRpcHandler],
interceptor_pipeline: Optional[_interceptor._ServicePipeline],
thread_pool: futures.ThreadPoolExecutor,
maximum_concurrent_rpcs: Optional[int],
):
self.lock = threading.RLock()
self.completion_queue = completion_queue
self.server = server
self.generic_handlers = list(generic_handlers)
self.interceptor_pipeline = interceptor_pipeline
self.thread_pool = thread_pool
self.stage = _ServerStage.STOPPED
self.termination_event = threading.Event()
self.shutdown_events = [self.termination_event]
self.maximum_concurrent_rpcs = maximum_concurrent_rpcs
self.active_rpc_count = 0
# TODO(https://github.com/grpc/grpc/issues/6597): eliminate these fields.
self.rpc_states = set()
self.due = set()
# A "volatile" flag to interrupt the daemon serving thread
self.server_deallocated = False
def _add_generic_handlers(
state: _ServerState, generic_handlers: Iterable[grpc.GenericRpcHandler]
) -> None:
with state.lock:
state.generic_handlers.extend(generic_handlers)
def _add_insecure_port(state: _ServerState, address: bytes) -> int:
with state.lock:
return state.server.add_http2_port(address)
def _add_secure_port(
state: _ServerState,
address: bytes,
server_credentials: grpc.ServerCredentials,
) -> int:
with state.lock:
return state.server.add_http2_port(
address, server_credentials._credentials
)
def _request_call(state: _ServerState) -> None:
state.server.request_call(
state.completion_queue, state.completion_queue, _REQUEST_CALL_TAG
)
state.due.add(_REQUEST_CALL_TAG)
# TODO(https://github.com/grpc/grpc/issues/6597): delete this function.
def _stop_serving(state: _ServerState) -> bool:
if not state.rpc_states and not state.due:
state.server.destroy()
for shutdown_event in state.shutdown_events:
shutdown_event.set()
state.stage = _ServerStage.STOPPED
return True
else:
return False
def _on_call_completed(state: _ServerState) -> None:
with state.lock:
state.active_rpc_count -= 1
def _process_event_and_continue(
state: _ServerState, event: cygrpc.BaseEvent
) -> bool:
should_continue = True
if event.tag is _SHUTDOWN_TAG:
with state.lock:
state.due.remove(_SHUTDOWN_TAG)
if _stop_serving(state):
should_continue = False
elif event.tag is _REQUEST_CALL_TAG:
with state.lock:
state.due.remove(_REQUEST_CALL_TAG)
concurrency_exceeded = (
state.maximum_concurrent_rpcs is not None
and state.active_rpc_count >= state.maximum_concurrent_rpcs
)
rpc_state, rpc_future = _handle_call(
event,
state.generic_handlers,
state.interceptor_pipeline,
state.thread_pool,
concurrency_exceeded,
)
if rpc_state is not None:
state.rpc_states.add(rpc_state)
if rpc_future is not None:
state.active_rpc_count += 1
rpc_future.add_done_callback(
lambda unused_future: _on_call_completed(state)
)
if state.stage is _ServerStage.STARTED:
_request_call(state)
elif _stop_serving(state):
should_continue = False
else:
rpc_state, callbacks = event.tag(event)
for callback in callbacks:
try:
callback()
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Exception calling callback!")
if rpc_state is not None:
with state.lock:
state.rpc_states.remove(rpc_state)
if _stop_serving(state):
should_continue = False
return should_continue
def _serve(state: _ServerState) -> None:
while True:
timeout = time.time() + _DEALLOCATED_SERVER_CHECK_PERIOD_S
event = state.completion_queue.poll(timeout)
if state.server_deallocated:
_begin_shutdown_once(state)
if event.completion_type != cygrpc.CompletionType.queue_timeout:
if not _process_event_and_continue(state, event):
return
# We want to force the deletion of the previous event
# ~before~ we poll again; if the event has a reference
# to a shutdown Call object, this can induce spinlock.
event = None
def _begin_shutdown_once(state: _ServerState) -> None:
with state.lock:
if state.stage is _ServerStage.STARTED:
state.server.shutdown(state.completion_queue, _SHUTDOWN_TAG)
state.stage = _ServerStage.GRACE
state.due.add(_SHUTDOWN_TAG)
def _stop(state: _ServerState, grace: Optional[float]) -> threading.Event:
with state.lock:
if state.stage is _ServerStage.STOPPED:
shutdown_event = threading.Event()
shutdown_event.set()
return shutdown_event
else:
_begin_shutdown_once(state)
shutdown_event = threading.Event()
state.shutdown_events.append(shutdown_event)
if grace is None:
state.server.cancel_all_calls()
else:
def cancel_all_calls_after_grace():
shutdown_event.wait(timeout=grace)
with state.lock:
state.server.cancel_all_calls()
thread = threading.Thread(target=cancel_all_calls_after_grace)
thread.start()
return shutdown_event
shutdown_event.wait()
return shutdown_event
def _start(state: _ServerState) -> None:
with state.lock:
if state.stage is not _ServerStage.STOPPED:
raise ValueError("Cannot start already-started server!")
state.server.start()
state.stage = _ServerStage.STARTED
_request_call(state)
thread = threading.Thread(target=_serve, args=(state,))
thread.daemon = True
thread.start()
def _validate_generic_rpc_handlers(
generic_rpc_handlers: Iterable[grpc.GenericRpcHandler],
) -> None:
for generic_rpc_handler in generic_rpc_handlers:
service_attribute = getattr(generic_rpc_handler, "service", None)
if service_attribute is None:
raise AttributeError(
'"{}" must conform to grpc.GenericRpcHandler type but does '
'not have "service" method!'.format(generic_rpc_handler)
)
def _augment_options(
base_options: Sequence[ChannelArgumentType],
compression: Optional[grpc.Compression],
) -> Sequence[ChannelArgumentType]:
compression_option = _compression.create_channel_option(compression)
return tuple(base_options) + compression_option
class _Server(grpc.Server):
_state: _ServerState
# pylint: disable=too-many-arguments
def __init__(
self,
thread_pool: futures.ThreadPoolExecutor,
generic_handlers: Sequence[grpc.GenericRpcHandler],
interceptors: Sequence[grpc.ServerInterceptor],
options: Sequence[ChannelArgumentType],
maximum_concurrent_rpcs: Optional[int],
compression: Optional[grpc.Compression],
xds: bool,
):
completion_queue = cygrpc.CompletionQueue()
server = cygrpc.Server(_augment_options(options, compression), xds)
server.register_completion_queue(completion_queue)
self._state = _ServerState(
completion_queue,
server,
generic_handlers,
_interceptor.service_pipeline(interceptors),
thread_pool,
maximum_concurrent_rpcs,
)
def add_generic_rpc_handlers(
self, generic_rpc_handlers: Iterable[grpc.GenericRpcHandler]
) -> None:
_validate_generic_rpc_handlers(generic_rpc_handlers)
_add_generic_handlers(self._state, generic_rpc_handlers)
def add_insecure_port(self, address: str) -> int:
return _common.validate_port_binding_result(
address, _add_insecure_port(self._state, _common.encode(address))
)
def add_secure_port(
self, address: str, server_credentials: grpc.ServerCredentials
) -> int:
return _common.validate_port_binding_result(
address,
_add_secure_port(
self._state, _common.encode(address), server_credentials
),
)
def start(self) -> None:
_start(self._state)
def wait_for_termination(self, timeout: Optional[float] = None) -> bool:
# NOTE(https://bugs.python.org/issue35935)
# Remove this workaround once threading.Event.wait() is working with
# CTRL+C across platforms.
return _common.wait(
self._state.termination_event.wait,
self._state.termination_event.is_set,
timeout=timeout,
)
def stop(self, grace: Optional[float]) -> threading.Event:
return _stop(self._state, grace)
def __del__(self):
if hasattr(self, "_state"):
# We can not grab a lock in __del__(), so set a flag to signal the
# serving daemon thread (if it exists) to initiate shutdown.
self._state.server_deallocated = True
def create_server(
thread_pool: futures.ThreadPoolExecutor,
generic_rpc_handlers: Sequence[grpc.GenericRpcHandler],
interceptors: Sequence[grpc.ServerInterceptor],
options: Sequence[ChannelArgumentType],
maximum_concurrent_rpcs: Optional[int],
compression: Optional[grpc.Compression],
xds: bool,
) -> _Server:
_validate_generic_rpc_handlers(generic_rpc_handlers)
return _Server(
thread_pool,
generic_rpc_handlers,
interceptors,
options,
maximum_concurrent_rpcs,
compression,
xds,
)
| 45,545
| 32.26954
| 90
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/_compression.py
|
# Copyright 2019 The gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from typing import Optional
import grpc
from grpc._cython import cygrpc
from grpc._typing import MetadataType
NoCompression = cygrpc.CompressionAlgorithm.none
Deflate = cygrpc.CompressionAlgorithm.deflate
Gzip = cygrpc.CompressionAlgorithm.gzip
_METADATA_STRING_MAPPING = {
NoCompression: "identity",
Deflate: "deflate",
Gzip: "gzip",
}
def _compression_algorithm_to_metadata_value(
compression: grpc.Compression,
) -> str:
return _METADATA_STRING_MAPPING[compression]
def compression_algorithm_to_metadata(compression: grpc.Compression):
return (
cygrpc.GRPC_COMPRESSION_REQUEST_ALGORITHM_MD_KEY,
_compression_algorithm_to_metadata_value(compression),
)
def create_channel_option(compression: Optional[grpc.Compression]):
return (
((cygrpc.GRPC_COMPRESSION_CHANNEL_DEFAULT_ALGORITHM, int(compression)),)
if compression
else ()
)
def augment_metadata(
metadata: Optional[MetadataType], compression: Optional[grpc.Compression]
):
if not metadata and not compression:
return None
base_metadata = tuple(metadata) if metadata else ()
compression_metadata = (
(compression_algorithm_to_metadata(compression),) if compression else ()
)
return base_metadata + compression_metadata
__all__ = (
"NoCompression",
"Deflate",
"Gzip",
)
| 1,983
| 26.555556
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/beta/interfaces.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Constants and interfaces of the Beta API of gRPC Python."""
import abc
import grpc
ChannelConnectivity = grpc.ChannelConnectivity
# FATAL_FAILURE was a Beta-API name for SHUTDOWN
ChannelConnectivity.FATAL_FAILURE = ChannelConnectivity.SHUTDOWN
StatusCode = grpc.StatusCode
class GRPCCallOptions(object):
"""A value encapsulating gRPC-specific options passed on RPC invocation.
This class and its instances have no supported interface - it exists to
define the type of its instances and its instances exist to be passed to
other functions.
"""
def __init__(self, disable_compression, subcall_of, credentials):
self.disable_compression = disable_compression
self.subcall_of = subcall_of
self.credentials = credentials
def grpc_call_options(disable_compression=False, credentials=None):
"""Creates a GRPCCallOptions value to be passed at RPC invocation.
All parameters are optional and should always be passed by keyword.
Args:
disable_compression: A boolean indicating whether or not compression should
be disabled for the request object of the RPC. Only valid for
request-unary RPCs.
credentials: A CallCredentials object to use for the invoked RPC.
"""
return GRPCCallOptions(disable_compression, None, credentials)
GRPCAuthMetadataContext = grpc.AuthMetadataContext
GRPCAuthMetadataPluginCallback = grpc.AuthMetadataPluginCallback
GRPCAuthMetadataPlugin = grpc.AuthMetadataPlugin
class GRPCServicerContext(abc.ABC):
"""Exposes gRPC-specific options and behaviors to code servicing RPCs."""
@abc.abstractmethod
def peer(self):
"""Identifies the peer that invoked the RPC being serviced.
Returns:
A string identifying the peer that invoked the RPC being serviced.
"""
raise NotImplementedError()
@abc.abstractmethod
def disable_next_response_compression(self):
"""Disables compression of the next response passed by the application."""
raise NotImplementedError()
class GRPCInvocationContext(abc.ABC):
"""Exposes gRPC-specific options and behaviors to code invoking RPCs."""
@abc.abstractmethod
def disable_next_request_compression(self):
"""Disables compression of the next request passed by the application."""
raise NotImplementedError()
class Server(abc.ABC):
"""Services RPCs."""
@abc.abstractmethod
def add_insecure_port(self, address):
"""Reserves a port for insecure RPC service once this Server becomes active.
This method may only be called before calling this Server's start method is
called.
Args:
address: The address for which to open a port.
Returns:
An integer port on which RPCs will be serviced after this link has been
started. This is typically the same number as the port number contained
in the passed address, but will likely be different if the port number
contained in the passed address was zero.
"""
raise NotImplementedError()
@abc.abstractmethod
def add_secure_port(self, address, server_credentials):
"""Reserves a port for secure RPC service after this Server becomes active.
This method may only be called before calling this Server's start method is
called.
Args:
address: The address for which to open a port.
server_credentials: A ServerCredentials.
Returns:
An integer port on which RPCs will be serviced after this link has been
started. This is typically the same number as the port number contained
in the passed address, but will likely be different if the port number
contained in the passed address was zero.
"""
raise NotImplementedError()
@abc.abstractmethod
def start(self):
"""Starts this Server's service of RPCs.
This method may only be called while the server is not serving RPCs (i.e. it
is not idempotent).
"""
raise NotImplementedError()
@abc.abstractmethod
def stop(self, grace):
"""Stops this Server's service of RPCs.
All calls to this method immediately stop service of new RPCs. When existing
RPCs are aborted is controlled by the grace period parameter passed to this
method.
This method may be called at any time and is idempotent. Passing a smaller
grace value than has been passed in a previous call will have the effect of
stopping the Server sooner. Passing a larger grace value than has been
passed in a previous call will not have the effect of stopping the server
later.
Args:
grace: A duration of time in seconds to allow existing RPCs to complete
before being aborted by this Server's stopping. May be zero for
immediate abortion of all in-progress RPCs.
Returns:
A threading.Event that will be set when this Server has completely
stopped. The returned event may not be set until after the full grace
period (if some ongoing RPC continues for the full length of the period)
of it may be set much sooner (such as if this Server had no RPCs underway
at the time it was stopped or if all RPCs that it had underway completed
very early in the grace period).
"""
raise NotImplementedError()
| 6,082
| 36.091463
| 84
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/beta/_metadata.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""API metadata conversion utilities."""
import collections
_Metadatum = collections.namedtuple(
"_Metadatum",
(
"key",
"value",
),
)
def _beta_metadatum(key, value):
beta_key = key if isinstance(key, (bytes,)) else key.encode("ascii")
beta_value = value if isinstance(value, (bytes,)) else value.encode("ascii")
return _Metadatum(beta_key, beta_value)
def _metadatum(beta_key, beta_value):
key = beta_key if isinstance(beta_key, (str,)) else beta_key.decode("utf8")
if isinstance(beta_value, (str,)) or key[-4:] == "-bin":
value = beta_value
else:
value = beta_value.decode("utf8")
return _Metadatum(key, value)
def beta(metadata):
if metadata is None:
return ()
else:
return tuple(_beta_metadatum(key, value) for key, value in metadata)
def unbeta(beta_metadata):
if beta_metadata is None:
return ()
else:
return tuple(
_metadatum(beta_key, beta_value)
for beta_key, beta_value in beta_metadata
)
| 1,638
| 27.754386
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/beta/utilities.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for the gRPC Python Beta API."""
import threading
import time
# implementations is referenced from specification in this module.
from grpc.beta import implementations # pylint: disable=unused-import
from grpc.beta import interfaces
from grpc.framework.foundation import callable_util
from grpc.framework.foundation import future
_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE = (
'Exception calling connectivity future "done" callback!'
)
class _ChannelReadyFuture(future.Future):
def __init__(self, channel):
self._condition = threading.Condition()
self._channel = channel
self._matured = False
self._cancelled = False
self._done_callbacks = []
def _block(self, timeout):
until = None if timeout is None else time.time() + timeout
with self._condition:
while True:
if self._cancelled:
raise future.CancelledError()
elif self._matured:
return
else:
if until is None:
self._condition.wait()
else:
remaining = until - time.time()
if remaining < 0:
raise future.TimeoutError()
else:
self._condition.wait(timeout=remaining)
def _update(self, connectivity):
with self._condition:
if (
not self._cancelled
and connectivity is interfaces.ChannelConnectivity.READY
):
self._matured = True
self._channel.unsubscribe(self._update)
self._condition.notify_all()
done_callbacks = tuple(self._done_callbacks)
self._done_callbacks = None
else:
return
for done_callback in done_callbacks:
callable_util.call_logging_exceptions(
done_callback, _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE, self
)
def cancel(self):
with self._condition:
if not self._matured:
self._cancelled = True
self._channel.unsubscribe(self._update)
self._condition.notify_all()
done_callbacks = tuple(self._done_callbacks)
self._done_callbacks = None
else:
return False
for done_callback in done_callbacks:
callable_util.call_logging_exceptions(
done_callback, _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE, self
)
return True
def cancelled(self):
with self._condition:
return self._cancelled
def running(self):
with self._condition:
return not self._cancelled and not self._matured
def done(self):
with self._condition:
return self._cancelled or self._matured
def result(self, timeout=None):
self._block(timeout)
return None
def exception(self, timeout=None):
self._block(timeout)
return None
def traceback(self, timeout=None):
self._block(timeout)
return None
def add_done_callback(self, fn):
with self._condition:
if not self._cancelled and not self._matured:
self._done_callbacks.append(fn)
return
fn(self)
def start(self):
with self._condition:
self._channel.subscribe(self._update, try_to_connect=True)
def __del__(self):
with self._condition:
if not self._cancelled and not self._matured:
self._channel.unsubscribe(self._update)
def channel_ready_future(channel):
"""Creates a future.Future tracking when an implementations.Channel is ready.
Cancelling the returned future.Future does not tell the given
implementations.Channel to abandon attempts it may have been making to
connect; cancelling merely deactivates the return future.Future's
subscription to the given implementations.Channel's connectivity.
Args:
channel: An implementations.Channel.
Returns:
A future.Future that matures when the given Channel has connectivity
interfaces.ChannelConnectivity.READY.
"""
ready_future = _ChannelReadyFuture(channel)
ready_future.start()
return ready_future
| 5,005
| 31.506494
| 81
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/beta/implementations.py
|
# Copyright 2015-2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Entry points into the Beta API of gRPC Python."""
# threading is referenced from specification in this module.
import threading # pylint: disable=unused-import
# interfaces, cardinality, and face are referenced from specification in this
# module.
import grpc
from grpc import _auth
from grpc.beta import _client_adaptations
from grpc.beta import _metadata
from grpc.beta import _server_adaptations
from grpc.beta import interfaces # pylint: disable=unused-import
from grpc.framework.common import cardinality # pylint: disable=unused-import
from grpc.framework.interfaces.face import face # pylint: disable=unused-import
# pylint: disable=too-many-arguments
ChannelCredentials = grpc.ChannelCredentials
ssl_channel_credentials = grpc.ssl_channel_credentials
CallCredentials = grpc.CallCredentials
def metadata_call_credentials(metadata_plugin, name=None):
def plugin(context, callback):
def wrapped_callback(beta_metadata, error):
callback(_metadata.unbeta(beta_metadata), error)
metadata_plugin(context, wrapped_callback)
return grpc.metadata_call_credentials(plugin, name=name)
def google_call_credentials(credentials):
"""Construct CallCredentials from GoogleCredentials.
Args:
credentials: A GoogleCredentials object from the oauth2client library.
Returns:
A CallCredentials object for use in a GRPCCallOptions object.
"""
return metadata_call_credentials(_auth.GoogleCallCredentials(credentials))
access_token_call_credentials = grpc.access_token_call_credentials
composite_call_credentials = grpc.composite_call_credentials
composite_channel_credentials = grpc.composite_channel_credentials
class Channel(object):
"""A channel to a remote host through which RPCs may be conducted.
Only the "subscribe" and "unsubscribe" methods are supported for application
use. This class' instance constructor and all other attributes are
unsupported.
"""
def __init__(self, channel):
self._channel = channel
def subscribe(self, callback, try_to_connect=None):
"""Subscribes to this Channel's connectivity.
Args:
callback: A callable to be invoked and passed an
interfaces.ChannelConnectivity identifying this Channel's connectivity.
The callable will be invoked immediately upon subscription and again for
every change to this Channel's connectivity thereafter until it is
unsubscribed.
try_to_connect: A boolean indicating whether or not this Channel should
attempt to connect if it is not already connected and ready to conduct
RPCs.
"""
self._channel.subscribe(callback, try_to_connect=try_to_connect)
def unsubscribe(self, callback):
"""Unsubscribes a callback from this Channel's connectivity.
Args:
callback: A callable previously registered with this Channel from having
been passed to its "subscribe" method.
"""
self._channel.unsubscribe(callback)
def insecure_channel(host, port):
"""Creates an insecure Channel to a remote host.
Args:
host: The name of the remote host to which to connect.
port: The port of the remote host to which to connect.
If None only the 'host' part will be used.
Returns:
A Channel to the remote host through which RPCs may be conducted.
"""
channel = grpc.insecure_channel(
host if port is None else "%s:%d" % (host, port)
)
return Channel(channel)
def secure_channel(host, port, channel_credentials):
"""Creates a secure Channel to a remote host.
Args:
host: The name of the remote host to which to connect.
port: The port of the remote host to which to connect.
If None only the 'host' part will be used.
channel_credentials: A ChannelCredentials.
Returns:
A secure Channel to the remote host through which RPCs may be conducted.
"""
channel = grpc.secure_channel(
host if port is None else "%s:%d" % (host, port), channel_credentials
)
return Channel(channel)
class StubOptions(object):
"""A value encapsulating the various options for creation of a Stub.
This class and its instances have no supported interface - it exists to define
the type of its instances and its instances exist to be passed to other
functions.
"""
def __init__(
self,
host,
request_serializers,
response_deserializers,
metadata_transformer,
thread_pool,
thread_pool_size,
):
self.host = host
self.request_serializers = request_serializers
self.response_deserializers = response_deserializers
self.metadata_transformer = metadata_transformer
self.thread_pool = thread_pool
self.thread_pool_size = thread_pool_size
_EMPTY_STUB_OPTIONS = StubOptions(None, None, None, None, None, None)
def stub_options(
host=None,
request_serializers=None,
response_deserializers=None,
metadata_transformer=None,
thread_pool=None,
thread_pool_size=None,
):
"""Creates a StubOptions value to be passed at stub creation.
All parameters are optional and should always be passed by keyword.
Args:
host: A host string to set on RPC calls.
request_serializers: A dictionary from service name-method name pair to
request serialization behavior.
response_deserializers: A dictionary from service name-method name pair to
response deserialization behavior.
metadata_transformer: A callable that given a metadata object produces
another metadata object to be used in the underlying communication on the
wire.
thread_pool: A thread pool to use in stubs.
thread_pool_size: The size of thread pool to create for use in stubs;
ignored if thread_pool has been passed.
Returns:
A StubOptions value created from the passed parameters.
"""
return StubOptions(
host,
request_serializers,
response_deserializers,
metadata_transformer,
thread_pool,
thread_pool_size,
)
def generic_stub(channel, options=None):
"""Creates a face.GenericStub on which RPCs can be made.
Args:
channel: A Channel for use by the created stub.
options: A StubOptions customizing the created stub.
Returns:
A face.GenericStub on which RPCs can be made.
"""
effective_options = _EMPTY_STUB_OPTIONS if options is None else options
return _client_adaptations.generic_stub(
channel._channel, # pylint: disable=protected-access
effective_options.host,
effective_options.metadata_transformer,
effective_options.request_serializers,
effective_options.response_deserializers,
)
def dynamic_stub(channel, service, cardinalities, options=None):
"""Creates a face.DynamicStub with which RPCs can be invoked.
Args:
channel: A Channel for the returned face.DynamicStub to use.
service: The package-qualified full name of the service.
cardinalities: A dictionary from RPC method name to cardinality.Cardinality
value identifying the cardinality of the RPC method.
options: An optional StubOptions value further customizing the functionality
of the returned face.DynamicStub.
Returns:
A face.DynamicStub with which RPCs can be invoked.
"""
effective_options = _EMPTY_STUB_OPTIONS if options is None else options
return _client_adaptations.dynamic_stub(
channel._channel, # pylint: disable=protected-access
service,
cardinalities,
effective_options.host,
effective_options.metadata_transformer,
effective_options.request_serializers,
effective_options.response_deserializers,
)
ServerCredentials = grpc.ServerCredentials
ssl_server_credentials = grpc.ssl_server_credentials
class ServerOptions(object):
"""A value encapsulating the various options for creation of a Server.
This class and its instances have no supported interface - it exists to define
the type of its instances and its instances exist to be passed to other
functions.
"""
def __init__(
self,
multi_method_implementation,
request_deserializers,
response_serializers,
thread_pool,
thread_pool_size,
default_timeout,
maximum_timeout,
):
self.multi_method_implementation = multi_method_implementation
self.request_deserializers = request_deserializers
self.response_serializers = response_serializers
self.thread_pool = thread_pool
self.thread_pool_size = thread_pool_size
self.default_timeout = default_timeout
self.maximum_timeout = maximum_timeout
_EMPTY_SERVER_OPTIONS = ServerOptions(None, None, None, None, None, None, None)
def server_options(
multi_method_implementation=None,
request_deserializers=None,
response_serializers=None,
thread_pool=None,
thread_pool_size=None,
default_timeout=None,
maximum_timeout=None,
):
"""Creates a ServerOptions value to be passed at server creation.
All parameters are optional and should always be passed by keyword.
Args:
multi_method_implementation: A face.MultiMethodImplementation to be called
to service an RPC if the server has no specific method implementation for
the name of the RPC for which service was requested.
request_deserializers: A dictionary from service name-method name pair to
request deserialization behavior.
response_serializers: A dictionary from service name-method name pair to
response serialization behavior.
thread_pool: A thread pool to use in stubs.
thread_pool_size: The size of thread pool to create for use in stubs;
ignored if thread_pool has been passed.
default_timeout: A duration in seconds to allow for RPC service when
servicing RPCs that did not include a timeout value when invoked.
maximum_timeout: A duration in seconds to allow for RPC service when
servicing RPCs no matter what timeout value was passed when the RPC was
invoked.
Returns:
A StubOptions value created from the passed parameters.
"""
return ServerOptions(
multi_method_implementation,
request_deserializers,
response_serializers,
thread_pool,
thread_pool_size,
default_timeout,
maximum_timeout,
)
def server(service_implementations, options=None):
"""Creates an interfaces.Server with which RPCs can be serviced.
Args:
service_implementations: A dictionary from service name-method name pair to
face.MethodImplementation.
options: An optional ServerOptions value further customizing the
functionality of the returned Server.
Returns:
An interfaces.Server with which RPCs can be serviced.
"""
effective_options = _EMPTY_SERVER_OPTIONS if options is None else options
return _server_adaptations.server(
service_implementations,
effective_options.multi_method_implementation,
effective_options.request_deserializers,
effective_options.response_serializers,
effective_options.thread_pool,
effective_options.thread_pool_size,
)
| 12,058
| 33.852601
| 84
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/beta/_client_adaptations.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Translates gRPC's client-side API into gRPC's client-side Beta API."""
import grpc
from grpc import _common
from grpc.beta import _metadata
from grpc.beta import interfaces
from grpc.framework.common import cardinality
from grpc.framework.foundation import future
from grpc.framework.interfaces.face import face
# pylint: disable=too-many-arguments,too-many-locals,unused-argument
_STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS = {
grpc.StatusCode.CANCELLED: (
face.Abortion.Kind.CANCELLED,
face.CancellationError,
),
grpc.StatusCode.UNKNOWN: (
face.Abortion.Kind.REMOTE_FAILURE,
face.RemoteError,
),
grpc.StatusCode.DEADLINE_EXCEEDED: (
face.Abortion.Kind.EXPIRED,
face.ExpirationError,
),
grpc.StatusCode.UNIMPLEMENTED: (
face.Abortion.Kind.LOCAL_FAILURE,
face.LocalError,
),
}
def _effective_metadata(metadata, metadata_transformer):
non_none_metadata = () if metadata is None else metadata
if metadata_transformer is None:
return non_none_metadata
else:
return metadata_transformer(non_none_metadata)
def _credentials(grpc_call_options):
return None if grpc_call_options is None else grpc_call_options.credentials
def _abortion(rpc_error_call):
code = rpc_error_call.code()
pair = _STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS.get(code)
error_kind = face.Abortion.Kind.LOCAL_FAILURE if pair is None else pair[0]
return face.Abortion(
error_kind,
rpc_error_call.initial_metadata(),
rpc_error_call.trailing_metadata(),
code,
rpc_error_call.details(),
)
def _abortion_error(rpc_error_call):
code = rpc_error_call.code()
pair = _STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS.get(code)
exception_class = face.AbortionError if pair is None else pair[1]
return exception_class(
rpc_error_call.initial_metadata(),
rpc_error_call.trailing_metadata(),
code,
rpc_error_call.details(),
)
class _InvocationProtocolContext(interfaces.GRPCInvocationContext):
def disable_next_request_compression(self):
pass # TODO(https://github.com/grpc/grpc/issues/4078): design, implement.
class _Rendezvous(future.Future, face.Call):
def __init__(self, response_future, response_iterator, call):
self._future = response_future
self._iterator = response_iterator
self._call = call
def cancel(self):
return self._call.cancel()
def cancelled(self):
return self._future.cancelled()
def running(self):
return self._future.running()
def done(self):
return self._future.done()
def result(self, timeout=None):
try:
return self._future.result(timeout=timeout)
except grpc.RpcError as rpc_error_call:
raise _abortion_error(rpc_error_call)
except grpc.FutureTimeoutError:
raise future.TimeoutError()
except grpc.FutureCancelledError:
raise future.CancelledError()
def exception(self, timeout=None):
try:
rpc_error_call = self._future.exception(timeout=timeout)
if rpc_error_call is None:
return None
else:
return _abortion_error(rpc_error_call)
except grpc.FutureTimeoutError:
raise future.TimeoutError()
except grpc.FutureCancelledError:
raise future.CancelledError()
def traceback(self, timeout=None):
try:
return self._future.traceback(timeout=timeout)
except grpc.FutureTimeoutError:
raise future.TimeoutError()
except grpc.FutureCancelledError:
raise future.CancelledError()
def add_done_callback(self, fn):
self._future.add_done_callback(lambda ignored_callback: fn(self))
def __iter__(self):
return self
def _next(self):
try:
return next(self._iterator)
except grpc.RpcError as rpc_error_call:
raise _abortion_error(rpc_error_call)
def __next__(self):
return self._next()
def next(self):
return self._next()
def is_active(self):
return self._call.is_active()
def time_remaining(self):
return self._call.time_remaining()
def add_abortion_callback(self, abortion_callback):
def done_callback():
if self.code() is not grpc.StatusCode.OK:
abortion_callback(_abortion(self._call))
registered = self._call.add_callback(done_callback)
return None if registered else done_callback()
def protocol_context(self):
return _InvocationProtocolContext()
def initial_metadata(self):
return _metadata.beta(self._call.initial_metadata())
def terminal_metadata(self):
return _metadata.beta(self._call.terminal_metadata())
def code(self):
return self._call.code()
def details(self):
return self._call.details()
def _blocking_unary_unary(
channel,
group,
method,
timeout,
with_call,
protocol_options,
metadata,
metadata_transformer,
request,
request_serializer,
response_deserializer,
):
try:
multi_callable = channel.unary_unary(
_common.fully_qualified_method(group, method),
request_serializer=request_serializer,
response_deserializer=response_deserializer,
)
effective_metadata = _effective_metadata(metadata, metadata_transformer)
if with_call:
response, call = multi_callable.with_call(
request,
timeout=timeout,
metadata=_metadata.unbeta(effective_metadata),
credentials=_credentials(protocol_options),
)
return response, _Rendezvous(None, None, call)
else:
return multi_callable(
request,
timeout=timeout,
metadata=_metadata.unbeta(effective_metadata),
credentials=_credentials(protocol_options),
)
except grpc.RpcError as rpc_error_call:
raise _abortion_error(rpc_error_call)
def _future_unary_unary(
channel,
group,
method,
timeout,
protocol_options,
metadata,
metadata_transformer,
request,
request_serializer,
response_deserializer,
):
multi_callable = channel.unary_unary(
_common.fully_qualified_method(group, method),
request_serializer=request_serializer,
response_deserializer=response_deserializer,
)
effective_metadata = _effective_metadata(metadata, metadata_transformer)
response_future = multi_callable.future(
request,
timeout=timeout,
metadata=_metadata.unbeta(effective_metadata),
credentials=_credentials(protocol_options),
)
return _Rendezvous(response_future, None, response_future)
def _unary_stream(
channel,
group,
method,
timeout,
protocol_options,
metadata,
metadata_transformer,
request,
request_serializer,
response_deserializer,
):
multi_callable = channel.unary_stream(
_common.fully_qualified_method(group, method),
request_serializer=request_serializer,
response_deserializer=response_deserializer,
)
effective_metadata = _effective_metadata(metadata, metadata_transformer)
response_iterator = multi_callable(
request,
timeout=timeout,
metadata=_metadata.unbeta(effective_metadata),
credentials=_credentials(protocol_options),
)
return _Rendezvous(None, response_iterator, response_iterator)
def _blocking_stream_unary(
channel,
group,
method,
timeout,
with_call,
protocol_options,
metadata,
metadata_transformer,
request_iterator,
request_serializer,
response_deserializer,
):
try:
multi_callable = channel.stream_unary(
_common.fully_qualified_method(group, method),
request_serializer=request_serializer,
response_deserializer=response_deserializer,
)
effective_metadata = _effective_metadata(metadata, metadata_transformer)
if with_call:
response, call = multi_callable.with_call(
request_iterator,
timeout=timeout,
metadata=_metadata.unbeta(effective_metadata),
credentials=_credentials(protocol_options),
)
return response, _Rendezvous(None, None, call)
else:
return multi_callable(
request_iterator,
timeout=timeout,
metadata=_metadata.unbeta(effective_metadata),
credentials=_credentials(protocol_options),
)
except grpc.RpcError as rpc_error_call:
raise _abortion_error(rpc_error_call)
def _future_stream_unary(
channel,
group,
method,
timeout,
protocol_options,
metadata,
metadata_transformer,
request_iterator,
request_serializer,
response_deserializer,
):
multi_callable = channel.stream_unary(
_common.fully_qualified_method(group, method),
request_serializer=request_serializer,
response_deserializer=response_deserializer,
)
effective_metadata = _effective_metadata(metadata, metadata_transformer)
response_future = multi_callable.future(
request_iterator,
timeout=timeout,
metadata=_metadata.unbeta(effective_metadata),
credentials=_credentials(protocol_options),
)
return _Rendezvous(response_future, None, response_future)
def _stream_stream(
channel,
group,
method,
timeout,
protocol_options,
metadata,
metadata_transformer,
request_iterator,
request_serializer,
response_deserializer,
):
multi_callable = channel.stream_stream(
_common.fully_qualified_method(group, method),
request_serializer=request_serializer,
response_deserializer=response_deserializer,
)
effective_metadata = _effective_metadata(metadata, metadata_transformer)
response_iterator = multi_callable(
request_iterator,
timeout=timeout,
metadata=_metadata.unbeta(effective_metadata),
credentials=_credentials(protocol_options),
)
return _Rendezvous(None, response_iterator, response_iterator)
class _UnaryUnaryMultiCallable(face.UnaryUnaryMultiCallable):
def __init__(
self,
channel,
group,
method,
metadata_transformer,
request_serializer,
response_deserializer,
):
self._channel = channel
self._group = group
self._method = method
self._metadata_transformer = metadata_transformer
self._request_serializer = request_serializer
self._response_deserializer = response_deserializer
def __call__(
self,
request,
timeout,
metadata=None,
with_call=False,
protocol_options=None,
):
return _blocking_unary_unary(
self._channel,
self._group,
self._method,
timeout,
with_call,
protocol_options,
metadata,
self._metadata_transformer,
request,
self._request_serializer,
self._response_deserializer,
)
def future(self, request, timeout, metadata=None, protocol_options=None):
return _future_unary_unary(
self._channel,
self._group,
self._method,
timeout,
protocol_options,
metadata,
self._metadata_transformer,
request,
self._request_serializer,
self._response_deserializer,
)
def event(
self,
request,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None,
):
raise NotImplementedError()
class _UnaryStreamMultiCallable(face.UnaryStreamMultiCallable):
def __init__(
self,
channel,
group,
method,
metadata_transformer,
request_serializer,
response_deserializer,
):
self._channel = channel
self._group = group
self._method = method
self._metadata_transformer = metadata_transformer
self._request_serializer = request_serializer
self._response_deserializer = response_deserializer
def __call__(self, request, timeout, metadata=None, protocol_options=None):
return _unary_stream(
self._channel,
self._group,
self._method,
timeout,
protocol_options,
metadata,
self._metadata_transformer,
request,
self._request_serializer,
self._response_deserializer,
)
def event(
self,
request,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None,
):
raise NotImplementedError()
class _StreamUnaryMultiCallable(face.StreamUnaryMultiCallable):
def __init__(
self,
channel,
group,
method,
metadata_transformer,
request_serializer,
response_deserializer,
):
self._channel = channel
self._group = group
self._method = method
self._metadata_transformer = metadata_transformer
self._request_serializer = request_serializer
self._response_deserializer = response_deserializer
def __call__(
self,
request_iterator,
timeout,
metadata=None,
with_call=False,
protocol_options=None,
):
return _blocking_stream_unary(
self._channel,
self._group,
self._method,
timeout,
with_call,
protocol_options,
metadata,
self._metadata_transformer,
request_iterator,
self._request_serializer,
self._response_deserializer,
)
def future(
self, request_iterator, timeout, metadata=None, protocol_options=None
):
return _future_stream_unary(
self._channel,
self._group,
self._method,
timeout,
protocol_options,
metadata,
self._metadata_transformer,
request_iterator,
self._request_serializer,
self._response_deserializer,
)
def event(
self,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None,
):
raise NotImplementedError()
class _StreamStreamMultiCallable(face.StreamStreamMultiCallable):
def __init__(
self,
channel,
group,
method,
metadata_transformer,
request_serializer,
response_deserializer,
):
self._channel = channel
self._group = group
self._method = method
self._metadata_transformer = metadata_transformer
self._request_serializer = request_serializer
self._response_deserializer = response_deserializer
def __call__(
self, request_iterator, timeout, metadata=None, protocol_options=None
):
return _stream_stream(
self._channel,
self._group,
self._method,
timeout,
protocol_options,
metadata,
self._metadata_transformer,
request_iterator,
self._request_serializer,
self._response_deserializer,
)
def event(
self,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None,
):
raise NotImplementedError()
class _GenericStub(face.GenericStub):
def __init__(
self,
channel,
metadata_transformer,
request_serializers,
response_deserializers,
):
self._channel = channel
self._metadata_transformer = metadata_transformer
self._request_serializers = request_serializers or {}
self._response_deserializers = response_deserializers or {}
def blocking_unary_unary(
self,
group,
method,
request,
timeout,
metadata=None,
with_call=None,
protocol_options=None,
):
request_serializer = self._request_serializers.get(
(
group,
method,
)
)
response_deserializer = self._response_deserializers.get(
(
group,
method,
)
)
return _blocking_unary_unary(
self._channel,
group,
method,
timeout,
with_call,
protocol_options,
metadata,
self._metadata_transformer,
request,
request_serializer,
response_deserializer,
)
def future_unary_unary(
self,
group,
method,
request,
timeout,
metadata=None,
protocol_options=None,
):
request_serializer = self._request_serializers.get(
(
group,
method,
)
)
response_deserializer = self._response_deserializers.get(
(
group,
method,
)
)
return _future_unary_unary(
self._channel,
group,
method,
timeout,
protocol_options,
metadata,
self._metadata_transformer,
request,
request_serializer,
response_deserializer,
)
def inline_unary_stream(
self,
group,
method,
request,
timeout,
metadata=None,
protocol_options=None,
):
request_serializer = self._request_serializers.get(
(
group,
method,
)
)
response_deserializer = self._response_deserializers.get(
(
group,
method,
)
)
return _unary_stream(
self._channel,
group,
method,
timeout,
protocol_options,
metadata,
self._metadata_transformer,
request,
request_serializer,
response_deserializer,
)
def blocking_stream_unary(
self,
group,
method,
request_iterator,
timeout,
metadata=None,
with_call=None,
protocol_options=None,
):
request_serializer = self._request_serializers.get(
(
group,
method,
)
)
response_deserializer = self._response_deserializers.get(
(
group,
method,
)
)
return _blocking_stream_unary(
self._channel,
group,
method,
timeout,
with_call,
protocol_options,
metadata,
self._metadata_transformer,
request_iterator,
request_serializer,
response_deserializer,
)
def future_stream_unary(
self,
group,
method,
request_iterator,
timeout,
metadata=None,
protocol_options=None,
):
request_serializer = self._request_serializers.get(
(
group,
method,
)
)
response_deserializer = self._response_deserializers.get(
(
group,
method,
)
)
return _future_stream_unary(
self._channel,
group,
method,
timeout,
protocol_options,
metadata,
self._metadata_transformer,
request_iterator,
request_serializer,
response_deserializer,
)
def inline_stream_stream(
self,
group,
method,
request_iterator,
timeout,
metadata=None,
protocol_options=None,
):
request_serializer = self._request_serializers.get(
(
group,
method,
)
)
response_deserializer = self._response_deserializers.get(
(
group,
method,
)
)
return _stream_stream(
self._channel,
group,
method,
timeout,
protocol_options,
metadata,
self._metadata_transformer,
request_iterator,
request_serializer,
response_deserializer,
)
def event_unary_unary(
self,
group,
method,
request,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None,
):
raise NotImplementedError()
def event_unary_stream(
self,
group,
method,
request,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None,
):
raise NotImplementedError()
def event_stream_unary(
self,
group,
method,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None,
):
raise NotImplementedError()
def event_stream_stream(
self,
group,
method,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None,
):
raise NotImplementedError()
def unary_unary(self, group, method):
request_serializer = self._request_serializers.get(
(
group,
method,
)
)
response_deserializer = self._response_deserializers.get(
(
group,
method,
)
)
return _UnaryUnaryMultiCallable(
self._channel,
group,
method,
self._metadata_transformer,
request_serializer,
response_deserializer,
)
def unary_stream(self, group, method):
request_serializer = self._request_serializers.get(
(
group,
method,
)
)
response_deserializer = self._response_deserializers.get(
(
group,
method,
)
)
return _UnaryStreamMultiCallable(
self._channel,
group,
method,
self._metadata_transformer,
request_serializer,
response_deserializer,
)
def stream_unary(self, group, method):
request_serializer = self._request_serializers.get(
(
group,
method,
)
)
response_deserializer = self._response_deserializers.get(
(
group,
method,
)
)
return _StreamUnaryMultiCallable(
self._channel,
group,
method,
self._metadata_transformer,
request_serializer,
response_deserializer,
)
def stream_stream(self, group, method):
request_serializer = self._request_serializers.get(
(
group,
method,
)
)
response_deserializer = self._response_deserializers.get(
(
group,
method,
)
)
return _StreamStreamMultiCallable(
self._channel,
group,
method,
self._metadata_transformer,
request_serializer,
response_deserializer,
)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
return False
class _DynamicStub(face.DynamicStub):
def __init__(self, backing_generic_stub, group, cardinalities):
self._generic_stub = backing_generic_stub
self._group = group
self._cardinalities = cardinalities
def __getattr__(self, attr):
method_cardinality = self._cardinalities.get(attr)
if method_cardinality is cardinality.Cardinality.UNARY_UNARY:
return self._generic_stub.unary_unary(self._group, attr)
elif method_cardinality is cardinality.Cardinality.UNARY_STREAM:
return self._generic_stub.unary_stream(self._group, attr)
elif method_cardinality is cardinality.Cardinality.STREAM_UNARY:
return self._generic_stub.stream_unary(self._group, attr)
elif method_cardinality is cardinality.Cardinality.STREAM_STREAM:
return self._generic_stub.stream_stream(self._group, attr)
else:
raise AttributeError(
'_DynamicStub object has no attribute "%s"!' % attr
)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
return False
def generic_stub(
channel,
host,
metadata_transformer,
request_serializers,
response_deserializers,
):
return _GenericStub(
channel,
metadata_transformer,
request_serializers,
response_deserializers,
)
def dynamic_stub(
channel,
service,
cardinalities,
host,
metadata_transformer,
request_serializers,
response_deserializers,
):
return _DynamicStub(
_GenericStub(
channel,
metadata_transformer,
request_serializers,
response_deserializers,
),
service,
cardinalities,
)
| 27,023
| 25.598425
| 82
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/beta/_server_adaptations.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Translates gRPC's server-side API into gRPC's server-side Beta API."""
import collections
import threading
import grpc
from grpc import _common
from grpc.beta import _metadata
from grpc.beta import interfaces
from grpc.framework.common import cardinality
from grpc.framework.common import style
from grpc.framework.foundation import abandonment
from grpc.framework.foundation import logging_pool
from grpc.framework.foundation import stream
from grpc.framework.interfaces.face import face
# pylint: disable=too-many-return-statements
_DEFAULT_POOL_SIZE = 8
class _ServerProtocolContext(interfaces.GRPCServicerContext):
def __init__(self, servicer_context):
self._servicer_context = servicer_context
def peer(self):
return self._servicer_context.peer()
def disable_next_response_compression(self):
pass # TODO(https://github.com/grpc/grpc/issues/4078): design, implement.
class _FaceServicerContext(face.ServicerContext):
def __init__(self, servicer_context):
self._servicer_context = servicer_context
def is_active(self):
return self._servicer_context.is_active()
def time_remaining(self):
return self._servicer_context.time_remaining()
def add_abortion_callback(self, abortion_callback):
raise NotImplementedError(
"add_abortion_callback no longer supported server-side!"
)
def cancel(self):
self._servicer_context.cancel()
def protocol_context(self):
return _ServerProtocolContext(self._servicer_context)
def invocation_metadata(self):
return _metadata.beta(self._servicer_context.invocation_metadata())
def initial_metadata(self, initial_metadata):
self._servicer_context.send_initial_metadata(
_metadata.unbeta(initial_metadata)
)
def terminal_metadata(self, terminal_metadata):
self._servicer_context.set_terminal_metadata(
_metadata.unbeta(terminal_metadata)
)
def code(self, code):
self._servicer_context.set_code(code)
def details(self, details):
self._servicer_context.set_details(details)
def _adapt_unary_request_inline(unary_request_inline):
def adaptation(request, servicer_context):
return unary_request_inline(
request, _FaceServicerContext(servicer_context)
)
return adaptation
def _adapt_stream_request_inline(stream_request_inline):
def adaptation(request_iterator, servicer_context):
return stream_request_inline(
request_iterator, _FaceServicerContext(servicer_context)
)
return adaptation
class _Callback(stream.Consumer):
def __init__(self):
self._condition = threading.Condition()
self._values = []
self._terminated = False
self._cancelled = False
def consume(self, value):
with self._condition:
self._values.append(value)
self._condition.notify_all()
def terminate(self):
with self._condition:
self._terminated = True
self._condition.notify_all()
def consume_and_terminate(self, value):
with self._condition:
self._values.append(value)
self._terminated = True
self._condition.notify_all()
def cancel(self):
with self._condition:
self._cancelled = True
self._condition.notify_all()
def draw_one_value(self):
with self._condition:
while True:
if self._cancelled:
raise abandonment.Abandoned()
elif self._values:
return self._values.pop(0)
elif self._terminated:
return None
else:
self._condition.wait()
def draw_all_values(self):
with self._condition:
while True:
if self._cancelled:
raise abandonment.Abandoned()
elif self._terminated:
all_values = tuple(self._values)
self._values = None
return all_values
else:
self._condition.wait()
def _run_request_pipe_thread(
request_iterator, request_consumer, servicer_context
):
thread_joined = threading.Event()
def pipe_requests():
for request in request_iterator:
if not servicer_context.is_active() or thread_joined.is_set():
return
request_consumer.consume(request)
if not servicer_context.is_active() or thread_joined.is_set():
return
request_consumer.terminate()
request_pipe_thread = threading.Thread(target=pipe_requests)
request_pipe_thread.daemon = True
request_pipe_thread.start()
def _adapt_unary_unary_event(unary_unary_event):
def adaptation(request, servicer_context):
callback = _Callback()
if not servicer_context.add_callback(callback.cancel):
raise abandonment.Abandoned()
unary_unary_event(
request,
callback.consume_and_terminate,
_FaceServicerContext(servicer_context),
)
return callback.draw_all_values()[0]
return adaptation
def _adapt_unary_stream_event(unary_stream_event):
def adaptation(request, servicer_context):
callback = _Callback()
if not servicer_context.add_callback(callback.cancel):
raise abandonment.Abandoned()
unary_stream_event(
request, callback, _FaceServicerContext(servicer_context)
)
while True:
response = callback.draw_one_value()
if response is None:
return
else:
yield response
return adaptation
def _adapt_stream_unary_event(stream_unary_event):
def adaptation(request_iterator, servicer_context):
callback = _Callback()
if not servicer_context.add_callback(callback.cancel):
raise abandonment.Abandoned()
request_consumer = stream_unary_event(
callback.consume_and_terminate,
_FaceServicerContext(servicer_context),
)
_run_request_pipe_thread(
request_iterator, request_consumer, servicer_context
)
return callback.draw_all_values()[0]
return adaptation
def _adapt_stream_stream_event(stream_stream_event):
def adaptation(request_iterator, servicer_context):
callback = _Callback()
if not servicer_context.add_callback(callback.cancel):
raise abandonment.Abandoned()
request_consumer = stream_stream_event(
callback, _FaceServicerContext(servicer_context)
)
_run_request_pipe_thread(
request_iterator, request_consumer, servicer_context
)
while True:
response = callback.draw_one_value()
if response is None:
return
else:
yield response
return adaptation
class _SimpleMethodHandler(
collections.namedtuple(
"_MethodHandler",
(
"request_streaming",
"response_streaming",
"request_deserializer",
"response_serializer",
"unary_unary",
"unary_stream",
"stream_unary",
"stream_stream",
),
),
grpc.RpcMethodHandler,
):
pass
def _simple_method_handler(
implementation, request_deserializer, response_serializer
):
if implementation.style is style.Service.INLINE:
if implementation.cardinality is cardinality.Cardinality.UNARY_UNARY:
return _SimpleMethodHandler(
False,
False,
request_deserializer,
response_serializer,
_adapt_unary_request_inline(implementation.unary_unary_inline),
None,
None,
None,
)
elif implementation.cardinality is cardinality.Cardinality.UNARY_STREAM:
return _SimpleMethodHandler(
False,
True,
request_deserializer,
response_serializer,
None,
_adapt_unary_request_inline(implementation.unary_stream_inline),
None,
None,
)
elif implementation.cardinality is cardinality.Cardinality.STREAM_UNARY:
return _SimpleMethodHandler(
True,
False,
request_deserializer,
response_serializer,
None,
None,
_adapt_stream_request_inline(
implementation.stream_unary_inline
),
None,
)
elif (
implementation.cardinality is cardinality.Cardinality.STREAM_STREAM
):
return _SimpleMethodHandler(
True,
True,
request_deserializer,
response_serializer,
None,
None,
None,
_adapt_stream_request_inline(
implementation.stream_stream_inline
),
)
elif implementation.style is style.Service.EVENT:
if implementation.cardinality is cardinality.Cardinality.UNARY_UNARY:
return _SimpleMethodHandler(
False,
False,
request_deserializer,
response_serializer,
_adapt_unary_unary_event(implementation.unary_unary_event),
None,
None,
None,
)
elif implementation.cardinality is cardinality.Cardinality.UNARY_STREAM:
return _SimpleMethodHandler(
False,
True,
request_deserializer,
response_serializer,
None,
_adapt_unary_stream_event(implementation.unary_stream_event),
None,
None,
)
elif implementation.cardinality is cardinality.Cardinality.STREAM_UNARY:
return _SimpleMethodHandler(
True,
False,
request_deserializer,
response_serializer,
None,
None,
_adapt_stream_unary_event(implementation.stream_unary_event),
None,
)
elif (
implementation.cardinality is cardinality.Cardinality.STREAM_STREAM
):
return _SimpleMethodHandler(
True,
True,
request_deserializer,
response_serializer,
None,
None,
None,
_adapt_stream_stream_event(implementation.stream_stream_event),
)
raise ValueError()
def _flatten_method_pair_map(method_pair_map):
method_pair_map = method_pair_map or {}
flat_map = {}
for method_pair in method_pair_map:
method = _common.fully_qualified_method(method_pair[0], method_pair[1])
flat_map[method] = method_pair_map[method_pair]
return flat_map
class _GenericRpcHandler(grpc.GenericRpcHandler):
def __init__(
self,
method_implementations,
multi_method_implementation,
request_deserializers,
response_serializers,
):
self._method_implementations = _flatten_method_pair_map(
method_implementations
)
self._request_deserializers = _flatten_method_pair_map(
request_deserializers
)
self._response_serializers = _flatten_method_pair_map(
response_serializers
)
self._multi_method_implementation = multi_method_implementation
def service(self, handler_call_details):
method_implementation = self._method_implementations.get(
handler_call_details.method
)
if method_implementation is not None:
return _simple_method_handler(
method_implementation,
self._request_deserializers.get(handler_call_details.method),
self._response_serializers.get(handler_call_details.method),
)
elif self._multi_method_implementation is None:
return None
else:
try:
return None # TODO(nathaniel): call the multimethod.
except face.NoSuchMethodError:
return None
class _Server(interfaces.Server):
def __init__(self, grpc_server):
self._grpc_server = grpc_server
def add_insecure_port(self, address):
return self._grpc_server.add_insecure_port(address)
def add_secure_port(self, address, server_credentials):
return self._grpc_server.add_secure_port(address, server_credentials)
def start(self):
self._grpc_server.start()
def stop(self, grace):
return self._grpc_server.stop(grace)
def __enter__(self):
self._grpc_server.start()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._grpc_server.stop(None)
return False
def server(
service_implementations,
multi_method_implementation,
request_deserializers,
response_serializers,
thread_pool,
thread_pool_size,
):
generic_rpc_handler = _GenericRpcHandler(
service_implementations,
multi_method_implementation,
request_deserializers,
response_serializers,
)
if thread_pool is None:
effective_thread_pool = logging_pool.pool(
_DEFAULT_POOL_SIZE if thread_pool_size is None else thread_pool_size
)
else:
effective_thread_pool = thread_pool
return _Server(
grpc.server(effective_thread_pool, handlers=(generic_rpc_handler,))
)
| 14,611
| 30.356223
| 82
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/beta/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/aio/_call.py
|
# Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Invocation-side implementation of gRPC Asyncio Python."""
import asyncio
import enum
from functools import partial
import inspect
import logging
import traceback
from typing import Any, AsyncIterator, Generator, Generic, Optional, Tuple
import grpc
from grpc import _common
from grpc._cython import cygrpc
from . import _base_call
from ._metadata import Metadata
from ._typing import DeserializingFunction
from ._typing import DoneCallbackType
from ._typing import MetadatumType
from ._typing import RequestIterableType
from ._typing import RequestType
from ._typing import ResponseType
from ._typing import SerializingFunction
__all__ = "AioRpcError", "Call", "UnaryUnaryCall", "UnaryStreamCall"
_LOCAL_CANCELLATION_DETAILS = "Locally cancelled by application!"
_GC_CANCELLATION_DETAILS = "Cancelled upon garbage collection!"
_RPC_ALREADY_FINISHED_DETAILS = "RPC already finished."
_RPC_HALF_CLOSED_DETAILS = 'RPC is half closed after calling "done_writing".'
_API_STYLE_ERROR = (
"The iterator and read/write APIs may not be mixed on a single RPC."
)
_OK_CALL_REPRESENTATION = (
'<{} of RPC that terminated with:\n\tstatus = {}\n\tdetails = "{}"\n>'
)
_NON_OK_CALL_REPRESENTATION = (
"<{} of RPC that terminated with:\n"
"\tstatus = {}\n"
'\tdetails = "{}"\n'
'\tdebug_error_string = "{}"\n'
">"
)
_LOGGER = logging.getLogger(__name__)
class AioRpcError(grpc.RpcError):
"""An implementation of RpcError to be used by the asynchronous API.
Raised RpcError is a snapshot of the final status of the RPC, values are
determined. Hence, its methods no longer needs to be coroutines.
"""
_code: grpc.StatusCode
_details: Optional[str]
_initial_metadata: Optional[Metadata]
_trailing_metadata: Optional[Metadata]
_debug_error_string: Optional[str]
def __init__(
self,
code: grpc.StatusCode,
initial_metadata: Metadata,
trailing_metadata: Metadata,
details: Optional[str] = None,
debug_error_string: Optional[str] = None,
) -> None:
"""Constructor.
Args:
code: The status code with which the RPC has been finalized.
details: Optional details explaining the reason of the error.
initial_metadata: Optional initial metadata that could be sent by the
Server.
trailing_metadata: Optional metadata that could be sent by the Server.
"""
super().__init__()
self._code = code
self._details = details
self._initial_metadata = initial_metadata
self._trailing_metadata = trailing_metadata
self._debug_error_string = debug_error_string
def code(self) -> grpc.StatusCode:
"""Accesses the status code sent by the server.
Returns:
The `grpc.StatusCode` status code.
"""
return self._code
def details(self) -> Optional[str]:
"""Accesses the details sent by the server.
Returns:
The description of the error.
"""
return self._details
def initial_metadata(self) -> Metadata:
"""Accesses the initial metadata sent by the server.
Returns:
The initial metadata received.
"""
return self._initial_metadata
def trailing_metadata(self) -> Metadata:
"""Accesses the trailing metadata sent by the server.
Returns:
The trailing metadata received.
"""
return self._trailing_metadata
def debug_error_string(self) -> str:
"""Accesses the debug error string sent by the server.
Returns:
The debug error string received.
"""
return self._debug_error_string
def _repr(self) -> str:
"""Assembles the error string for the RPC error."""
return _NON_OK_CALL_REPRESENTATION.format(
self.__class__.__name__,
self._code,
self._details,
self._debug_error_string,
)
def __repr__(self) -> str:
return self._repr()
def __str__(self) -> str:
return self._repr()
def _create_rpc_error(
initial_metadata: Metadata, status: cygrpc.AioRpcStatus
) -> AioRpcError:
return AioRpcError(
_common.CYGRPC_STATUS_CODE_TO_STATUS_CODE[status.code()],
Metadata.from_tuple(initial_metadata),
Metadata.from_tuple(status.trailing_metadata()),
details=status.details(),
debug_error_string=status.debug_error_string(),
)
class Call:
"""Base implementation of client RPC Call object.
Implements logic around final status, metadata and cancellation.
"""
_loop: asyncio.AbstractEventLoop
_code: grpc.StatusCode
_cython_call: cygrpc._AioCall
_metadata: Tuple[MetadatumType, ...]
_request_serializer: SerializingFunction
_response_deserializer: DeserializingFunction
def __init__(
self,
cython_call: cygrpc._AioCall,
metadata: Metadata,
request_serializer: SerializingFunction,
response_deserializer: DeserializingFunction,
loop: asyncio.AbstractEventLoop,
) -> None:
self._loop = loop
self._cython_call = cython_call
self._metadata = tuple(metadata)
self._request_serializer = request_serializer
self._response_deserializer = response_deserializer
def __del__(self) -> None:
# The '_cython_call' object might be destructed before Call object
if hasattr(self, "_cython_call"):
if not self._cython_call.done():
self._cancel(_GC_CANCELLATION_DETAILS)
def cancelled(self) -> bool:
return self._cython_call.cancelled()
def _cancel(self, details: str) -> bool:
"""Forwards the application cancellation reasoning."""
if not self._cython_call.done():
self._cython_call.cancel(details)
return True
else:
return False
def cancel(self) -> bool:
return self._cancel(_LOCAL_CANCELLATION_DETAILS)
def done(self) -> bool:
return self._cython_call.done()
def add_done_callback(self, callback: DoneCallbackType) -> None:
cb = partial(callback, self)
self._cython_call.add_done_callback(cb)
def time_remaining(self) -> Optional[float]:
return self._cython_call.time_remaining()
async def initial_metadata(self) -> Metadata:
raw_metadata_tuple = await self._cython_call.initial_metadata()
return Metadata.from_tuple(raw_metadata_tuple)
async def trailing_metadata(self) -> Metadata:
raw_metadata_tuple = (
await self._cython_call.status()
).trailing_metadata()
return Metadata.from_tuple(raw_metadata_tuple)
async def code(self) -> grpc.StatusCode:
cygrpc_code = (await self._cython_call.status()).code()
return _common.CYGRPC_STATUS_CODE_TO_STATUS_CODE[cygrpc_code]
async def details(self) -> str:
return (await self._cython_call.status()).details()
async def debug_error_string(self) -> str:
return (await self._cython_call.status()).debug_error_string()
async def _raise_for_status(self) -> None:
if self._cython_call.is_locally_cancelled():
raise asyncio.CancelledError()
code = await self.code()
if code != grpc.StatusCode.OK:
raise _create_rpc_error(
await self.initial_metadata(), await self._cython_call.status()
)
def _repr(self) -> str:
return repr(self._cython_call)
def __repr__(self) -> str:
return self._repr()
def __str__(self) -> str:
return self._repr()
class _APIStyle(enum.IntEnum):
UNKNOWN = 0
ASYNC_GENERATOR = 1
READER_WRITER = 2
class _UnaryResponseMixin(Call, Generic[ResponseType]):
_call_response: asyncio.Task
def _init_unary_response_mixin(self, response_task: asyncio.Task):
self._call_response = response_task
def cancel(self) -> bool:
if super().cancel():
self._call_response.cancel()
return True
else:
return False
def __await__(self) -> Generator[Any, None, ResponseType]:
"""Wait till the ongoing RPC request finishes."""
try:
response = yield from self._call_response
except asyncio.CancelledError:
# Even if we caught all other CancelledError, there is still
# this corner case. If the application cancels immediately after
# the Call object is created, we will observe this
# `CancelledError`.
if not self.cancelled():
self.cancel()
raise
# NOTE(lidiz) If we raise RpcError in the task, and users doesn't
# 'await' on it. AsyncIO will log 'Task exception was never retrieved'.
# Instead, if we move the exception raising here, the spam stops.
# Unfortunately, there can only be one 'yield from' in '__await__'. So,
# we need to access the private instance variable.
if response is cygrpc.EOF:
if self._cython_call.is_locally_cancelled():
raise asyncio.CancelledError()
else:
raise _create_rpc_error(
self._cython_call._initial_metadata,
self._cython_call._status,
)
else:
return response
class _StreamResponseMixin(Call):
_message_aiter: AsyncIterator[ResponseType]
_preparation: asyncio.Task
_response_style: _APIStyle
def _init_stream_response_mixin(self, preparation: asyncio.Task):
self._message_aiter = None
self._preparation = preparation
self._response_style = _APIStyle.UNKNOWN
def _update_response_style(self, style: _APIStyle):
if self._response_style is _APIStyle.UNKNOWN:
self._response_style = style
elif self._response_style is not style:
raise cygrpc.UsageError(_API_STYLE_ERROR)
def cancel(self) -> bool:
if super().cancel():
self._preparation.cancel()
return True
else:
return False
async def _fetch_stream_responses(self) -> ResponseType:
message = await self._read()
while message is not cygrpc.EOF:
yield message
message = await self._read()
# If the read operation failed, Core should explain why.
await self._raise_for_status()
def __aiter__(self) -> AsyncIterator[ResponseType]:
self._update_response_style(_APIStyle.ASYNC_GENERATOR)
if self._message_aiter is None:
self._message_aiter = self._fetch_stream_responses()
return self._message_aiter
async def _read(self) -> ResponseType:
# Wait for the request being sent
await self._preparation
# Reads response message from Core
try:
raw_response = await self._cython_call.receive_serialized_message()
except asyncio.CancelledError:
if not self.cancelled():
self.cancel()
raise
if raw_response is cygrpc.EOF:
return cygrpc.EOF
else:
return _common.deserialize(
raw_response, self._response_deserializer
)
async def read(self) -> ResponseType:
if self.done():
await self._raise_for_status()
return cygrpc.EOF
self._update_response_style(_APIStyle.READER_WRITER)
response_message = await self._read()
if response_message is cygrpc.EOF:
# If the read operation failed, Core should explain why.
await self._raise_for_status()
return response_message
class _StreamRequestMixin(Call):
_metadata_sent: asyncio.Event
_done_writing_flag: bool
_async_request_poller: Optional[asyncio.Task]
_request_style: _APIStyle
def _init_stream_request_mixin(
self, request_iterator: Optional[RequestIterableType]
):
self._metadata_sent = asyncio.Event()
self._done_writing_flag = False
# If user passes in an async iterator, create a consumer Task.
if request_iterator is not None:
self._async_request_poller = self._loop.create_task(
self._consume_request_iterator(request_iterator)
)
self._request_style = _APIStyle.ASYNC_GENERATOR
else:
self._async_request_poller = None
self._request_style = _APIStyle.READER_WRITER
def _raise_for_different_style(self, style: _APIStyle):
if self._request_style is not style:
raise cygrpc.UsageError(_API_STYLE_ERROR)
def cancel(self) -> bool:
if super().cancel():
if self._async_request_poller is not None:
self._async_request_poller.cancel()
return True
else:
return False
def _metadata_sent_observer(self):
self._metadata_sent.set()
async def _consume_request_iterator(
self, request_iterator: RequestIterableType
) -> None:
try:
if inspect.isasyncgen(request_iterator) or hasattr(
request_iterator, "__aiter__"
):
async for request in request_iterator:
try:
await self._write(request)
except AioRpcError as rpc_error:
_LOGGER.debug(
(
"Exception while consuming the"
" request_iterator: %s"
),
rpc_error,
)
return
else:
for request in request_iterator:
try:
await self._write(request)
except AioRpcError as rpc_error:
_LOGGER.debug(
(
"Exception while consuming the"
" request_iterator: %s"
),
rpc_error,
)
return
await self._done_writing()
except: # pylint: disable=bare-except
# Client iterators can raise exceptions, which we should handle by
# cancelling the RPC and logging the client's error. No exceptions
# should escape this function.
_LOGGER.debug(
"Client request_iterator raised exception:\n%s",
traceback.format_exc(),
)
self.cancel()
async def _write(self, request: RequestType) -> None:
if self.done():
raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS)
if self._done_writing_flag:
raise asyncio.InvalidStateError(_RPC_HALF_CLOSED_DETAILS)
if not self._metadata_sent.is_set():
await self._metadata_sent.wait()
if self.done():
await self._raise_for_status()
serialized_request = _common.serialize(
request, self._request_serializer
)
try:
await self._cython_call.send_serialized_message(serialized_request)
except cygrpc.InternalError:
await self._raise_for_status()
except asyncio.CancelledError:
if not self.cancelled():
self.cancel()
raise
async def _done_writing(self) -> None:
if self.done():
# If the RPC is finished, do nothing.
return
if not self._done_writing_flag:
# If the done writing is not sent before, try to send it.
self._done_writing_flag = True
try:
await self._cython_call.send_receive_close()
except asyncio.CancelledError:
if not self.cancelled():
self.cancel()
raise
async def write(self, request: RequestType) -> None:
self._raise_for_different_style(_APIStyle.READER_WRITER)
await self._write(request)
async def done_writing(self) -> None:
"""Signal peer that client is done writing.
This method is idempotent.
"""
self._raise_for_different_style(_APIStyle.READER_WRITER)
await self._done_writing()
async def wait_for_connection(self) -> None:
await self._metadata_sent.wait()
if self.done():
await self._raise_for_status()
class UnaryUnaryCall(_UnaryResponseMixin, Call, _base_call.UnaryUnaryCall):
"""Object for managing unary-unary RPC calls.
Returned when an instance of `UnaryUnaryMultiCallable` object is called.
"""
_request: RequestType
_invocation_task: asyncio.Task
# pylint: disable=too-many-arguments
def __init__(
self,
request: RequestType,
deadline: Optional[float],
metadata: Metadata,
credentials: Optional[grpc.CallCredentials],
wait_for_ready: Optional[bool],
channel: cygrpc.AioChannel,
method: bytes,
request_serializer: SerializingFunction,
response_deserializer: DeserializingFunction,
loop: asyncio.AbstractEventLoop,
) -> None:
super().__init__(
channel.call(method, deadline, credentials, wait_for_ready),
metadata,
request_serializer,
response_deserializer,
loop,
)
self._request = request
self._invocation_task = loop.create_task(self._invoke())
self._init_unary_response_mixin(self._invocation_task)
async def _invoke(self) -> ResponseType:
serialized_request = _common.serialize(
self._request, self._request_serializer
)
# NOTE(lidiz) asyncio.CancelledError is not a good transport for status,
# because the asyncio.Task class do not cache the exception object.
# https://github.com/python/cpython/blob/edad4d89e357c92f70c0324b937845d652b20afd/Lib/asyncio/tasks.py#L785
try:
serialized_response = await self._cython_call.unary_unary(
serialized_request, self._metadata
)
except asyncio.CancelledError:
if not self.cancelled():
self.cancel()
if self._cython_call.is_ok():
return _common.deserialize(
serialized_response, self._response_deserializer
)
else:
return cygrpc.EOF
async def wait_for_connection(self) -> None:
await self._invocation_task
if self.done():
await self._raise_for_status()
class UnaryStreamCall(_StreamResponseMixin, Call, _base_call.UnaryStreamCall):
"""Object for managing unary-stream RPC calls.
Returned when an instance of `UnaryStreamMultiCallable` object is called.
"""
_request: RequestType
_send_unary_request_task: asyncio.Task
# pylint: disable=too-many-arguments
def __init__(
self,
request: RequestType,
deadline: Optional[float],
metadata: Metadata,
credentials: Optional[grpc.CallCredentials],
wait_for_ready: Optional[bool],
channel: cygrpc.AioChannel,
method: bytes,
request_serializer: SerializingFunction,
response_deserializer: DeserializingFunction,
loop: asyncio.AbstractEventLoop,
) -> None:
super().__init__(
channel.call(method, deadline, credentials, wait_for_ready),
metadata,
request_serializer,
response_deserializer,
loop,
)
self._request = request
self._send_unary_request_task = loop.create_task(
self._send_unary_request()
)
self._init_stream_response_mixin(self._send_unary_request_task)
async def _send_unary_request(self) -> ResponseType:
serialized_request = _common.serialize(
self._request, self._request_serializer
)
try:
await self._cython_call.initiate_unary_stream(
serialized_request, self._metadata
)
except asyncio.CancelledError:
if not self.cancelled():
self.cancel()
raise
async def wait_for_connection(self) -> None:
await self._send_unary_request_task
if self.done():
await self._raise_for_status()
# pylint: disable=too-many-ancestors
class StreamUnaryCall(
_StreamRequestMixin, _UnaryResponseMixin, Call, _base_call.StreamUnaryCall
):
"""Object for managing stream-unary RPC calls.
Returned when an instance of `StreamUnaryMultiCallable` object is called.
"""
# pylint: disable=too-many-arguments
def __init__(
self,
request_iterator: Optional[RequestIterableType],
deadline: Optional[float],
metadata: Metadata,
credentials: Optional[grpc.CallCredentials],
wait_for_ready: Optional[bool],
channel: cygrpc.AioChannel,
method: bytes,
request_serializer: SerializingFunction,
response_deserializer: DeserializingFunction,
loop: asyncio.AbstractEventLoop,
) -> None:
super().__init__(
channel.call(method, deadline, credentials, wait_for_ready),
metadata,
request_serializer,
response_deserializer,
loop,
)
self._init_stream_request_mixin(request_iterator)
self._init_unary_response_mixin(loop.create_task(self._conduct_rpc()))
async def _conduct_rpc(self) -> ResponseType:
try:
serialized_response = await self._cython_call.stream_unary(
self._metadata, self._metadata_sent_observer
)
except asyncio.CancelledError:
if not self.cancelled():
self.cancel()
raise
if self._cython_call.is_ok():
return _common.deserialize(
serialized_response, self._response_deserializer
)
else:
return cygrpc.EOF
class StreamStreamCall(
_StreamRequestMixin, _StreamResponseMixin, Call, _base_call.StreamStreamCall
):
"""Object for managing stream-stream RPC calls.
Returned when an instance of `StreamStreamMultiCallable` object is called.
"""
_initializer: asyncio.Task
# pylint: disable=too-many-arguments
def __init__(
self,
request_iterator: Optional[RequestIterableType],
deadline: Optional[float],
metadata: Metadata,
credentials: Optional[grpc.CallCredentials],
wait_for_ready: Optional[bool],
channel: cygrpc.AioChannel,
method: bytes,
request_serializer: SerializingFunction,
response_deserializer: DeserializingFunction,
loop: asyncio.AbstractEventLoop,
) -> None:
super().__init__(
channel.call(method, deadline, credentials, wait_for_ready),
metadata,
request_serializer,
response_deserializer,
loop,
)
self._initializer = self._loop.create_task(self._prepare_rpc())
self._init_stream_request_mixin(request_iterator)
self._init_stream_response_mixin(self._initializer)
async def _prepare_rpc(self):
"""This method prepares the RPC for receiving/sending messages.
All other operations around the stream should only happen after the
completion of this method.
"""
try:
await self._cython_call.initiate_stream_stream(
self._metadata, self._metadata_sent_observer
)
except asyncio.CancelledError:
if not self.cancelled():
self.cancel()
# No need to raise RpcError here, because no one will `await` this task.
| 24,640
| 32.343708
| 115
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/aio/_interceptor.py
|
# Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interceptors implementation of gRPC Asyncio Python."""
from abc import ABCMeta
from abc import abstractmethod
import asyncio
import collections
import functools
from typing import (
AsyncIterable,
Awaitable,
Callable,
Iterator,
List,
Optional,
Sequence,
Union,
)
import grpc
from grpc._cython import cygrpc
from . import _base_call
from ._call import AioRpcError
from ._call import StreamStreamCall
from ._call import StreamUnaryCall
from ._call import UnaryStreamCall
from ._call import UnaryUnaryCall
from ._call import _API_STYLE_ERROR
from ._call import _RPC_ALREADY_FINISHED_DETAILS
from ._call import _RPC_HALF_CLOSED_DETAILS
from ._metadata import Metadata
from ._typing import DeserializingFunction
from ._typing import DoneCallbackType
from ._typing import RequestIterableType
from ._typing import RequestType
from ._typing import ResponseIterableType
from ._typing import ResponseType
from ._typing import SerializingFunction
from ._utils import _timeout_to_deadline
_LOCAL_CANCELLATION_DETAILS = "Locally cancelled by application!"
class ServerInterceptor(metaclass=ABCMeta):
"""Affords intercepting incoming RPCs on the service-side.
This is an EXPERIMENTAL API.
"""
@abstractmethod
async def intercept_service(
self,
continuation: Callable[
[grpc.HandlerCallDetails], Awaitable[grpc.RpcMethodHandler]
],
handler_call_details: grpc.HandlerCallDetails,
) -> grpc.RpcMethodHandler:
"""Intercepts incoming RPCs before handing them over to a handler.
Args:
continuation: A function that takes a HandlerCallDetails and
proceeds to invoke the next interceptor in the chain, if any,
or the RPC handler lookup logic, with the call details passed
as an argument, and returns an RpcMethodHandler instance if
the RPC is considered serviced, or None otherwise.
handler_call_details: A HandlerCallDetails describing the RPC.
Returns:
An RpcMethodHandler with which the RPC may be serviced if the
interceptor chooses to service this RPC, or None otherwise.
"""
class ClientCallDetails(
collections.namedtuple(
"ClientCallDetails",
("method", "timeout", "metadata", "credentials", "wait_for_ready"),
),
grpc.ClientCallDetails,
):
"""Describes an RPC to be invoked.
This is an EXPERIMENTAL API.
Args:
method: The method name of the RPC.
timeout: An optional duration of time in seconds to allow for the RPC.
metadata: Optional metadata to be transmitted to the service-side of
the RPC.
credentials: An optional CallCredentials for the RPC.
wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
"""
method: str
timeout: Optional[float]
metadata: Optional[Metadata]
credentials: Optional[grpc.CallCredentials]
wait_for_ready: Optional[bool]
class ClientInterceptor(metaclass=ABCMeta):
"""Base class used for all Aio Client Interceptor classes"""
class UnaryUnaryClientInterceptor(ClientInterceptor, metaclass=ABCMeta):
"""Affords intercepting unary-unary invocations."""
@abstractmethod
async def intercept_unary_unary(
self,
continuation: Callable[
[ClientCallDetails, RequestType], UnaryUnaryCall
],
client_call_details: ClientCallDetails,
request: RequestType,
) -> Union[UnaryUnaryCall, ResponseType]:
"""Intercepts a unary-unary invocation asynchronously.
Args:
continuation: A coroutine that proceeds with the invocation by
executing the next interceptor in the chain or invoking the
actual RPC on the underlying Channel. It is the interceptor's
responsibility to call it if it decides to move the RPC forward.
The interceptor can use
`call = await continuation(client_call_details, request)`
to continue with the RPC. `continuation` returns the call to the
RPC.
client_call_details: A ClientCallDetails object describing the
outgoing RPC.
request: The request value for the RPC.
Returns:
An object with the RPC response.
Raises:
AioRpcError: Indicating that the RPC terminated with non-OK status.
asyncio.CancelledError: Indicating that the RPC was canceled.
"""
class UnaryStreamClientInterceptor(ClientInterceptor, metaclass=ABCMeta):
"""Affords intercepting unary-stream invocations."""
@abstractmethod
async def intercept_unary_stream(
self,
continuation: Callable[
[ClientCallDetails, RequestType], UnaryStreamCall
],
client_call_details: ClientCallDetails,
request: RequestType,
) -> Union[ResponseIterableType, UnaryStreamCall]:
"""Intercepts a unary-stream invocation asynchronously.
The function could return the call object or an asynchronous
iterator, in case of being an asyncrhonous iterator this will
become the source of the reads done by the caller.
Args:
continuation: A coroutine that proceeds with the invocation by
executing the next interceptor in the chain or invoking the
actual RPC on the underlying Channel. It is the interceptor's
responsibility to call it if it decides to move the RPC forward.
The interceptor can use
`call = await continuation(client_call_details, request)`
to continue with the RPC. `continuation` returns the call to the
RPC.
client_call_details: A ClientCallDetails object describing the
outgoing RPC.
request: The request value for the RPC.
Returns:
The RPC Call or an asynchronous iterator.
Raises:
AioRpcError: Indicating that the RPC terminated with non-OK status.
asyncio.CancelledError: Indicating that the RPC was canceled.
"""
class StreamUnaryClientInterceptor(ClientInterceptor, metaclass=ABCMeta):
"""Affords intercepting stream-unary invocations."""
@abstractmethod
async def intercept_stream_unary(
self,
continuation: Callable[
[ClientCallDetails, RequestType], StreamUnaryCall
],
client_call_details: ClientCallDetails,
request_iterator: RequestIterableType,
) -> StreamUnaryCall:
"""Intercepts a stream-unary invocation asynchronously.
Within the interceptor the usage of the call methods like `write` or
even awaiting the call should be done carefully, since the caller
could be expecting an untouched call, for example for start writing
messages to it.
Args:
continuation: A coroutine that proceeds with the invocation by
executing the next interceptor in the chain or invoking the
actual RPC on the underlying Channel. It is the interceptor's
responsibility to call it if it decides to move the RPC forward.
The interceptor can use
`call = await continuation(client_call_details, request_iterator)`
to continue with the RPC. `continuation` returns the call to the
RPC.
client_call_details: A ClientCallDetails object describing the
outgoing RPC.
request_iterator: The request iterator that will produce requests
for the RPC.
Returns:
The RPC Call.
Raises:
AioRpcError: Indicating that the RPC terminated with non-OK status.
asyncio.CancelledError: Indicating that the RPC was canceled.
"""
class StreamStreamClientInterceptor(ClientInterceptor, metaclass=ABCMeta):
"""Affords intercepting stream-stream invocations."""
@abstractmethod
async def intercept_stream_stream(
self,
continuation: Callable[
[ClientCallDetails, RequestType], StreamStreamCall
],
client_call_details: ClientCallDetails,
request_iterator: RequestIterableType,
) -> Union[ResponseIterableType, StreamStreamCall]:
"""Intercepts a stream-stream invocation asynchronously.
Within the interceptor the usage of the call methods like `write` or
even awaiting the call should be done carefully, since the caller
could be expecting an untouched call, for example for start writing
messages to it.
The function could return the call object or an asynchronous
iterator, in case of being an asyncrhonous iterator this will
become the source of the reads done by the caller.
Args:
continuation: A coroutine that proceeds with the invocation by
executing the next interceptor in the chain or invoking the
actual RPC on the underlying Channel. It is the interceptor's
responsibility to call it if it decides to move the RPC forward.
The interceptor can use
`call = await continuation(client_call_details, request_iterator)`
to continue with the RPC. `continuation` returns the call to the
RPC.
client_call_details: A ClientCallDetails object describing the
outgoing RPC.
request_iterator: The request iterator that will produce requests
for the RPC.
Returns:
The RPC Call or an asynchronous iterator.
Raises:
AioRpcError: Indicating that the RPC terminated with non-OK status.
asyncio.CancelledError: Indicating that the RPC was canceled.
"""
class InterceptedCall:
"""Base implementation for all intercepted call arities.
Interceptors might have some work to do before the RPC invocation with
the capacity of changing the invocation parameters, and some work to do
after the RPC invocation with the capacity for accessing to the wrapped
`UnaryUnaryCall`.
It handles also early and later cancellations, when the RPC has not even
started and the execution is still held by the interceptors or when the
RPC has finished but again the execution is still held by the interceptors.
Once the RPC is finally executed, all methods are finally done against the
intercepted call, being at the same time the same call returned to the
interceptors.
As a base class for all of the interceptors implements the logic around
final status, metadata and cancellation.
"""
_interceptors_task: asyncio.Task
_pending_add_done_callbacks: Sequence[DoneCallbackType]
def __init__(self, interceptors_task: asyncio.Task) -> None:
self._interceptors_task = interceptors_task
self._pending_add_done_callbacks = []
self._interceptors_task.add_done_callback(
self._fire_or_add_pending_done_callbacks
)
def __del__(self):
self.cancel()
def _fire_or_add_pending_done_callbacks(
self, interceptors_task: asyncio.Task
) -> None:
if not self._pending_add_done_callbacks:
return
call_completed = False
try:
call = interceptors_task.result()
if call.done():
call_completed = True
except (AioRpcError, asyncio.CancelledError):
call_completed = True
if call_completed:
for callback in self._pending_add_done_callbacks:
callback(self)
else:
for callback in self._pending_add_done_callbacks:
callback = functools.partial(
self._wrap_add_done_callback, callback
)
call.add_done_callback(callback)
self._pending_add_done_callbacks = []
def _wrap_add_done_callback(
self, callback: DoneCallbackType, unused_call: _base_call.Call
) -> None:
callback(self)
def cancel(self) -> bool:
if not self._interceptors_task.done():
# There is no yet the intercepted call available,
# Trying to cancel it by using the generic Asyncio
# cancellation method.
return self._interceptors_task.cancel()
try:
call = self._interceptors_task.result()
except AioRpcError:
return False
except asyncio.CancelledError:
return False
return call.cancel()
def cancelled(self) -> bool:
if not self._interceptors_task.done():
return False
try:
call = self._interceptors_task.result()
except AioRpcError as err:
return err.code() == grpc.StatusCode.CANCELLED
except asyncio.CancelledError:
return True
return call.cancelled()
def done(self) -> bool:
if not self._interceptors_task.done():
return False
try:
call = self._interceptors_task.result()
except (AioRpcError, asyncio.CancelledError):
return True
return call.done()
def add_done_callback(self, callback: DoneCallbackType) -> None:
if not self._interceptors_task.done():
self._pending_add_done_callbacks.append(callback)
return
try:
call = self._interceptors_task.result()
except (AioRpcError, asyncio.CancelledError):
callback(self)
return
if call.done():
callback(self)
else:
callback = functools.partial(self._wrap_add_done_callback, callback)
call.add_done_callback(callback)
def time_remaining(self) -> Optional[float]:
raise NotImplementedError()
async def initial_metadata(self) -> Optional[Metadata]:
try:
call = await self._interceptors_task
except AioRpcError as err:
return err.initial_metadata()
except asyncio.CancelledError:
return None
return await call.initial_metadata()
async def trailing_metadata(self) -> Optional[Metadata]:
try:
call = await self._interceptors_task
except AioRpcError as err:
return err.trailing_metadata()
except asyncio.CancelledError:
return None
return await call.trailing_metadata()
async def code(self) -> grpc.StatusCode:
try:
call = await self._interceptors_task
except AioRpcError as err:
return err.code()
except asyncio.CancelledError:
return grpc.StatusCode.CANCELLED
return await call.code()
async def details(self) -> str:
try:
call = await self._interceptors_task
except AioRpcError as err:
return err.details()
except asyncio.CancelledError:
return _LOCAL_CANCELLATION_DETAILS
return await call.details()
async def debug_error_string(self) -> Optional[str]:
try:
call = await self._interceptors_task
except AioRpcError as err:
return err.debug_error_string()
except asyncio.CancelledError:
return ""
return await call.debug_error_string()
async def wait_for_connection(self) -> None:
call = await self._interceptors_task
return await call.wait_for_connection()
class _InterceptedUnaryResponseMixin:
def __await__(self):
call = yield from self._interceptors_task.__await__()
response = yield from call.__await__()
return response
class _InterceptedStreamResponseMixin:
_response_aiter: Optional[AsyncIterable[ResponseType]]
def _init_stream_response_mixin(self) -> None:
# Is initalized later, otherwise if the iterator is not finnally
# consumed a logging warning is emmited by Asyncio.
self._response_aiter = None
async def _wait_for_interceptor_task_response_iterator(
self,
) -> ResponseType:
call = await self._interceptors_task
async for response in call:
yield response
def __aiter__(self) -> AsyncIterable[ResponseType]:
if self._response_aiter is None:
self._response_aiter = (
self._wait_for_interceptor_task_response_iterator()
)
return self._response_aiter
async def read(self) -> ResponseType:
if self._response_aiter is None:
self._response_aiter = (
self._wait_for_interceptor_task_response_iterator()
)
return await self._response_aiter.asend(None)
class _InterceptedStreamRequestMixin:
_write_to_iterator_async_gen: Optional[AsyncIterable[RequestType]]
_write_to_iterator_queue: Optional[asyncio.Queue]
_status_code_task: Optional[asyncio.Task]
_FINISH_ITERATOR_SENTINEL = object()
def _init_stream_request_mixin(
self, request_iterator: Optional[RequestIterableType]
) -> RequestIterableType:
if request_iterator is None:
# We provide our own request iterator which is a proxy
# of the futures writes that will be done by the caller.
self._write_to_iterator_queue = asyncio.Queue(maxsize=1)
self._write_to_iterator_async_gen = (
self._proxy_writes_as_request_iterator()
)
self._status_code_task = None
request_iterator = self._write_to_iterator_async_gen
else:
self._write_to_iterator_queue = None
return request_iterator
async def _proxy_writes_as_request_iterator(self):
await self._interceptors_task
while True:
value = await self._write_to_iterator_queue.get()
if (
value
is _InterceptedStreamRequestMixin._FINISH_ITERATOR_SENTINEL
):
break
yield value
async def _write_to_iterator_queue_interruptible(
self, request: RequestType, call: InterceptedCall
):
# Write the specified 'request' to the request iterator queue using the
# specified 'call' to allow for interruption of the write in the case
# of abrupt termination of the call.
if self._status_code_task is None:
self._status_code_task = self._loop.create_task(call.code())
await asyncio.wait(
(
self._loop.create_task(
self._write_to_iterator_queue.put(request)
),
self._status_code_task,
),
return_when=asyncio.FIRST_COMPLETED,
)
async def write(self, request: RequestType) -> None:
# If no queue was created it means that requests
# should be expected through an iterators provided
# by the caller.
if self._write_to_iterator_queue is None:
raise cygrpc.UsageError(_API_STYLE_ERROR)
try:
call = await self._interceptors_task
except (asyncio.CancelledError, AioRpcError):
raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS)
if call.done():
raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS)
elif call._done_writing_flag:
raise asyncio.InvalidStateError(_RPC_HALF_CLOSED_DETAILS)
await self._write_to_iterator_queue_interruptible(request, call)
if call.done():
raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS)
async def done_writing(self) -> None:
"""Signal peer that client is done writing.
This method is idempotent.
"""
# If no queue was created it means that requests
# should be expected through an iterators provided
# by the caller.
if self._write_to_iterator_queue is None:
raise cygrpc.UsageError(_API_STYLE_ERROR)
try:
call = await self._interceptors_task
except asyncio.CancelledError:
raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS)
await self._write_to_iterator_queue_interruptible(
_InterceptedStreamRequestMixin._FINISH_ITERATOR_SENTINEL, call
)
class InterceptedUnaryUnaryCall(
_InterceptedUnaryResponseMixin, InterceptedCall, _base_call.UnaryUnaryCall
):
"""Used for running a `UnaryUnaryCall` wrapped by interceptors.
For the `__await__` method is it is proxied to the intercepted call only when
the interceptor task is finished.
"""
_loop: asyncio.AbstractEventLoop
_channel: cygrpc.AioChannel
# pylint: disable=too-many-arguments
def __init__(
self,
interceptors: Sequence[UnaryUnaryClientInterceptor],
request: RequestType,
timeout: Optional[float],
metadata: Metadata,
credentials: Optional[grpc.CallCredentials],
wait_for_ready: Optional[bool],
channel: cygrpc.AioChannel,
method: bytes,
request_serializer: SerializingFunction,
response_deserializer: DeserializingFunction,
loop: asyncio.AbstractEventLoop,
) -> None:
self._loop = loop
self._channel = channel
interceptors_task = loop.create_task(
self._invoke(
interceptors,
method,
timeout,
metadata,
credentials,
wait_for_ready,
request,
request_serializer,
response_deserializer,
)
)
super().__init__(interceptors_task)
# pylint: disable=too-many-arguments
async def _invoke(
self,
interceptors: Sequence[UnaryUnaryClientInterceptor],
method: bytes,
timeout: Optional[float],
metadata: Optional[Metadata],
credentials: Optional[grpc.CallCredentials],
wait_for_ready: Optional[bool],
request: RequestType,
request_serializer: SerializingFunction,
response_deserializer: DeserializingFunction,
) -> UnaryUnaryCall:
"""Run the RPC call wrapped in interceptors"""
async def _run_interceptor(
interceptors: List[UnaryUnaryClientInterceptor],
client_call_details: ClientCallDetails,
request: RequestType,
) -> _base_call.UnaryUnaryCall:
if interceptors:
continuation = functools.partial(
_run_interceptor, interceptors[1:]
)
call_or_response = await interceptors[0].intercept_unary_unary(
continuation, client_call_details, request
)
if isinstance(call_or_response, _base_call.UnaryUnaryCall):
return call_or_response
else:
return UnaryUnaryCallResponse(call_or_response)
else:
return UnaryUnaryCall(
request,
_timeout_to_deadline(client_call_details.timeout),
client_call_details.metadata,
client_call_details.credentials,
client_call_details.wait_for_ready,
self._channel,
client_call_details.method,
request_serializer,
response_deserializer,
self._loop,
)
client_call_details = ClientCallDetails(
method, timeout, metadata, credentials, wait_for_ready
)
return await _run_interceptor(
list(interceptors), client_call_details, request
)
def time_remaining(self) -> Optional[float]:
raise NotImplementedError()
class InterceptedUnaryStreamCall(
_InterceptedStreamResponseMixin, InterceptedCall, _base_call.UnaryStreamCall
):
"""Used for running a `UnaryStreamCall` wrapped by interceptors."""
_loop: asyncio.AbstractEventLoop
_channel: cygrpc.AioChannel
_last_returned_call_from_interceptors = Optional[_base_call.UnaryStreamCall]
# pylint: disable=too-many-arguments
def __init__(
self,
interceptors: Sequence[UnaryStreamClientInterceptor],
request: RequestType,
timeout: Optional[float],
metadata: Metadata,
credentials: Optional[grpc.CallCredentials],
wait_for_ready: Optional[bool],
channel: cygrpc.AioChannel,
method: bytes,
request_serializer: SerializingFunction,
response_deserializer: DeserializingFunction,
loop: asyncio.AbstractEventLoop,
) -> None:
self._loop = loop
self._channel = channel
self._init_stream_response_mixin()
self._last_returned_call_from_interceptors = None
interceptors_task = loop.create_task(
self._invoke(
interceptors,
method,
timeout,
metadata,
credentials,
wait_for_ready,
request,
request_serializer,
response_deserializer,
)
)
super().__init__(interceptors_task)
# pylint: disable=too-many-arguments
async def _invoke(
self,
interceptors: Sequence[UnaryUnaryClientInterceptor],
method: bytes,
timeout: Optional[float],
metadata: Optional[Metadata],
credentials: Optional[grpc.CallCredentials],
wait_for_ready: Optional[bool],
request: RequestType,
request_serializer: SerializingFunction,
response_deserializer: DeserializingFunction,
) -> UnaryStreamCall:
"""Run the RPC call wrapped in interceptors"""
async def _run_interceptor(
interceptors: List[UnaryStreamClientInterceptor],
client_call_details: ClientCallDetails,
request: RequestType,
) -> _base_call.UnaryUnaryCall:
if interceptors:
continuation = functools.partial(
_run_interceptor, interceptors[1:]
)
call_or_response_iterator = await interceptors[
0
].intercept_unary_stream(
continuation, client_call_details, request
)
if isinstance(
call_or_response_iterator, _base_call.UnaryStreamCall
):
self._last_returned_call_from_interceptors = (
call_or_response_iterator
)
else:
self._last_returned_call_from_interceptors = (
UnaryStreamCallResponseIterator(
self._last_returned_call_from_interceptors,
call_or_response_iterator,
)
)
return self._last_returned_call_from_interceptors
else:
self._last_returned_call_from_interceptors = UnaryStreamCall(
request,
_timeout_to_deadline(client_call_details.timeout),
client_call_details.metadata,
client_call_details.credentials,
client_call_details.wait_for_ready,
self._channel,
client_call_details.method,
request_serializer,
response_deserializer,
self._loop,
)
return self._last_returned_call_from_interceptors
client_call_details = ClientCallDetails(
method, timeout, metadata, credentials, wait_for_ready
)
return await _run_interceptor(
list(interceptors), client_call_details, request
)
def time_remaining(self) -> Optional[float]:
raise NotImplementedError()
class InterceptedStreamUnaryCall(
_InterceptedUnaryResponseMixin,
_InterceptedStreamRequestMixin,
InterceptedCall,
_base_call.StreamUnaryCall,
):
"""Used for running a `StreamUnaryCall` wrapped by interceptors.
For the `__await__` method is it is proxied to the intercepted call only when
the interceptor task is finished.
"""
_loop: asyncio.AbstractEventLoop
_channel: cygrpc.AioChannel
# pylint: disable=too-many-arguments
def __init__(
self,
interceptors: Sequence[StreamUnaryClientInterceptor],
request_iterator: Optional[RequestIterableType],
timeout: Optional[float],
metadata: Metadata,
credentials: Optional[grpc.CallCredentials],
wait_for_ready: Optional[bool],
channel: cygrpc.AioChannel,
method: bytes,
request_serializer: SerializingFunction,
response_deserializer: DeserializingFunction,
loop: asyncio.AbstractEventLoop,
) -> None:
self._loop = loop
self._channel = channel
request_iterator = self._init_stream_request_mixin(request_iterator)
interceptors_task = loop.create_task(
self._invoke(
interceptors,
method,
timeout,
metadata,
credentials,
wait_for_ready,
request_iterator,
request_serializer,
response_deserializer,
)
)
super().__init__(interceptors_task)
# pylint: disable=too-many-arguments
async def _invoke(
self,
interceptors: Sequence[StreamUnaryClientInterceptor],
method: bytes,
timeout: Optional[float],
metadata: Optional[Metadata],
credentials: Optional[grpc.CallCredentials],
wait_for_ready: Optional[bool],
request_iterator: RequestIterableType,
request_serializer: SerializingFunction,
response_deserializer: DeserializingFunction,
) -> StreamUnaryCall:
"""Run the RPC call wrapped in interceptors"""
async def _run_interceptor(
interceptors: Iterator[UnaryUnaryClientInterceptor],
client_call_details: ClientCallDetails,
request_iterator: RequestIterableType,
) -> _base_call.StreamUnaryCall:
if interceptors:
continuation = functools.partial(
_run_interceptor, interceptors[1:]
)
return await interceptors[0].intercept_stream_unary(
continuation, client_call_details, request_iterator
)
else:
return StreamUnaryCall(
request_iterator,
_timeout_to_deadline(client_call_details.timeout),
client_call_details.metadata,
client_call_details.credentials,
client_call_details.wait_for_ready,
self._channel,
client_call_details.method,
request_serializer,
response_deserializer,
self._loop,
)
client_call_details = ClientCallDetails(
method, timeout, metadata, credentials, wait_for_ready
)
return await _run_interceptor(
list(interceptors), client_call_details, request_iterator
)
def time_remaining(self) -> Optional[float]:
raise NotImplementedError()
class InterceptedStreamStreamCall(
_InterceptedStreamResponseMixin,
_InterceptedStreamRequestMixin,
InterceptedCall,
_base_call.StreamStreamCall,
):
"""Used for running a `StreamStreamCall` wrapped by interceptors."""
_loop: asyncio.AbstractEventLoop
_channel: cygrpc.AioChannel
_last_returned_call_from_interceptors = Optional[_base_call.UnaryStreamCall]
# pylint: disable=too-many-arguments
def __init__(
self,
interceptors: Sequence[StreamStreamClientInterceptor],
request_iterator: Optional[RequestIterableType],
timeout: Optional[float],
metadata: Metadata,
credentials: Optional[grpc.CallCredentials],
wait_for_ready: Optional[bool],
channel: cygrpc.AioChannel,
method: bytes,
request_serializer: SerializingFunction,
response_deserializer: DeserializingFunction,
loop: asyncio.AbstractEventLoop,
) -> None:
self._loop = loop
self._channel = channel
self._init_stream_response_mixin()
request_iterator = self._init_stream_request_mixin(request_iterator)
self._last_returned_call_from_interceptors = None
interceptors_task = loop.create_task(
self._invoke(
interceptors,
method,
timeout,
metadata,
credentials,
wait_for_ready,
request_iterator,
request_serializer,
response_deserializer,
)
)
super().__init__(interceptors_task)
# pylint: disable=too-many-arguments
async def _invoke(
self,
interceptors: Sequence[StreamStreamClientInterceptor],
method: bytes,
timeout: Optional[float],
metadata: Optional[Metadata],
credentials: Optional[grpc.CallCredentials],
wait_for_ready: Optional[bool],
request_iterator: RequestIterableType,
request_serializer: SerializingFunction,
response_deserializer: DeserializingFunction,
) -> StreamStreamCall:
"""Run the RPC call wrapped in interceptors"""
async def _run_interceptor(
interceptors: List[StreamStreamClientInterceptor],
client_call_details: ClientCallDetails,
request_iterator: RequestIterableType,
) -> _base_call.StreamStreamCall:
if interceptors:
continuation = functools.partial(
_run_interceptor, interceptors[1:]
)
call_or_response_iterator = await interceptors[
0
].intercept_stream_stream(
continuation, client_call_details, request_iterator
)
if isinstance(
call_or_response_iterator, _base_call.StreamStreamCall
):
self._last_returned_call_from_interceptors = (
call_or_response_iterator
)
else:
self._last_returned_call_from_interceptors = (
StreamStreamCallResponseIterator(
self._last_returned_call_from_interceptors,
call_or_response_iterator,
)
)
return self._last_returned_call_from_interceptors
else:
self._last_returned_call_from_interceptors = StreamStreamCall(
request_iterator,
_timeout_to_deadline(client_call_details.timeout),
client_call_details.metadata,
client_call_details.credentials,
client_call_details.wait_for_ready,
self._channel,
client_call_details.method,
request_serializer,
response_deserializer,
self._loop,
)
return self._last_returned_call_from_interceptors
client_call_details = ClientCallDetails(
method, timeout, metadata, credentials, wait_for_ready
)
return await _run_interceptor(
list(interceptors), client_call_details, request_iterator
)
def time_remaining(self) -> Optional[float]:
raise NotImplementedError()
class UnaryUnaryCallResponse(_base_call.UnaryUnaryCall):
"""Final UnaryUnaryCall class finished with a response."""
_response: ResponseType
def __init__(self, response: ResponseType) -> None:
self._response = response
def cancel(self) -> bool:
return False
def cancelled(self) -> bool:
return False
def done(self) -> bool:
return True
def add_done_callback(self, unused_callback) -> None:
raise NotImplementedError()
def time_remaining(self) -> Optional[float]:
raise NotImplementedError()
async def initial_metadata(self) -> Optional[Metadata]:
return None
async def trailing_metadata(self) -> Optional[Metadata]:
return None
async def code(self) -> grpc.StatusCode:
return grpc.StatusCode.OK
async def details(self) -> str:
return ""
async def debug_error_string(self) -> Optional[str]:
return None
def __await__(self):
if False: # pylint: disable=using-constant-test
# This code path is never used, but a yield statement is needed
# for telling the interpreter that __await__ is a generator.
yield None
return self._response
async def wait_for_connection(self) -> None:
pass
class _StreamCallResponseIterator:
_call: Union[_base_call.UnaryStreamCall, _base_call.StreamStreamCall]
_response_iterator: AsyncIterable[ResponseType]
def __init__(
self,
call: Union[_base_call.UnaryStreamCall, _base_call.StreamStreamCall],
response_iterator: AsyncIterable[ResponseType],
) -> None:
self._response_iterator = response_iterator
self._call = call
def cancel(self) -> bool:
return self._call.cancel()
def cancelled(self) -> bool:
return self._call.cancelled()
def done(self) -> bool:
return self._call.done()
def add_done_callback(self, callback) -> None:
self._call.add_done_callback(callback)
def time_remaining(self) -> Optional[float]:
return self._call.time_remaining()
async def initial_metadata(self) -> Optional[Metadata]:
return await self._call.initial_metadata()
async def trailing_metadata(self) -> Optional[Metadata]:
return await self._call.trailing_metadata()
async def code(self) -> grpc.StatusCode:
return await self._call.code()
async def details(self) -> str:
return await self._call.details()
async def debug_error_string(self) -> Optional[str]:
return await self._call.debug_error_string()
def __aiter__(self):
return self._response_iterator.__aiter__()
async def wait_for_connection(self) -> None:
return await self._call.wait_for_connection()
class UnaryStreamCallResponseIterator(
_StreamCallResponseIterator, _base_call.UnaryStreamCall
):
"""UnaryStreamCall class wich uses an alternative response iterator."""
async def read(self) -> ResponseType:
# Behind the scenes everyting goes through the
# async iterator. So this path should not be reached.
raise NotImplementedError()
class StreamStreamCallResponseIterator(
_StreamCallResponseIterator, _base_call.StreamStreamCall
):
"""StreamStreamCall class wich uses an alternative response iterator."""
async def read(self) -> ResponseType:
# Behind the scenes everyting goes through the
# async iterator. So this path should not be reached.
raise NotImplementedError()
async def write(self, request: RequestType) -> None:
# Behind the scenes everyting goes through the
# async iterator provided by the InterceptedStreamStreamCall.
# So this path should not be reached.
raise NotImplementedError()
async def done_writing(self) -> None:
# Behind the scenes everyting goes through the
# async iterator provided by the InterceptedStreamStreamCall.
# So this path should not be reached.
raise NotImplementedError()
@property
def _done_writing_flag(self) -> bool:
return self._call._done_writing_flag
| 40,771
| 33.96741
| 84
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/aio/_utils.py
|
# Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Internal utilities used by the gRPC Aio module."""
import time
from typing import Optional
def _timeout_to_deadline(timeout: Optional[float]) -> Optional[float]:
if timeout is None:
return None
return time.time() + timeout
| 821
| 34.73913
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/aio/_base_server.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Abstract base classes for server-side classes."""
import abc
from typing import Generic, Iterable, Mapping, NoReturn, Optional, Sequence
import grpc
from ._metadata import Metadata
from ._typing import DoneCallbackType
from ._typing import MetadataType
from ._typing import RequestType
from ._typing import ResponseType
class Server(abc.ABC):
"""Serves RPCs."""
@abc.abstractmethod
def add_generic_rpc_handlers(
self, generic_rpc_handlers: Sequence[grpc.GenericRpcHandler]
) -> None:
"""Registers GenericRpcHandlers with this Server.
This method is only safe to call before the server is started.
Args:
generic_rpc_handlers: A sequence of GenericRpcHandlers that will be
used to service RPCs.
"""
@abc.abstractmethod
def add_insecure_port(self, address: str) -> int:
"""Opens an insecure port for accepting RPCs.
A port is a communication endpoint that used by networking protocols,
like TCP and UDP. To date, we only support TCP.
This method may only be called before starting the server.
Args:
address: The address for which to open a port. If the port is 0,
or not specified in the address, then the gRPC runtime will choose a port.
Returns:
An integer port on which the server will accept RPC requests.
"""
@abc.abstractmethod
def add_secure_port(
self, address: str, server_credentials: grpc.ServerCredentials
) -> int:
"""Opens a secure port for accepting RPCs.
A port is a communication endpoint that used by networking protocols,
like TCP and UDP. To date, we only support TCP.
This method may only be called before starting the server.
Args:
address: The address for which to open a port.
if the port is 0, or not specified in the address, then the gRPC
runtime will choose a port.
server_credentials: A ServerCredentials object.
Returns:
An integer port on which the server will accept RPC requests.
"""
@abc.abstractmethod
async def start(self) -> None:
"""Starts this Server.
This method may only be called once. (i.e. it is not idempotent).
"""
@abc.abstractmethod
async def stop(self, grace: Optional[float]) -> None:
"""Stops this Server.
This method immediately stops the server from servicing new RPCs in
all cases.
If a grace period is specified, this method returns immediately and all
RPCs active at the end of the grace period are aborted. If a grace
period is not specified (by passing None for grace), all existing RPCs
are aborted immediately and this method blocks until the last RPC
handler terminates.
This method is idempotent and may be called at any time. Passing a
smaller grace value in a subsequent call will have the effect of
stopping the Server sooner (passing None will have the effect of
stopping the server immediately). Passing a larger grace value in a
subsequent call will not have the effect of stopping the server later
(i.e. the most restrictive grace value is used).
Args:
grace: A duration of time in seconds or None.
"""
@abc.abstractmethod
async def wait_for_termination(
self, timeout: Optional[float] = None
) -> bool:
"""Continues current coroutine once the server stops.
This is an EXPERIMENTAL API.
The wait will not consume computational resources during blocking, and
it will block until one of the two following conditions are met:
1) The server is stopped or terminated;
2) A timeout occurs if timeout is not `None`.
The timeout argument works in the same way as `threading.Event.wait()`.
https://docs.python.org/3/library/threading.html#threading.Event.wait
Args:
timeout: A floating point number specifying a timeout for the
operation in seconds.
Returns:
A bool indicates if the operation times out.
"""
# pylint: disable=too-many-public-methods
class ServicerContext(Generic[RequestType, ResponseType], abc.ABC):
"""A context object passed to method implementations."""
@abc.abstractmethod
async def read(self) -> RequestType:
"""Reads one message from the RPC.
Only one read operation is allowed simultaneously.
Returns:
A response message of the RPC.
Raises:
An RpcError exception if the read failed.
"""
@abc.abstractmethod
async def write(self, message: ResponseType) -> None:
"""Writes one message to the RPC.
Only one write operation is allowed simultaneously.
Raises:
An RpcError exception if the write failed.
"""
@abc.abstractmethod
async def send_initial_metadata(
self, initial_metadata: MetadataType
) -> None:
"""Sends the initial metadata value to the client.
This method need not be called by implementations if they have no
metadata to add to what the gRPC runtime will transmit.
Args:
initial_metadata: The initial :term:`metadata`.
"""
@abc.abstractmethod
async def abort(
self,
code: grpc.StatusCode,
details: str = "",
trailing_metadata: MetadataType = tuple(),
) -> NoReturn:
"""Raises an exception to terminate the RPC with a non-OK status.
The code and details passed as arguments will supercede any existing
ones.
Args:
code: A StatusCode object to be sent to the client.
It must not be StatusCode.OK.
details: A UTF-8-encodable string to be sent to the client upon
termination of the RPC.
trailing_metadata: A sequence of tuple represents the trailing
:term:`metadata`.
Raises:
Exception: An exception is always raised to signal the abortion the
RPC to the gRPC runtime.
"""
@abc.abstractmethod
def set_trailing_metadata(self, trailing_metadata: MetadataType) -> None:
"""Sends the trailing metadata for the RPC.
This method need not be called by implementations if they have no
metadata to add to what the gRPC runtime will transmit.
Args:
trailing_metadata: The trailing :term:`metadata`.
"""
@abc.abstractmethod
def invocation_metadata(self) -> Optional[Metadata]:
"""Accesses the metadata sent by the client.
Returns:
The invocation :term:`metadata`.
"""
@abc.abstractmethod
def set_code(self, code: grpc.StatusCode) -> None:
"""Sets the value to be used as status code upon RPC completion.
This method need not be called by method implementations if they wish
the gRPC runtime to determine the status code of the RPC.
Args:
code: A StatusCode object to be sent to the client.
"""
@abc.abstractmethod
def set_details(self, details: str) -> None:
"""Sets the value to be used the as detail string upon RPC completion.
This method need not be called by method implementations if they have
no details to transmit.
Args:
details: A UTF-8-encodable string to be sent to the client upon
termination of the RPC.
"""
@abc.abstractmethod
def set_compression(self, compression: grpc.Compression) -> None:
"""Set the compression algorithm to be used for the entire call.
Args:
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip.
"""
@abc.abstractmethod
def disable_next_message_compression(self) -> None:
"""Disables compression for the next response message.
This method will override any compression configuration set during
server creation or set on the call.
"""
@abc.abstractmethod
def peer(self) -> str:
"""Identifies the peer that invoked the RPC being serviced.
Returns:
A string identifying the peer that invoked the RPC being serviced.
The string format is determined by gRPC runtime.
"""
@abc.abstractmethod
def peer_identities(self) -> Optional[Iterable[bytes]]:
"""Gets one or more peer identity(s).
Equivalent to
servicer_context.auth_context().get(servicer_context.peer_identity_key())
Returns:
An iterable of the identities, or None if the call is not
authenticated. Each identity is returned as a raw bytes type.
"""
@abc.abstractmethod
def peer_identity_key(self) -> Optional[str]:
"""The auth property used to identify the peer.
For example, "x509_common_name" or "x509_subject_alternative_name" are
used to identify an SSL peer.
Returns:
The auth property (string) that indicates the
peer identity, or None if the call is not authenticated.
"""
@abc.abstractmethod
def auth_context(self) -> Mapping[str, Iterable[bytes]]:
"""Gets the auth context for the call.
Returns:
A map of strings to an iterable of bytes for each auth property.
"""
def time_remaining(self) -> float:
"""Describes the length of allowed time remaining for the RPC.
Returns:
A nonnegative float indicating the length of allowed time in seconds
remaining for the RPC to complete before it is considered to have
timed out, or None if no deadline was specified for the RPC.
"""
def trailing_metadata(self):
"""Access value to be used as trailing metadata upon RPC completion.
This is an EXPERIMENTAL API.
Returns:
The trailing :term:`metadata` for the RPC.
"""
raise NotImplementedError()
def code(self):
"""Accesses the value to be used as status code upon RPC completion.
This is an EXPERIMENTAL API.
Returns:
The StatusCode value for the RPC.
"""
raise NotImplementedError()
def details(self):
"""Accesses the value to be used as detail string upon RPC completion.
This is an EXPERIMENTAL API.
Returns:
The details string of the RPC.
"""
raise NotImplementedError()
def add_done_callback(self, callback: DoneCallbackType) -> None:
"""Registers a callback to be called on RPC termination.
This is an EXPERIMENTAL API.
Args:
callback: A callable object will be called with the servicer context
object as its only argument.
"""
def cancelled(self) -> bool:
"""Return True if the RPC is cancelled.
The RPC is cancelled when the cancellation was requested with cancel().
This is an EXPERIMENTAL API.
Returns:
A bool indicates whether the RPC is cancelled or not.
"""
def done(self) -> bool:
"""Return True if the RPC is done.
An RPC is done if the RPC is completed, cancelled or aborted.
This is an EXPERIMENTAL API.
Returns:
A bool indicates if the RPC is done.
"""
| 12,062
| 31.254011
| 86
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/aio/_metadata.py
|
# Copyright 2020 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of the metadata abstraction for gRPC Asyncio Python."""
from collections import OrderedDict
from collections import abc
from typing import Any, Iterator, List, Tuple, Union
MetadataKey = str
MetadataValue = Union[str, bytes]
class Metadata(abc.Mapping):
"""Metadata abstraction for the asynchronous calls and interceptors.
The metadata is a mapping from str -> List[str]
Traits
* Multiple entries are allowed for the same key
* The order of the values by key is preserved
* Getting by an element by key, retrieves the first mapped value
* Supports an immutable view of the data
* Allows partial mutation on the data without recreating the new object from scratch.
"""
def __init__(self, *args: Tuple[MetadataKey, MetadataValue]) -> None:
self._metadata = OrderedDict()
for md_key, md_value in args:
self.add(md_key, md_value)
@classmethod
def from_tuple(cls, raw_metadata: tuple):
if raw_metadata:
return cls(*raw_metadata)
return cls()
def add(self, key: MetadataKey, value: MetadataValue) -> None:
self._metadata.setdefault(key, [])
self._metadata[key].append(value)
def __len__(self) -> int:
"""Return the total number of elements that there are in the metadata,
including multiple values for the same key.
"""
return sum(map(len, self._metadata.values()))
def __getitem__(self, key: MetadataKey) -> MetadataValue:
"""When calling <metadata>[<key>], the first element of all those
mapped for <key> is returned.
"""
try:
return self._metadata[key][0]
except (ValueError, IndexError) as e:
raise KeyError("{0!r}".format(key)) from e
def __setitem__(self, key: MetadataKey, value: MetadataValue) -> None:
"""Calling metadata[<key>] = <value>
Maps <value> to the first instance of <key>.
"""
if key not in self:
self._metadata[key] = [value]
else:
current_values = self.get_all(key)
self._metadata[key] = [value, *current_values[1:]]
def __delitem__(self, key: MetadataKey) -> None:
"""``del metadata[<key>]`` deletes the first mapping for <key>."""
current_values = self.get_all(key)
if not current_values:
raise KeyError(repr(key))
self._metadata[key] = current_values[1:]
def delete_all(self, key: MetadataKey) -> None:
"""Delete all mappings for <key>."""
del self._metadata[key]
def __iter__(self) -> Iterator[Tuple[MetadataKey, MetadataValue]]:
for key, values in self._metadata.items():
for value in values:
yield (key, value)
def get_all(self, key: MetadataKey) -> List[MetadataValue]:
"""For compatibility with other Metadata abstraction objects (like in Java),
this would return all items under the desired <key>.
"""
return self._metadata.get(key, [])
def set_all(self, key: MetadataKey, values: List[MetadataValue]) -> None:
self._metadata[key] = values
def __contains__(self, key: MetadataKey) -> bool:
return key in self._metadata
def __eq__(self, other: Any) -> bool:
if isinstance(other, self.__class__):
return self._metadata == other._metadata
if isinstance(other, tuple):
return tuple(self) == other
return NotImplemented # pytype: disable=bad-return-type
def __add__(self, other: Any) -> "Metadata":
if isinstance(other, self.__class__):
return Metadata(*(tuple(self) + tuple(other)))
if isinstance(other, tuple):
return Metadata(*(tuple(self) + other))
return NotImplemented # pytype: disable=bad-return-type
def __repr__(self) -> str:
view = tuple(self)
return "{0}({1!r})".format(self.__class__.__name__, view)
| 4,570
| 36.77686
| 93
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/aio/_channel.py
|
# Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Invocation-side implementation of gRPC Asyncio Python."""
import asyncio
import sys
from typing import Any, Iterable, List, Optional, Sequence
import grpc
from grpc import _common
from grpc import _compression
from grpc import _grpcio_metadata
from grpc._cython import cygrpc
from . import _base_call
from . import _base_channel
from ._call import StreamStreamCall
from ._call import StreamUnaryCall
from ._call import UnaryStreamCall
from ._call import UnaryUnaryCall
from ._interceptor import ClientInterceptor
from ._interceptor import InterceptedStreamStreamCall
from ._interceptor import InterceptedStreamUnaryCall
from ._interceptor import InterceptedUnaryStreamCall
from ._interceptor import InterceptedUnaryUnaryCall
from ._interceptor import StreamStreamClientInterceptor
from ._interceptor import StreamUnaryClientInterceptor
from ._interceptor import UnaryStreamClientInterceptor
from ._interceptor import UnaryUnaryClientInterceptor
from ._metadata import Metadata
from ._typing import ChannelArgumentType
from ._typing import DeserializingFunction
from ._typing import RequestIterableType
from ._typing import RequestType
from ._typing import ResponseType
from ._typing import SerializingFunction
from ._utils import _timeout_to_deadline
_USER_AGENT = "grpc-python-asyncio/{}".format(_grpcio_metadata.__version__)
if sys.version_info[1] < 7:
def _all_tasks() -> Iterable[asyncio.Task]:
return asyncio.Task.all_tasks() # pylint: disable=no-member
else:
def _all_tasks() -> Iterable[asyncio.Task]:
return asyncio.all_tasks()
def _augment_channel_arguments(
base_options: ChannelArgumentType, compression: Optional[grpc.Compression]
):
compression_channel_argument = _compression.create_channel_option(
compression
)
user_agent_channel_argument = (
(
cygrpc.ChannelArgKey.primary_user_agent_string,
_USER_AGENT,
),
)
return (
tuple(base_options)
+ compression_channel_argument
+ user_agent_channel_argument
)
class _BaseMultiCallable:
"""Base class of all multi callable objects.
Handles the initialization logic and stores common attributes.
"""
_loop: asyncio.AbstractEventLoop
_channel: cygrpc.AioChannel
_method: bytes
_request_serializer: SerializingFunction
_response_deserializer: DeserializingFunction
_interceptors: Optional[Sequence[ClientInterceptor]]
_references: List[Any]
_loop: asyncio.AbstractEventLoop
# pylint: disable=too-many-arguments
def __init__(
self,
channel: cygrpc.AioChannel,
method: bytes,
request_serializer: SerializingFunction,
response_deserializer: DeserializingFunction,
interceptors: Optional[Sequence[ClientInterceptor]],
references: List[Any],
loop: asyncio.AbstractEventLoop,
) -> None:
self._loop = loop
self._channel = channel
self._method = method
self._request_serializer = request_serializer
self._response_deserializer = response_deserializer
self._interceptors = interceptors
self._references = references
@staticmethod
def _init_metadata(
metadata: Optional[Metadata] = None,
compression: Optional[grpc.Compression] = None,
) -> Metadata:
"""Based on the provided values for <metadata> or <compression> initialise the final
metadata, as it should be used for the current call.
"""
metadata = metadata or Metadata()
if compression:
metadata = Metadata(
*_compression.augment_metadata(metadata, compression)
)
return metadata
class UnaryUnaryMultiCallable(
_BaseMultiCallable, _base_channel.UnaryUnaryMultiCallable
):
def __call__(
self,
request: RequestType,
*,
timeout: Optional[float] = None,
metadata: Optional[Metadata] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> _base_call.UnaryUnaryCall[RequestType, ResponseType]:
metadata = self._init_metadata(metadata, compression)
if not self._interceptors:
call = UnaryUnaryCall(
request,
_timeout_to_deadline(timeout),
metadata,
credentials,
wait_for_ready,
self._channel,
self._method,
self._request_serializer,
self._response_deserializer,
self._loop,
)
else:
call = InterceptedUnaryUnaryCall(
self._interceptors,
request,
timeout,
metadata,
credentials,
wait_for_ready,
self._channel,
self._method,
self._request_serializer,
self._response_deserializer,
self._loop,
)
return call
class UnaryStreamMultiCallable(
_BaseMultiCallable, _base_channel.UnaryStreamMultiCallable
):
def __call__(
self,
request: RequestType,
*,
timeout: Optional[float] = None,
metadata: Optional[Metadata] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> _base_call.UnaryStreamCall[RequestType, ResponseType]:
metadata = self._init_metadata(metadata, compression)
deadline = _timeout_to_deadline(timeout)
if not self._interceptors:
call = UnaryStreamCall(
request,
deadline,
metadata,
credentials,
wait_for_ready,
self._channel,
self._method,
self._request_serializer,
self._response_deserializer,
self._loop,
)
else:
call = InterceptedUnaryStreamCall(
self._interceptors,
request,
deadline,
metadata,
credentials,
wait_for_ready,
self._channel,
self._method,
self._request_serializer,
self._response_deserializer,
self._loop,
)
return call
class StreamUnaryMultiCallable(
_BaseMultiCallable, _base_channel.StreamUnaryMultiCallable
):
def __call__(
self,
request_iterator: Optional[RequestIterableType] = None,
timeout: Optional[float] = None,
metadata: Optional[Metadata] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> _base_call.StreamUnaryCall:
metadata = self._init_metadata(metadata, compression)
deadline = _timeout_to_deadline(timeout)
if not self._interceptors:
call = StreamUnaryCall(
request_iterator,
deadline,
metadata,
credentials,
wait_for_ready,
self._channel,
self._method,
self._request_serializer,
self._response_deserializer,
self._loop,
)
else:
call = InterceptedStreamUnaryCall(
self._interceptors,
request_iterator,
deadline,
metadata,
credentials,
wait_for_ready,
self._channel,
self._method,
self._request_serializer,
self._response_deserializer,
self._loop,
)
return call
class StreamStreamMultiCallable(
_BaseMultiCallable, _base_channel.StreamStreamMultiCallable
):
def __call__(
self,
request_iterator: Optional[RequestIterableType] = None,
timeout: Optional[float] = None,
metadata: Optional[Metadata] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> _base_call.StreamStreamCall:
metadata = self._init_metadata(metadata, compression)
deadline = _timeout_to_deadline(timeout)
if not self._interceptors:
call = StreamStreamCall(
request_iterator,
deadline,
metadata,
credentials,
wait_for_ready,
self._channel,
self._method,
self._request_serializer,
self._response_deserializer,
self._loop,
)
else:
call = InterceptedStreamStreamCall(
self._interceptors,
request_iterator,
deadline,
metadata,
credentials,
wait_for_ready,
self._channel,
self._method,
self._request_serializer,
self._response_deserializer,
self._loop,
)
return call
class Channel(_base_channel.Channel):
_loop: asyncio.AbstractEventLoop
_channel: cygrpc.AioChannel
_unary_unary_interceptors: List[UnaryUnaryClientInterceptor]
_unary_stream_interceptors: List[UnaryStreamClientInterceptor]
_stream_unary_interceptors: List[StreamUnaryClientInterceptor]
_stream_stream_interceptors: List[StreamStreamClientInterceptor]
def __init__(
self,
target: str,
options: ChannelArgumentType,
credentials: Optional[grpc.ChannelCredentials],
compression: Optional[grpc.Compression],
interceptors: Optional[Sequence[ClientInterceptor]],
):
"""Constructor.
Args:
target: The target to which to connect.
options: Configuration options for the channel.
credentials: A cygrpc.ChannelCredentials or None.
compression: An optional value indicating the compression method to be
used over the lifetime of the channel.
interceptors: An optional list of interceptors that would be used for
intercepting any RPC executed with that channel.
"""
self._unary_unary_interceptors = []
self._unary_stream_interceptors = []
self._stream_unary_interceptors = []
self._stream_stream_interceptors = []
if interceptors is not None:
for interceptor in interceptors:
if isinstance(interceptor, UnaryUnaryClientInterceptor):
self._unary_unary_interceptors.append(interceptor)
elif isinstance(interceptor, UnaryStreamClientInterceptor):
self._unary_stream_interceptors.append(interceptor)
elif isinstance(interceptor, StreamUnaryClientInterceptor):
self._stream_unary_interceptors.append(interceptor)
elif isinstance(interceptor, StreamStreamClientInterceptor):
self._stream_stream_interceptors.append(interceptor)
else:
raise ValueError(
"Interceptor {} must be ".format(interceptor)
+ "{} or ".format(UnaryUnaryClientInterceptor.__name__)
+ "{} or ".format(UnaryStreamClientInterceptor.__name__)
+ "{} or ".format(StreamUnaryClientInterceptor.__name__)
+ "{}. ".format(StreamStreamClientInterceptor.__name__)
)
self._loop = cygrpc.get_working_loop()
self._channel = cygrpc.AioChannel(
_common.encode(target),
_augment_channel_arguments(options, compression),
credentials,
self._loop,
)
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self._close(None)
async def _close(self, grace): # pylint: disable=too-many-branches
if self._channel.closed():
return
# No new calls will be accepted by the Cython channel.
self._channel.closing()
# Iterate through running tasks
tasks = _all_tasks()
calls = []
call_tasks = []
for task in tasks:
try:
stack = task.get_stack(limit=1)
except AttributeError as attribute_error:
# NOTE(lidiz) tl;dr: If the Task is created with a CPython
# object, it will trigger AttributeError.
#
# In the global finalizer, the event loop schedules
# a CPython PyAsyncGenAThrow object.
# https://github.com/python/cpython/blob/00e45877e33d32bb61aa13a2033e3bba370bda4d/Lib/asyncio/base_events.py#L484
#
# However, the PyAsyncGenAThrow object is written in C and
# failed to include the normal Python frame objects. Hence,
# this exception is a false negative, and it is safe to ignore
# the failure. It is fixed by https://github.com/python/cpython/pull/18669,
# but not available until 3.9 or 3.8.3. So, we have to keep it
# for a while.
# TODO(lidiz) drop this hack after 3.8 deprecation
if "frame" in str(attribute_error):
continue
else:
raise
# If the Task is created by a C-extension, the stack will be empty.
if not stack:
continue
# Locate ones created by `aio.Call`.
frame = stack[0]
candidate = frame.f_locals.get("self")
if candidate:
if isinstance(candidate, _base_call.Call):
if hasattr(candidate, "_channel"):
# For intercepted Call object
if candidate._channel is not self._channel:
continue
elif hasattr(candidate, "_cython_call"):
# For normal Call object
if candidate._cython_call._channel is not self._channel:
continue
else:
# Unidentified Call object
raise cygrpc.InternalError(
f"Unrecognized call object: {candidate}"
)
calls.append(candidate)
call_tasks.append(task)
# If needed, try to wait for them to finish.
# Call objects are not always awaitables.
if grace and call_tasks:
await asyncio.wait(call_tasks, timeout=grace)
# Time to cancel existing calls.
for call in calls:
call.cancel()
# Destroy the channel
self._channel.close()
async def close(self, grace: Optional[float] = None):
await self._close(grace)
def __del__(self):
if hasattr(self, "_channel"):
if not self._channel.closed():
self._channel.close()
def get_state(
self, try_to_connect: bool = False
) -> grpc.ChannelConnectivity:
result = self._channel.check_connectivity_state(try_to_connect)
return _common.CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[result]
async def wait_for_state_change(
self,
last_observed_state: grpc.ChannelConnectivity,
) -> None:
assert await self._channel.watch_connectivity_state(
last_observed_state.value[0], None
)
async def channel_ready(self) -> None:
state = self.get_state(try_to_connect=True)
while state != grpc.ChannelConnectivity.READY:
await self.wait_for_state_change(state)
state = self.get_state(try_to_connect=True)
def unary_unary(
self,
method: str,
request_serializer: Optional[SerializingFunction] = None,
response_deserializer: Optional[DeserializingFunction] = None,
) -> UnaryUnaryMultiCallable:
return UnaryUnaryMultiCallable(
self._channel,
_common.encode(method),
request_serializer,
response_deserializer,
self._unary_unary_interceptors,
[self],
self._loop,
)
def unary_stream(
self,
method: str,
request_serializer: Optional[SerializingFunction] = None,
response_deserializer: Optional[DeserializingFunction] = None,
) -> UnaryStreamMultiCallable:
return UnaryStreamMultiCallable(
self._channel,
_common.encode(method),
request_serializer,
response_deserializer,
self._unary_stream_interceptors,
[self],
self._loop,
)
def stream_unary(
self,
method: str,
request_serializer: Optional[SerializingFunction] = None,
response_deserializer: Optional[DeserializingFunction] = None,
) -> StreamUnaryMultiCallable:
return StreamUnaryMultiCallable(
self._channel,
_common.encode(method),
request_serializer,
response_deserializer,
self._stream_unary_interceptors,
[self],
self._loop,
)
def stream_stream(
self,
method: str,
request_serializer: Optional[SerializingFunction] = None,
response_deserializer: Optional[DeserializingFunction] = None,
) -> StreamStreamMultiCallable:
return StreamStreamMultiCallable(
self._channel,
_common.encode(method),
request_serializer,
response_deserializer,
self._stream_stream_interceptors,
[self],
self._loop,
)
def insecure_channel(
target: str,
options: Optional[ChannelArgumentType] = None,
compression: Optional[grpc.Compression] = None,
interceptors: Optional[Sequence[ClientInterceptor]] = None,
):
"""Creates an insecure asynchronous Channel to a server.
Args:
target: The server address
options: An optional list of key-value pairs (:term:`channel_arguments`
in gRPC Core runtime) to configure the channel.
compression: An optional value indicating the compression method to be
used over the lifetime of the channel.
interceptors: An optional sequence of interceptors that will be executed for
any call executed with this channel.
Returns:
A Channel.
"""
return Channel(
target,
() if options is None else options,
None,
compression,
interceptors,
)
def secure_channel(
target: str,
credentials: grpc.ChannelCredentials,
options: Optional[ChannelArgumentType] = None,
compression: Optional[grpc.Compression] = None,
interceptors: Optional[Sequence[ClientInterceptor]] = None,
):
"""Creates a secure asynchronous Channel to a server.
Args:
target: The server address.
credentials: A ChannelCredentials instance.
options: An optional list of key-value pairs (:term:`channel_arguments`
in gRPC Core runtime) to configure the channel.
compression: An optional value indicating the compression method to be
used over the lifetime of the channel.
interceptors: An optional sequence of interceptors that will be executed for
any call executed with this channel.
Returns:
An aio.Channel.
"""
return Channel(
target,
() if options is None else options,
credentials._credentials,
compression,
interceptors,
)
| 20,766
| 33.38245
| 129
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/aio/__init__.py
|
# Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""gRPC's Asynchronous Python API.
gRPC Async API objects may only be used on the thread on which they were
created. AsyncIO doesn't provide thread safety for most of its APIs.
"""
from typing import Any, Optional, Sequence, Tuple
import grpc
from grpc._cython.cygrpc import AbortError
from grpc._cython.cygrpc import BaseError
from grpc._cython.cygrpc import EOF
from grpc._cython.cygrpc import InternalError
from grpc._cython.cygrpc import UsageError
from grpc._cython.cygrpc import init_grpc_aio
from grpc._cython.cygrpc import shutdown_grpc_aio
from ._base_call import Call
from ._base_call import RpcContext
from ._base_call import StreamStreamCall
from ._base_call import StreamUnaryCall
from ._base_call import UnaryStreamCall
from ._base_call import UnaryUnaryCall
from ._base_channel import Channel
from ._base_channel import StreamStreamMultiCallable
from ._base_channel import StreamUnaryMultiCallable
from ._base_channel import UnaryStreamMultiCallable
from ._base_channel import UnaryUnaryMultiCallable
from ._base_server import Server
from ._base_server import ServicerContext
from ._call import AioRpcError
from ._channel import insecure_channel
from ._channel import secure_channel
from ._interceptor import ClientCallDetails
from ._interceptor import ClientInterceptor
from ._interceptor import InterceptedUnaryUnaryCall
from ._interceptor import ServerInterceptor
from ._interceptor import StreamStreamClientInterceptor
from ._interceptor import StreamUnaryClientInterceptor
from ._interceptor import UnaryStreamClientInterceptor
from ._interceptor import UnaryUnaryClientInterceptor
from ._metadata import Metadata
from ._server import server
from ._typing import ChannelArgumentType
################################### __all__ #################################
__all__ = (
"init_grpc_aio",
"shutdown_grpc_aio",
"AioRpcError",
"RpcContext",
"Call",
"UnaryUnaryCall",
"UnaryStreamCall",
"StreamUnaryCall",
"StreamStreamCall",
"Channel",
"UnaryUnaryMultiCallable",
"UnaryStreamMultiCallable",
"StreamUnaryMultiCallable",
"StreamStreamMultiCallable",
"ClientCallDetails",
"ClientInterceptor",
"UnaryStreamClientInterceptor",
"UnaryUnaryClientInterceptor",
"StreamUnaryClientInterceptor",
"StreamStreamClientInterceptor",
"InterceptedUnaryUnaryCall",
"ServerInterceptor",
"insecure_channel",
"server",
"Server",
"ServicerContext",
"EOF",
"secure_channel",
"AbortError",
"BaseError",
"UsageError",
"InternalError",
"Metadata",
)
| 3,160
| 31.927083
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/aio/_typing.py
|
# Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common types for gRPC Async API"""
from typing import (
Any,
AsyncIterable,
Callable,
Iterable,
Sequence,
Tuple,
TypeVar,
Union,
)
from grpc._cython.cygrpc import EOF
from ._metadata import Metadata
from ._metadata import MetadataKey
from ._metadata import MetadataValue
RequestType = TypeVar("RequestType")
ResponseType = TypeVar("ResponseType")
SerializingFunction = Callable[[Any], bytes]
DeserializingFunction = Callable[[bytes], Any]
MetadatumType = Tuple[MetadataKey, MetadataValue]
MetadataType = Union[Metadata, Sequence[MetadatumType]]
ChannelArgumentType = Sequence[Tuple[str, Any]]
EOFType = type(EOF)
DoneCallbackType = Callable[[Any], None]
RequestIterableType = Union[Iterable[Any], AsyncIterable[Any]]
ResponseIterableType = AsyncIterable[Any]
| 1,378
| 30.340909
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/aio/_base_call.py
|
# Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Abstract base classes for client-side Call objects.
Call objects represents the RPC itself, and offer methods to access / modify
its information. They also offer methods to manipulate the life-cycle of the
RPC, e.g. cancellation.
"""
from abc import ABCMeta
from abc import abstractmethod
from typing import Any, AsyncIterator, Generator, Generic, Optional, Union
import grpc
from ._metadata import Metadata
from ._typing import DoneCallbackType
from ._typing import EOFType
from ._typing import RequestType
from ._typing import ResponseType
__all__ = "RpcContext", "Call", "UnaryUnaryCall", "UnaryStreamCall"
class RpcContext(metaclass=ABCMeta):
"""Provides RPC-related information and action."""
@abstractmethod
def cancelled(self) -> bool:
"""Return True if the RPC is cancelled.
The RPC is cancelled when the cancellation was requested with cancel().
Returns:
A bool indicates whether the RPC is cancelled or not.
"""
@abstractmethod
def done(self) -> bool:
"""Return True if the RPC is done.
An RPC is done if the RPC is completed, cancelled or aborted.
Returns:
A bool indicates if the RPC is done.
"""
@abstractmethod
def time_remaining(self) -> Optional[float]:
"""Describes the length of allowed time remaining for the RPC.
Returns:
A nonnegative float indicating the length of allowed time in seconds
remaining for the RPC to complete before it is considered to have
timed out, or None if no deadline was specified for the RPC.
"""
@abstractmethod
def cancel(self) -> bool:
"""Cancels the RPC.
Idempotent and has no effect if the RPC has already terminated.
Returns:
A bool indicates if the cancellation is performed or not.
"""
@abstractmethod
def add_done_callback(self, callback: DoneCallbackType) -> None:
"""Registers a callback to be called on RPC termination.
Args:
callback: A callable object will be called with the call object as
its only argument.
"""
class Call(RpcContext, metaclass=ABCMeta):
"""The abstract base class of an RPC on the client-side."""
@abstractmethod
async def initial_metadata(self) -> Metadata:
"""Accesses the initial metadata sent by the server.
Returns:
The initial :term:`metadata`.
"""
@abstractmethod
async def trailing_metadata(self) -> Metadata:
"""Accesses the trailing metadata sent by the server.
Returns:
The trailing :term:`metadata`.
"""
@abstractmethod
async def code(self) -> grpc.StatusCode:
"""Accesses the status code sent by the server.
Returns:
The StatusCode value for the RPC.
"""
@abstractmethod
async def details(self) -> str:
"""Accesses the details sent by the server.
Returns:
The details string of the RPC.
"""
@abstractmethod
async def wait_for_connection(self) -> None:
"""Waits until connected to peer and raises aio.AioRpcError if failed.
This is an EXPERIMENTAL method.
This method ensures the RPC has been successfully connected. Otherwise,
an AioRpcError will be raised to explain the reason of the connection
failure.
This method is recommended for building retry mechanisms.
"""
class UnaryUnaryCall(
Generic[RequestType, ResponseType], Call, metaclass=ABCMeta
):
"""The abstract base class of an unary-unary RPC on the client-side."""
@abstractmethod
def __await__(self) -> Generator[Any, None, ResponseType]:
"""Await the response message to be ready.
Returns:
The response message of the RPC.
"""
class UnaryStreamCall(
Generic[RequestType, ResponseType], Call, metaclass=ABCMeta
):
@abstractmethod
def __aiter__(self) -> AsyncIterator[ResponseType]:
"""Returns the async iterator representation that yields messages.
Under the hood, it is calling the "read" method.
Returns:
An async iterator object that yields messages.
"""
@abstractmethod
async def read(self) -> Union[EOFType, ResponseType]:
"""Reads one message from the stream.
Read operations must be serialized when called from multiple
coroutines.
Returns:
A response message, or an `grpc.aio.EOF` to indicate the end of the
stream.
"""
class StreamUnaryCall(
Generic[RequestType, ResponseType], Call, metaclass=ABCMeta
):
@abstractmethod
async def write(self, request: RequestType) -> None:
"""Writes one message to the stream.
Raises:
An RpcError exception if the write failed.
"""
@abstractmethod
async def done_writing(self) -> None:
"""Notifies server that the client is done sending messages.
After done_writing is called, any additional invocation to the write
function will fail. This function is idempotent.
"""
@abstractmethod
def __await__(self) -> Generator[Any, None, ResponseType]:
"""Await the response message to be ready.
Returns:
The response message of the stream.
"""
class StreamStreamCall(
Generic[RequestType, ResponseType], Call, metaclass=ABCMeta
):
@abstractmethod
def __aiter__(self) -> AsyncIterator[ResponseType]:
"""Returns the async iterator representation that yields messages.
Under the hood, it is calling the "read" method.
Returns:
An async iterator object that yields messages.
"""
@abstractmethod
async def read(self) -> Union[EOFType, ResponseType]:
"""Reads one message from the stream.
Read operations must be serialized when called from multiple
coroutines.
Returns:
A response message, or an `grpc.aio.EOF` to indicate the end of the
stream.
"""
@abstractmethod
async def write(self, request: RequestType) -> None:
"""Writes one message to the stream.
Raises:
An RpcError exception if the write failed.
"""
@abstractmethod
async def done_writing(self) -> None:
"""Notifies server that the client is done sending messages.
After done_writing is called, any additional invocation to the write
function will fail. This function is idempotent.
"""
| 7,185
| 28.211382
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/aio/_server.py
|
# Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Server-side implementation of gRPC Asyncio Python."""
from concurrent.futures import Executor
from typing import Any, Optional, Sequence
import grpc
from grpc import _common
from grpc import _compression
from grpc._cython import cygrpc
from . import _base_server
from ._interceptor import ServerInterceptor
from ._typing import ChannelArgumentType
def _augment_channel_arguments(
base_options: ChannelArgumentType, compression: Optional[grpc.Compression]
):
compression_option = _compression.create_channel_option(compression)
return tuple(base_options) + compression_option
class Server(_base_server.Server):
"""Serves RPCs."""
def __init__(
self,
thread_pool: Optional[Executor],
generic_handlers: Optional[Sequence[grpc.GenericRpcHandler]],
interceptors: Optional[Sequence[Any]],
options: ChannelArgumentType,
maximum_concurrent_rpcs: Optional[int],
compression: Optional[grpc.Compression],
):
self._loop = cygrpc.get_working_loop()
if interceptors:
invalid_interceptors = [
interceptor
for interceptor in interceptors
if not isinstance(interceptor, ServerInterceptor)
]
if invalid_interceptors:
raise ValueError(
"Interceptor must be ServerInterceptor, the "
f"following are invalid: {invalid_interceptors}"
)
self._server = cygrpc.AioServer(
self._loop,
thread_pool,
generic_handlers,
interceptors,
_augment_channel_arguments(options, compression),
maximum_concurrent_rpcs,
)
def add_generic_rpc_handlers(
self, generic_rpc_handlers: Sequence[grpc.GenericRpcHandler]
) -> None:
"""Registers GenericRpcHandlers with this Server.
This method is only safe to call before the server is started.
Args:
generic_rpc_handlers: A sequence of GenericRpcHandlers that will be
used to service RPCs.
"""
self._server.add_generic_rpc_handlers(generic_rpc_handlers)
def add_insecure_port(self, address: str) -> int:
"""Opens an insecure port for accepting RPCs.
This method may only be called before starting the server.
Args:
address: The address for which to open a port. If the port is 0,
or not specified in the address, then the gRPC runtime will choose a port.
Returns:
An integer port on which the server will accept RPC requests.
"""
return _common.validate_port_binding_result(
address, self._server.add_insecure_port(_common.encode(address))
)
def add_secure_port(
self, address: str, server_credentials: grpc.ServerCredentials
) -> int:
"""Opens a secure port for accepting RPCs.
This method may only be called before starting the server.
Args:
address: The address for which to open a port.
if the port is 0, or not specified in the address, then the gRPC
runtime will choose a port.
server_credentials: A ServerCredentials object.
Returns:
An integer port on which the server will accept RPC requests.
"""
return _common.validate_port_binding_result(
address,
self._server.add_secure_port(
_common.encode(address), server_credentials
),
)
async def start(self) -> None:
"""Starts this Server.
This method may only be called once. (i.e. it is not idempotent).
"""
await self._server.start()
async def stop(self, grace: Optional[float]) -> None:
"""Stops this Server.
This method immediately stops the server from servicing new RPCs in
all cases.
If a grace period is specified, this method returns immediately and all
RPCs active at the end of the grace period are aborted. If a grace
period is not specified (by passing None for grace), all existing RPCs
are aborted immediately and this method blocks until the last RPC
handler terminates.
This method is idempotent and may be called at any time. Passing a
smaller grace value in a subsequent call will have the effect of
stopping the Server sooner (passing None will have the effect of
stopping the server immediately). Passing a larger grace value in a
subsequent call will not have the effect of stopping the server later
(i.e. the most restrictive grace value is used).
Args:
grace: A duration of time in seconds or None.
"""
await self._server.shutdown(grace)
async def wait_for_termination(
self, timeout: Optional[float] = None
) -> bool:
"""Block current coroutine until the server stops.
This is an EXPERIMENTAL API.
The wait will not consume computational resources during blocking, and
it will block until one of the two following conditions are met:
1) The server is stopped or terminated;
2) A timeout occurs if timeout is not `None`.
The timeout argument works in the same way as `threading.Event.wait()`.
https://docs.python.org/3/library/threading.html#threading.Event.wait
Args:
timeout: A floating point number specifying a timeout for the
operation in seconds.
Returns:
A bool indicates if the operation times out.
"""
return await self._server.wait_for_termination(timeout)
def __del__(self):
"""Schedules a graceful shutdown in current event loop.
The Cython AioServer doesn't hold a ref-count to this class. It should
be safe to slightly extend the underlying Cython object's life span.
"""
if hasattr(self, "_server"):
if self._server.is_running():
cygrpc.schedule_coro_threadsafe(
self._server.shutdown(None),
self._loop,
)
def server(
migration_thread_pool: Optional[Executor] = None,
handlers: Optional[Sequence[grpc.GenericRpcHandler]] = None,
interceptors: Optional[Sequence[Any]] = None,
options: Optional[ChannelArgumentType] = None,
maximum_concurrent_rpcs: Optional[int] = None,
compression: Optional[grpc.Compression] = None,
):
"""Creates a Server with which RPCs can be serviced.
Args:
migration_thread_pool: A futures.ThreadPoolExecutor to be used by the
Server to execute non-AsyncIO RPC handlers for migration purpose.
handlers: An optional list of GenericRpcHandlers used for executing RPCs.
More handlers may be added by calling add_generic_rpc_handlers any time
before the server is started.
interceptors: An optional list of ServerInterceptor objects that observe
and optionally manipulate the incoming RPCs before handing them over to
handlers. The interceptors are given control in the order they are
specified. This is an EXPERIMENTAL API.
options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC runtime)
to configure the channel.
maximum_concurrent_rpcs: The maximum number of concurrent RPCs this server
will service before returning RESOURCE_EXHAUSTED status, or None to
indicate no limit.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip. This compression algorithm will be used for the
lifetime of the server unless overridden by set_compression.
Returns:
A Server object.
"""
return Server(
migration_thread_pool,
() if handlers is None else handlers,
() if interceptors is None else interceptors,
() if options is None else options,
maximum_concurrent_rpcs,
compression,
)
| 8,630
| 36.363636
| 94
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/aio/_base_channel.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Abstract base classes for Channel objects and Multicallable objects."""
import abc
from typing import Generic, Optional
import grpc
from . import _base_call
from ._typing import DeserializingFunction
from ._typing import MetadataType
from ._typing import RequestIterableType
from ._typing import RequestType
from ._typing import ResponseType
from ._typing import SerializingFunction
class UnaryUnaryMultiCallable(Generic[RequestType, ResponseType], abc.ABC):
"""Enables asynchronous invocation of a unary-call RPC."""
@abc.abstractmethod
def __call__(
self,
request: RequestType,
*,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> _base_call.UnaryUnaryCall[RequestType, ResponseType]:
"""Asynchronously invokes the underlying RPC.
Args:
request: The request value for the RPC.
timeout: An optional duration of time in seconds to allow
for the RPC.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC. Only valid for
secure Channel.
wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip.
Returns:
A UnaryUnaryCall object.
Raises:
RpcError: Indicates that the RPC terminated with non-OK status. The
raised RpcError will also be a Call for the RPC affording the RPC's
metadata, status code, and details.
"""
class UnaryStreamMultiCallable(Generic[RequestType, ResponseType], abc.ABC):
"""Enables asynchronous invocation of a server-streaming RPC."""
@abc.abstractmethod
def __call__(
self,
request: RequestType,
*,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> _base_call.UnaryStreamCall[RequestType, ResponseType]:
"""Asynchronously invokes the underlying RPC.
Args:
request: The request value for the RPC.
timeout: An optional duration of time in seconds to allow
for the RPC.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC. Only valid for
secure Channel.
wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip.
Returns:
A UnaryStreamCall object.
Raises:
RpcError: Indicates that the RPC terminated with non-OK status. The
raised RpcError will also be a Call for the RPC affording the RPC's
metadata, status code, and details.
"""
class StreamUnaryMultiCallable(abc.ABC):
"""Enables asynchronous invocation of a client-streaming RPC."""
@abc.abstractmethod
def __call__(
self,
request_iterator: Optional[RequestIterableType] = None,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> _base_call.StreamUnaryCall:
"""Asynchronously invokes the underlying RPC.
Args:
request_iterator: An optional async iterable or iterable of request
messages for the RPC.
timeout: An optional duration of time in seconds to allow
for the RPC.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC. Only valid for
secure Channel.
wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip.
Returns:
A StreamUnaryCall object.
Raises:
RpcError: Indicates that the RPC terminated with non-OK status. The
raised RpcError will also be a Call for the RPC affording the RPC's
metadata, status code, and details.
"""
class StreamStreamMultiCallable(abc.ABC):
"""Enables asynchronous invocation of a bidirectional-streaming RPC."""
@abc.abstractmethod
def __call__(
self,
request_iterator: Optional[RequestIterableType] = None,
timeout: Optional[float] = None,
metadata: Optional[MetadataType] = None,
credentials: Optional[grpc.CallCredentials] = None,
wait_for_ready: Optional[bool] = None,
compression: Optional[grpc.Compression] = None,
) -> _base_call.StreamStreamCall:
"""Asynchronously invokes the underlying RPC.
Args:
request_iterator: An optional async iterable or iterable of request
messages for the RPC.
timeout: An optional duration of time in seconds to allow
for the RPC.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC. Only valid for
secure Channel.
wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip.
Returns:
A StreamStreamCall object.
Raises:
RpcError: Indicates that the RPC terminated with non-OK status. The
raised RpcError will also be a Call for the RPC affording the RPC's
metadata, status code, and details.
"""
class Channel(abc.ABC):
"""Enables asynchronous RPC invocation as a client.
Channel objects implement the Asynchronous Context Manager (aka. async
with) type, although they are not supportted to be entered and exited
multiple times.
"""
@abc.abstractmethod
async def __aenter__(self):
"""Starts an asynchronous context manager.
Returns:
Channel the channel that was instantiated.
"""
@abc.abstractmethod
async def __aexit__(self, exc_type, exc_val, exc_tb):
"""Finishes the asynchronous context manager by closing the channel.
Still active RPCs will be cancelled.
"""
@abc.abstractmethod
async def close(self, grace: Optional[float] = None):
"""Closes this Channel and releases all resources held by it.
This method immediately stops the channel from executing new RPCs in
all cases.
If a grace period is specified, this method wait until all active
RPCs are finshed, once the grace period is reached the ones that haven't
been terminated are cancelled. If a grace period is not specified
(by passing None for grace), all existing RPCs are cancelled immediately.
This method is idempotent.
"""
@abc.abstractmethod
def get_state(
self, try_to_connect: bool = False
) -> grpc.ChannelConnectivity:
"""Checks the connectivity state of a channel.
This is an EXPERIMENTAL API.
If the channel reaches a stable connectivity state, it is guaranteed
that the return value of this function will eventually converge to that
state.
Args:
try_to_connect: a bool indicate whether the Channel should try to
connect to peer or not.
Returns: A ChannelConnectivity object.
"""
@abc.abstractmethod
async def wait_for_state_change(
self,
last_observed_state: grpc.ChannelConnectivity,
) -> None:
"""Waits for a change in connectivity state.
This is an EXPERIMENTAL API.
The function blocks until there is a change in the channel connectivity
state from the "last_observed_state". If the state is already
different, this function will return immediately.
There is an inherent race between the invocation of
"Channel.wait_for_state_change" and "Channel.get_state". The state can
change arbitrary many times during the race, so there is no way to
observe every state transition.
If there is a need to put a timeout for this function, please refer to
"asyncio.wait_for".
Args:
last_observed_state: A grpc.ChannelConnectivity object representing
the last known state.
"""
@abc.abstractmethod
async def channel_ready(self) -> None:
"""Creates a coroutine that blocks until the Channel is READY."""
@abc.abstractmethod
def unary_unary(
self,
method: str,
request_serializer: Optional[SerializingFunction] = None,
response_deserializer: Optional[DeserializingFunction] = None,
) -> UnaryUnaryMultiCallable:
"""Creates a UnaryUnaryMultiCallable for a unary-unary method.
Args:
method: The name of the RPC method.
request_serializer: Optional :term:`serializer` for serializing the request
message. Request goes unserialized in case None is passed.
response_deserializer: Optional :term:`deserializer` for deserializing the
response message. Response goes undeserialized in case None
is passed.
Returns:
A UnaryUnaryMultiCallable value for the named unary-unary method.
"""
@abc.abstractmethod
def unary_stream(
self,
method: str,
request_serializer: Optional[SerializingFunction] = None,
response_deserializer: Optional[DeserializingFunction] = None,
) -> UnaryStreamMultiCallable:
"""Creates a UnaryStreamMultiCallable for a unary-stream method.
Args:
method: The name of the RPC method.
request_serializer: Optional :term:`serializer` for serializing the request
message. Request goes unserialized in case None is passed.
response_deserializer: Optional :term:`deserializer` for deserializing the
response message. Response goes undeserialized in case None
is passed.
Returns:
A UnarySteramMultiCallable value for the named unary-stream method.
"""
@abc.abstractmethod
def stream_unary(
self,
method: str,
request_serializer: Optional[SerializingFunction] = None,
response_deserializer: Optional[DeserializingFunction] = None,
) -> StreamUnaryMultiCallable:
"""Creates a StreamUnaryMultiCallable for a stream-unary method.
Args:
method: The name of the RPC method.
request_serializer: Optional :term:`serializer` for serializing the request
message. Request goes unserialized in case None is passed.
response_deserializer: Optional :term:`deserializer` for deserializing the
response message. Response goes undeserialized in case None
is passed.
Returns:
A StreamUnaryMultiCallable value for the named stream-unary method.
"""
@abc.abstractmethod
def stream_stream(
self,
method: str,
request_serializer: Optional[SerializingFunction] = None,
response_deserializer: Optional[DeserializingFunction] = None,
) -> StreamStreamMultiCallable:
"""Creates a StreamStreamMultiCallable for a stream-stream method.
Args:
method: The name of the RPC method.
request_serializer: Optional :term:`serializer` for serializing the request
message. Request goes unserialized in case None is passed.
response_deserializer: Optional :term:`deserializer` for deserializing the
response message. Response goes undeserialized in case None
is passed.
Returns:
A StreamStreamMultiCallable value for the named stream-stream method.
"""
| 13,134
| 36.315341
| 86
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/_cython/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/_cython/_cygrpc/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/experimental/session_cache.py
|
# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""gRPC's APIs for TLS Session Resumption support"""
from grpc._cython import cygrpc as _cygrpc
def ssl_session_cache_lru(capacity):
"""Creates an SSLSessionCache with LRU replacement policy
Args:
capacity: Size of the cache
Returns:
An SSLSessionCache with LRU replacement policy that can be passed as a value for
the grpc.ssl_session_cache option to a grpc.Channel. SSL session caches are used
to store session tickets, which clients can present to resume previous TLS sessions
with a server.
"""
return SSLSessionCache(_cygrpc.SSLSessionCacheLRU(capacity))
class SSLSessionCache(object):
"""An encapsulation of a session cache used for TLS session resumption.
Instances of this class can be passed to a Channel as values for the
grpc.ssl_session_cache option
"""
def __init__(self, cache):
self._cache = cache
def __int__(self):
return int(self._cache)
| 1,533
| 32.347826
| 89
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/experimental/__init__.py
|
# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""gRPC's experimental APIs.
These APIs are subject to be removed during any minor version release.
"""
import copy
import functools
import sys
import warnings
import grpc
from grpc._cython import cygrpc as _cygrpc
_EXPERIMENTAL_APIS_USED = set()
class ChannelOptions(object):
"""Indicates a channel option unique to gRPC Python.
This enumeration is part of an EXPERIMENTAL API.
Attributes:
SingleThreadedUnaryStream: Perform unary-stream RPCs on a single thread.
"""
SingleThreadedUnaryStream = "SingleThreadedUnaryStream"
class UsageError(Exception):
"""Raised by the gRPC library to indicate usage not allowed by the API."""
# It's important that there be a single insecure credentials object so that its
# hash is deterministic and can be used for indexing in the simple stubs cache.
_insecure_channel_credentials = grpc.ChannelCredentials(
_cygrpc.channel_credentials_insecure()
)
def insecure_channel_credentials():
"""Creates a ChannelCredentials for use with an insecure channel.
THIS IS AN EXPERIMENTAL API.
"""
return _insecure_channel_credentials
class ExperimentalApiWarning(Warning):
"""A warning that an API is experimental."""
def _warn_experimental(api_name, stack_offset):
if api_name not in _EXPERIMENTAL_APIS_USED:
_EXPERIMENTAL_APIS_USED.add(api_name)
msg = (
"'{}' is an experimental API. It is subject to change or ".format(
api_name
)
+ "removal between minor releases. Proceed with caution."
)
warnings.warn(msg, ExperimentalApiWarning, stacklevel=2 + stack_offset)
def experimental_api(f):
@functools.wraps(f)
def _wrapper(*args, **kwargs):
_warn_experimental(f.__name__, 1)
return f(*args, **kwargs)
return _wrapper
def wrap_server_method_handler(wrapper, handler):
"""Wraps the server method handler function.
The server implementation requires all server handlers being wrapped as
RpcMethodHandler objects. This helper function ease the pain of writing
server handler wrappers.
Args:
wrapper: A wrapper function that takes in a method handler behavior
(the actual function) and returns a wrapped function.
handler: A RpcMethodHandler object to be wrapped.
Returns:
A newly created RpcMethodHandler.
"""
if not handler:
return None
if not handler.request_streaming:
if not handler.response_streaming:
# NOTE(lidiz) _replace is a public API:
# https://docs.python.org/dev/library/collections.html
return handler._replace(unary_unary=wrapper(handler.unary_unary))
else:
return handler._replace(unary_stream=wrapper(handler.unary_stream))
else:
if not handler.response_streaming:
return handler._replace(stream_unary=wrapper(handler.stream_unary))
else:
return handler._replace(
stream_stream=wrapper(handler.stream_stream)
)
__all__ = (
"ChannelOptions",
"ExperimentalApiWarning",
"UsageError",
"insecure_channel_credentials",
"wrap_server_method_handler",
)
if sys.version_info > (3, 6):
from grpc._simple_stubs import stream_stream
from grpc._simple_stubs import stream_unary
from grpc._simple_stubs import unary_stream
from grpc._simple_stubs import unary_unary
__all__ = __all__ + (unary_unary, unary_stream, stream_unary, stream_stream)
| 4,103
| 29.4
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/experimental/gevent.py
|
# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""gRPC's Python gEvent APIs."""
from grpc._cython import cygrpc as _cygrpc
def init_gevent():
"""Patches gRPC's libraries to be compatible with gevent.
This must be called AFTER the python standard lib has been patched,
but BEFORE creating and gRPC objects.
In order for progress to be made, the application must drive the event loop.
"""
_cygrpc.init_grpc_gevent()
| 973
| 33.785714
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/experimental/aio/__init__.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Alias of grpc.aio to keep backward compatibility."""
from grpc.aio import *
| 660
| 37.882353
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/framework/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/framework/common/cardinality.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines an enum for classifying RPC methods by streaming semantics."""
import enum
@enum.unique
class Cardinality(enum.Enum):
"""Describes the streaming semantics of an RPC method."""
UNARY_UNARY = "request-unary/response-unary"
UNARY_STREAM = "request-unary/response-streaming"
STREAM_UNARY = "request-streaming/response-unary"
STREAM_STREAM = "request-streaming/response-streaming"
| 988
| 35.62963
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/framework/common/style.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines an enum for classifying RPC methods by control flow semantics."""
import enum
@enum.unique
class Service(enum.Enum):
"""Describes the control flow style of RPC method implementation."""
INLINE = "inline"
EVENT = "event"
| 824
| 32
| 76
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/framework/common/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/framework/foundation/stream.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interfaces related to streams of values or objects."""
import abc
class Consumer(abc.ABC):
"""Interface for consumers of finite streams of values or objects."""
@abc.abstractmethod
def consume(self, value):
"""Accepts a value.
Args:
value: Any value accepted by this Consumer.
"""
raise NotImplementedError()
@abc.abstractmethod
def terminate(self):
"""Indicates to this Consumer that no more values will be supplied."""
raise NotImplementedError()
@abc.abstractmethod
def consume_and_terminate(self, value):
"""Supplies a value and signals that no more values will be supplied.
Args:
value: Any value accepted by this Consumer.
"""
raise NotImplementedError()
| 1,377
| 30.318182
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/framework/foundation/callable_util.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for working with callables."""
from abc import ABC
import collections
import enum
import functools
import logging
_LOGGER = logging.getLogger(__name__)
class Outcome(ABC):
"""A sum type describing the outcome of some call.
Attributes:
kind: One of Kind.RETURNED or Kind.RAISED respectively indicating that the
call returned a value or raised an exception.
return_value: The value returned by the call. Must be present if kind is
Kind.RETURNED.
exception: The exception raised by the call. Must be present if kind is
Kind.RAISED.
"""
@enum.unique
class Kind(enum.Enum):
"""Identifies the general kind of the outcome of some call."""
RETURNED = object()
RAISED = object()
class _EasyOutcome(
collections.namedtuple(
"_EasyOutcome", ["kind", "return_value", "exception"]
),
Outcome,
):
"""A trivial implementation of Outcome."""
def _call_logging_exceptions(behavior, message, *args, **kwargs):
try:
return _EasyOutcome(
Outcome.Kind.RETURNED, behavior(*args, **kwargs), None
)
except Exception as e: # pylint: disable=broad-except
_LOGGER.exception(message)
return _EasyOutcome(Outcome.Kind.RAISED, None, e)
def with_exceptions_logged(behavior, message):
"""Wraps a callable in a try-except that logs any exceptions it raises.
Args:
behavior: Any callable.
message: A string to log if the behavior raises an exception.
Returns:
A callable that when executed invokes the given behavior. The returned
callable takes the same arguments as the given behavior but returns a
future.Outcome describing whether the given behavior returned a value or
raised an exception.
"""
@functools.wraps(behavior)
def wrapped_behavior(*args, **kwargs):
return _call_logging_exceptions(behavior, message, *args, **kwargs)
return wrapped_behavior
def call_logging_exceptions(behavior, message, *args, **kwargs):
"""Calls a behavior in a try-except that logs any exceptions it raises.
Args:
behavior: Any callable.
message: A string to log if the behavior raises an exception.
*args: Positional arguments to pass to the given behavior.
**kwargs: Keyword arguments to pass to the given behavior.
Returns:
An Outcome describing whether the given behavior returned a value or raised
an exception.
"""
return _call_logging_exceptions(behavior, message, *args, **kwargs)
| 3,151
| 30.838384
| 81
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/framework/foundation/abandonment.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for indicating abandonment of computation."""
class Abandoned(Exception):
"""Indicates that some computation is being abandoned.
Abandoning a computation is different than returning a value or raising
an exception indicating some operational or programming defect.
"""
| 878
| 37.217391
| 75
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/framework/foundation/logging_pool.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A thread pool that logs exceptions raised by tasks executed within it."""
from concurrent import futures
import logging
_LOGGER = logging.getLogger(__name__)
def _wrap(behavior):
"""Wraps an arbitrary callable behavior in exception-logging."""
def _wrapping(*args, **kwargs):
try:
return behavior(*args, **kwargs)
except Exception:
_LOGGER.exception(
"Unexpected exception from %s executed in logging pool!",
behavior,
)
raise
return _wrapping
class _LoggingPool(object):
"""An exception-logging futures.ThreadPoolExecutor-compatible thread pool."""
def __init__(self, backing_pool):
self._backing_pool = backing_pool
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._backing_pool.shutdown(wait=True)
def submit(self, fn, *args, **kwargs):
return self._backing_pool.submit(_wrap(fn), *args, **kwargs)
def map(self, func, *iterables, **kwargs):
return self._backing_pool.map(
_wrap(func), *iterables, timeout=kwargs.get("timeout", None)
)
def shutdown(self, wait=True):
self._backing_pool.shutdown(wait=wait)
def pool(max_workers):
"""Creates a thread pool that logs exceptions raised by the tasks within it.
Args:
max_workers: The maximum number of worker threads to allow the pool.
Returns:
A futures.ThreadPoolExecutor-compatible thread pool that logs exceptions
raised by the tasks executed within it.
"""
return _LoggingPool(futures.ThreadPoolExecutor(max_workers))
| 2,248
| 29.808219
| 81
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/framework/foundation/future.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A Future interface.
Python doesn't have a Future interface in its standard library. In the absence
of such a standard, three separate, incompatible implementations
(concurrent.futures.Future, ndb.Future, and asyncio.Future) have appeared. This
interface attempts to be as compatible as possible with
concurrent.futures.Future. From ndb.Future it adopts a traceback-object accessor
method.
Unlike the concrete and implemented Future classes listed above, the Future
class defined in this module is an entirely abstract interface that anyone may
implement and use.
The one known incompatibility between this interface and the interface of
concurrent.futures.Future is that this interface defines its own CancelledError
and TimeoutError exceptions rather than raising the implementation-private
concurrent.futures._base.CancelledError and the
built-in-but-only-in-3.3-and-later TimeoutError.
"""
import abc
class TimeoutError(Exception):
"""Indicates that a particular call timed out."""
class CancelledError(Exception):
"""Indicates that the computation underlying a Future was cancelled."""
class Future(abc.ABC):
"""A representation of a computation in another control flow.
Computations represented by a Future may be yet to be begun, may be ongoing,
or may have already completed.
"""
# NOTE(nathaniel): This isn't the return type that I would want to have if it
# were up to me. Were this interface being written from scratch, the return
# type of this method would probably be a sum type like:
#
# NOT_COMMENCED
# COMMENCED_AND_NOT_COMPLETED
# PARTIAL_RESULT<Partial_Result_Type>
# COMPLETED<Result_Type>
# UNCANCELLABLE
# NOT_IMMEDIATELY_DETERMINABLE
@abc.abstractmethod
def cancel(self):
"""Attempts to cancel the computation.
This method does not block.
Returns:
True if the computation has not yet begun, will not be allowed to take
place, and determination of both was possible without blocking. False
under all other circumstances including but not limited to the
computation's already having begun, the computation's already having
finished, and the computation's having been scheduled for execution on a
remote system for which a determination of whether or not it commenced
before being cancelled cannot be made without blocking.
"""
raise NotImplementedError()
# NOTE(nathaniel): Here too this isn't the return type that I'd want this
# method to have if it were up to me. I think I'd go with another sum type
# like:
#
# NOT_CANCELLED (this object's cancel method hasn't been called)
# NOT_COMMENCED
# COMMENCED_AND_NOT_COMPLETED
# PARTIAL_RESULT<Partial_Result_Type>
# COMPLETED<Result_Type>
# UNCANCELLABLE
# NOT_IMMEDIATELY_DETERMINABLE
#
# Notice how giving the cancel method the right semantics obviates most
# reasons for this method to exist.
@abc.abstractmethod
def cancelled(self):
"""Describes whether the computation was cancelled.
This method does not block.
Returns:
True if the computation was cancelled any time before its result became
immediately available. False under all other circumstances including but
not limited to this object's cancel method not having been called and
the computation's result having become immediately available.
"""
raise NotImplementedError()
@abc.abstractmethod
def running(self):
"""Describes whether the computation is taking place.
This method does not block.
Returns:
True if the computation is scheduled to take place in the future or is
taking place now, or False if the computation took place in the past or
was cancelled.
"""
raise NotImplementedError()
# NOTE(nathaniel): These aren't quite the semantics I'd like here either. I
# would rather this only returned True in cases in which the underlying
# computation completed successfully. A computation's having been cancelled
# conflicts with considering that computation "done".
@abc.abstractmethod
def done(self):
"""Describes whether the computation has taken place.
This method does not block.
Returns:
True if the computation is known to have either completed or have been
unscheduled or interrupted. False if the computation may possibly be
executing or scheduled to execute later.
"""
raise NotImplementedError()
@abc.abstractmethod
def result(self, timeout=None):
"""Accesses the outcome of the computation or raises its exception.
This method may return immediately or may block.
Args:
timeout: The length of time in seconds to wait for the computation to
finish or be cancelled, or None if this method should block until the
computation has finished or is cancelled no matter how long that takes.
Returns:
The return value of the computation.
Raises:
TimeoutError: If a timeout value is passed and the computation does not
terminate within the allotted time.
CancelledError: If the computation was cancelled.
Exception: If the computation raised an exception, this call will raise
the same exception.
"""
raise NotImplementedError()
@abc.abstractmethod
def exception(self, timeout=None):
"""Return the exception raised by the computation.
This method may return immediately or may block.
Args:
timeout: The length of time in seconds to wait for the computation to
terminate or be cancelled, or None if this method should block until
the computation is terminated or is cancelled no matter how long that
takes.
Returns:
The exception raised by the computation, or None if the computation did
not raise an exception.
Raises:
TimeoutError: If a timeout value is passed and the computation does not
terminate within the allotted time.
CancelledError: If the computation was cancelled.
"""
raise NotImplementedError()
@abc.abstractmethod
def traceback(self, timeout=None):
"""Access the traceback of the exception raised by the computation.
This method may return immediately or may block.
Args:
timeout: The length of time in seconds to wait for the computation to
terminate or be cancelled, or None if this method should block until
the computation is terminated or is cancelled no matter how long that
takes.
Returns:
The traceback of the exception raised by the computation, or None if the
computation did not raise an exception.
Raises:
TimeoutError: If a timeout value is passed and the computation does not
terminate within the allotted time.
CancelledError: If the computation was cancelled.
"""
raise NotImplementedError()
@abc.abstractmethod
def add_done_callback(self, fn):
"""Adds a function to be called at completion of the computation.
The callback will be passed this Future object describing the outcome of
the computation.
If the computation has already completed, the callback will be called
immediately.
Args:
fn: A callable taking this Future object as its single parameter.
"""
raise NotImplementedError()
| 8,373
| 37.063636
| 84
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/framework/foundation/stream_util.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpful utilities related to the stream module."""
import logging
import threading
from grpc.framework.foundation import stream
_NO_VALUE = object()
_LOGGER = logging.getLogger(__name__)
class TransformingConsumer(stream.Consumer):
"""A stream.Consumer that passes a transformation of its input to another."""
def __init__(self, transformation, downstream):
self._transformation = transformation
self._downstream = downstream
def consume(self, value):
self._downstream.consume(self._transformation(value))
def terminate(self):
self._downstream.terminate()
def consume_and_terminate(self, value):
self._downstream.consume_and_terminate(self._transformation(value))
class IterableConsumer(stream.Consumer):
"""A Consumer that when iterated over emits the values it has consumed."""
def __init__(self):
self._condition = threading.Condition()
self._values = []
self._active = True
def consume(self, value):
with self._condition:
if self._active:
self._values.append(value)
self._condition.notify()
def terminate(self):
with self._condition:
self._active = False
self._condition.notify()
def consume_and_terminate(self, value):
with self._condition:
if self._active:
self._values.append(value)
self._active = False
self._condition.notify()
def __iter__(self):
return self
def __next__(self):
return self.next()
def next(self):
with self._condition:
while self._active and not self._values:
self._condition.wait()
if self._values:
return self._values.pop(0)
else:
raise StopIteration()
class ThreadSwitchingConsumer(stream.Consumer):
"""A Consumer decorator that affords serialization and asynchrony."""
def __init__(self, sink, pool):
self._lock = threading.Lock()
self._sink = sink
self._pool = pool
# True if self._spin has been submitted to the pool to be called once and
# that call has not yet returned, False otherwise.
self._spinning = False
self._values = []
self._active = True
def _spin(self, sink, value, terminate):
while True:
try:
if value is _NO_VALUE:
sink.terminate()
elif terminate:
sink.consume_and_terminate(value)
else:
sink.consume(value)
except Exception as e: # pylint:disable=broad-except
_LOGGER.exception(e)
with self._lock:
if terminate:
self._spinning = False
return
elif self._values:
value = self._values.pop(0)
terminate = not self._values and not self._active
elif not self._active:
value = _NO_VALUE
terminate = True
else:
self._spinning = False
return
def consume(self, value):
with self._lock:
if self._active:
if self._spinning:
self._values.append(value)
else:
self._pool.submit(self._spin, self._sink, value, False)
self._spinning = True
def terminate(self):
with self._lock:
if self._active:
self._active = False
if not self._spinning:
self._pool.submit(self._spin, self._sink, _NO_VALUE, True)
self._spinning = True
def consume_and_terminate(self, value):
with self._lock:
if self._active:
self._active = False
if self._spinning:
self._values.append(value)
else:
self._pool.submit(self._spin, self._sink, value, True)
self._spinning = True
| 4,772
| 31.033557
| 81
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/framework/foundation/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/framework/interfaces/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/framework/interfaces/base/base.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The base interface of RPC Framework.
Implementations of this interface support the conduct of "operations":
exchanges between two distinct ends of an arbitrary number of data payloads
and metadata such as a name for the operation, initial and terminal metadata
in each direction, and flow control. These operations may be used for transfers
of data, remote procedure calls, status indication, or anything else
applications choose.
"""
# threading is referenced from specification in this module.
import abc
import enum
import threading # pylint: disable=unused-import
# pylint: disable=too-many-arguments
class NoSuchMethodError(Exception):
"""Indicates that an unrecognized operation has been called.
Attributes:
code: A code value to communicate to the other side of the operation
along with indication of operation termination. May be None.
details: A details value to communicate to the other side of the
operation along with indication of operation termination. May be None.
"""
def __init__(self, code, details):
"""Constructor.
Args:
code: A code value to communicate to the other side of the operation
along with indication of operation termination. May be None.
details: A details value to communicate to the other side of the
operation along with indication of operation termination. May be None.
"""
super(NoSuchMethodError, self).__init__()
self.code = code
self.details = details
class Outcome(object):
"""The outcome of an operation.
Attributes:
kind: A Kind value coarsely identifying how the operation terminated.
code: An application-specific code value or None if no such value was
provided.
details: An application-specific details value or None if no such value was
provided.
"""
@enum.unique
class Kind(enum.Enum):
"""Ways in which an operation can terminate."""
COMPLETED = "completed"
CANCELLED = "cancelled"
EXPIRED = "expired"
LOCAL_SHUTDOWN = "local shutdown"
REMOTE_SHUTDOWN = "remote shutdown"
RECEPTION_FAILURE = "reception failure"
TRANSMISSION_FAILURE = "transmission failure"
LOCAL_FAILURE = "local failure"
REMOTE_FAILURE = "remote failure"
class Completion(abc.ABC):
"""An aggregate of the values exchanged upon operation completion.
Attributes:
terminal_metadata: A terminal metadata value for the operaton.
code: A code value for the operation.
message: A message value for the operation.
"""
class OperationContext(abc.ABC):
"""Provides operation-related information and action."""
@abc.abstractmethod
def outcome(self):
"""Indicates the operation's outcome (or that the operation is ongoing).
Returns:
None if the operation is still active or the Outcome value for the
operation if it has terminated.
"""
raise NotImplementedError()
@abc.abstractmethod
def add_termination_callback(self, callback):
"""Adds a function to be called upon operation termination.
Args:
callback: A callable to be passed an Outcome value on operation
termination.
Returns:
None if the operation has not yet terminated and the passed callback will
later be called when it does terminate, or if the operation has already
terminated an Outcome value describing the operation termination and the
passed callback will not be called as a result of this method call.
"""
raise NotImplementedError()
@abc.abstractmethod
def time_remaining(self):
"""Describes the length of allowed time remaining for the operation.
Returns:
A nonnegative float indicating the length of allowed time in seconds
remaining for the operation to complete before it is considered to have
timed out. Zero is returned if the operation has terminated.
"""
raise NotImplementedError()
@abc.abstractmethod
def cancel(self):
"""Cancels the operation if the operation has not yet terminated."""
raise NotImplementedError()
@abc.abstractmethod
def fail(self, exception):
"""Indicates that the operation has failed.
Args:
exception: An exception germane to the operation failure. May be None.
"""
raise NotImplementedError()
class Operator(abc.ABC):
"""An interface through which to participate in an operation."""
@abc.abstractmethod
def advance(
self,
initial_metadata=None,
payload=None,
completion=None,
allowance=None,
):
"""Progresses the operation.
Args:
initial_metadata: An initial metadata value. Only one may ever be
communicated in each direction for an operation, and they must be
communicated no later than either the first payload or the completion.
payload: A payload value.
completion: A Completion value. May only ever be non-None once in either
direction, and no payloads may be passed after it has been communicated.
allowance: A positive integer communicating the number of additional
payloads allowed to be passed by the remote side of the operation.
"""
raise NotImplementedError()
class ProtocolReceiver(abc.ABC):
"""A means of receiving protocol values during an operation."""
@abc.abstractmethod
def context(self, protocol_context):
"""Accepts the protocol context object for the operation.
Args:
protocol_context: The protocol context object for the operation.
"""
raise NotImplementedError()
class Subscription(abc.ABC):
"""Describes customer code's interest in values from the other side.
Attributes:
kind: A Kind value describing the overall kind of this value.
termination_callback: A callable to be passed the Outcome associated with
the operation after it has terminated. Must be non-None if kind is
Kind.TERMINATION_ONLY. Must be None otherwise.
allowance: A callable behavior that accepts positive integers representing
the number of additional payloads allowed to be passed to the other side
of the operation. Must be None if kind is Kind.FULL. Must not be None
otherwise.
operator: An Operator to be passed values from the other side of the
operation. Must be non-None if kind is Kind.FULL. Must be None otherwise.
protocol_receiver: A ProtocolReceiver to be passed protocol objects as they
become available during the operation. Must be non-None if kind is
Kind.FULL.
"""
@enum.unique
class Kind(enum.Enum):
NONE = "none"
TERMINATION_ONLY = "termination only"
FULL = "full"
class Servicer(abc.ABC):
"""Interface for service implementations."""
@abc.abstractmethod
def service(self, group, method, context, output_operator):
"""Services an operation.
Args:
group: The group identifier of the operation to be serviced.
method: The method identifier of the operation to be serviced.
context: An OperationContext object affording contextual information and
actions.
output_operator: An Operator that will accept output values of the
operation.
Returns:
A Subscription via which this object may or may not accept more values of
the operation.
Raises:
NoSuchMethodError: If this Servicer does not handle operations with the
given group and method.
abandonment.Abandoned: If the operation has been aborted and there no
longer is any reason to service the operation.
"""
raise NotImplementedError()
class End(abc.ABC):
"""Common type for entry-point objects on both sides of an operation."""
@abc.abstractmethod
def start(self):
"""Starts this object's service of operations."""
raise NotImplementedError()
@abc.abstractmethod
def stop(self, grace):
"""Stops this object's service of operations.
This object will refuse service of new operations as soon as this method is
called but operations under way at the time of the call may be given a
grace period during which they are allowed to finish.
Args:
grace: A duration of time in seconds to allow ongoing operations to
terminate before being forcefully terminated by the stopping of this
End. May be zero to terminate all ongoing operations and immediately
stop.
Returns:
A threading.Event that will be set to indicate all operations having
terminated and this End having completely stopped. The returned event
may not be set until after the full grace period (if some ongoing
operation continues for the full length of the period) or it may be set
much sooner (if for example this End had no operations in progress at
the time its stop method was called).
"""
raise NotImplementedError()
@abc.abstractmethod
def operate(
self,
group,
method,
subscription,
timeout,
initial_metadata=None,
payload=None,
completion=None,
protocol_options=None,
):
"""Commences an operation.
Args:
group: The group identifier of the invoked operation.
method: The method identifier of the invoked operation.
subscription: A Subscription to which the results of the operation will be
passed.
timeout: A length of time in seconds to allow for the operation.
initial_metadata: An initial metadata value to be sent to the other side
of the operation. May be None if the initial metadata will be later
passed via the returned operator or if there will be no initial metadata
passed at all.
payload: An initial payload for the operation.
completion: A Completion value indicating the end of transmission to the
other side of the operation.
protocol_options: A value specified by the provider of a Base interface
implementation affording custom state and behavior.
Returns:
A pair of objects affording information about the operation and action
continuing the operation. The first element of the returned pair is an
OperationContext for the operation and the second element of the
returned pair is an Operator to which operation values not passed in
this call should later be passed.
"""
raise NotImplementedError()
@abc.abstractmethod
def operation_stats(self):
"""Reports the number of terminated operations broken down by outcome.
Returns:
A dictionary from Outcome.Kind value to an integer identifying the number
of operations that terminated with that outcome kind.
"""
raise NotImplementedError()
@abc.abstractmethod
def add_idle_action(self, action):
"""Adds an action to be called when this End has no ongoing operations.
Args:
action: A callable that accepts no arguments.
"""
raise NotImplementedError()
| 12,233
| 36.18541
| 84
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/framework/interfaces/base/utilities.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for use with the base interface of RPC Framework."""
import collections
from grpc.framework.interfaces.base import base
class _Completion(
base.Completion,
collections.namedtuple(
"_Completion",
(
"terminal_metadata",
"code",
"message",
),
),
):
"""A trivial implementation of base.Completion."""
class _Subscription(
base.Subscription,
collections.namedtuple(
"_Subscription",
(
"kind",
"termination_callback",
"allowance",
"operator",
"protocol_receiver",
),
),
):
"""A trivial implementation of base.Subscription."""
_NONE_SUBSCRIPTION = _Subscription(
base.Subscription.Kind.NONE, None, None, None, None
)
def completion(terminal_metadata, code, message):
"""Creates a base.Completion aggregating the given operation values.
Args:
terminal_metadata: A terminal metadata value for an operaton.
code: A code value for an operation.
message: A message value for an operation.
Returns:
A base.Completion aggregating the given operation values.
"""
return _Completion(terminal_metadata, code, message)
def full_subscription(operator, protocol_receiver):
"""Creates a "full" base.Subscription for the given base.Operator.
Args:
operator: A base.Operator to be used in an operation.
protocol_receiver: A base.ProtocolReceiver to be used in an operation.
Returns:
A base.Subscription of kind base.Subscription.Kind.FULL wrapping the given
base.Operator and base.ProtocolReceiver.
"""
return _Subscription(
base.Subscription.Kind.FULL, None, None, operator, protocol_receiver
)
| 2,361
| 27.119048
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/framework/interfaces/base/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/framework/interfaces/face/utilities.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for RPC Framework's Face interface."""
import collections
# stream is referenced from specification in this module.
from grpc.framework.common import cardinality
from grpc.framework.common import style
from grpc.framework.foundation import stream # pylint: disable=unused-import
from grpc.framework.interfaces.face import face
class _MethodImplementation(
face.MethodImplementation,
collections.namedtuple(
"_MethodImplementation",
[
"cardinality",
"style",
"unary_unary_inline",
"unary_stream_inline",
"stream_unary_inline",
"stream_stream_inline",
"unary_unary_event",
"unary_stream_event",
"stream_unary_event",
"stream_stream_event",
],
),
):
pass
def unary_unary_inline(behavior):
"""Creates an face.MethodImplementation for the given behavior.
Args:
behavior: The implementation of a unary-unary RPC method as a callable value
that takes a request value and an face.ServicerContext object and
returns a response value.
Returns:
An face.MethodImplementation derived from the given behavior.
"""
return _MethodImplementation(
cardinality.Cardinality.UNARY_UNARY,
style.Service.INLINE,
behavior,
None,
None,
None,
None,
None,
None,
None,
)
def unary_stream_inline(behavior):
"""Creates an face.MethodImplementation for the given behavior.
Args:
behavior: The implementation of a unary-stream RPC method as a callable
value that takes a request value and an face.ServicerContext object and
returns an iterator of response values.
Returns:
An face.MethodImplementation derived from the given behavior.
"""
return _MethodImplementation(
cardinality.Cardinality.UNARY_STREAM,
style.Service.INLINE,
None,
behavior,
None,
None,
None,
None,
None,
None,
)
def stream_unary_inline(behavior):
"""Creates an face.MethodImplementation for the given behavior.
Args:
behavior: The implementation of a stream-unary RPC method as a callable
value that takes an iterator of request values and an
face.ServicerContext object and returns a response value.
Returns:
An face.MethodImplementation derived from the given behavior.
"""
return _MethodImplementation(
cardinality.Cardinality.STREAM_UNARY,
style.Service.INLINE,
None,
None,
behavior,
None,
None,
None,
None,
None,
)
def stream_stream_inline(behavior):
"""Creates an face.MethodImplementation for the given behavior.
Args:
behavior: The implementation of a stream-stream RPC method as a callable
value that takes an iterator of request values and an
face.ServicerContext object and returns an iterator of response values.
Returns:
An face.MethodImplementation derived from the given behavior.
"""
return _MethodImplementation(
cardinality.Cardinality.STREAM_STREAM,
style.Service.INLINE,
None,
None,
None,
behavior,
None,
None,
None,
None,
)
def unary_unary_event(behavior):
"""Creates an face.MethodImplementation for the given behavior.
Args:
behavior: The implementation of a unary-unary RPC method as a callable
value that takes a request value, a response callback to which to pass
the response value of the RPC, and an face.ServicerContext.
Returns:
An face.MethodImplementation derived from the given behavior.
"""
return _MethodImplementation(
cardinality.Cardinality.UNARY_UNARY,
style.Service.EVENT,
None,
None,
None,
None,
behavior,
None,
None,
None,
)
def unary_stream_event(behavior):
"""Creates an face.MethodImplementation for the given behavior.
Args:
behavior: The implementation of a unary-stream RPC method as a callable
value that takes a request value, a stream.Consumer to which to pass the
the response values of the RPC, and an face.ServicerContext.
Returns:
An face.MethodImplementation derived from the given behavior.
"""
return _MethodImplementation(
cardinality.Cardinality.UNARY_STREAM,
style.Service.EVENT,
None,
None,
None,
None,
None,
behavior,
None,
None,
)
def stream_unary_event(behavior):
"""Creates an face.MethodImplementation for the given behavior.
Args:
behavior: The implementation of a stream-unary RPC method as a callable
value that takes a response callback to which to pass the response value
of the RPC and an face.ServicerContext and returns a stream.Consumer to
which the request values of the RPC should be passed.
Returns:
An face.MethodImplementation derived from the given behavior.
"""
return _MethodImplementation(
cardinality.Cardinality.STREAM_UNARY,
style.Service.EVENT,
None,
None,
None,
None,
None,
None,
behavior,
None,
)
def stream_stream_event(behavior):
"""Creates an face.MethodImplementation for the given behavior.
Args:
behavior: The implementation of a stream-stream RPC method as a callable
value that takes a stream.Consumer to which to pass the response values
of the RPC and an face.ServicerContext and returns a stream.Consumer to
which the request values of the RPC should be passed.
Returns:
An face.MethodImplementation derived from the given behavior.
"""
return _MethodImplementation(
cardinality.Cardinality.STREAM_STREAM,
style.Service.EVENT,
None,
None,
None,
None,
None,
None,
None,
behavior,
)
| 6,781
| 26.569106
| 82
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/framework/interfaces/face/face.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interfaces defining the Face layer of RPC Framework."""
import abc
import collections
import enum
# cardinality, style, abandonment, future, and stream are
# referenced from specification in this module.
from grpc.framework.common import cardinality # pylint: disable=unused-import
from grpc.framework.common import style # pylint: disable=unused-import
from grpc.framework.foundation import future # pylint: disable=unused-import
from grpc.framework.foundation import stream # pylint: disable=unused-import
# pylint: disable=too-many-arguments
class NoSuchMethodError(Exception):
"""Raised by customer code to indicate an unrecognized method.
Attributes:
group: The group of the unrecognized method.
name: The name of the unrecognized method.
"""
def __init__(self, group, method):
"""Constructor.
Args:
group: The group identifier of the unrecognized RPC name.
method: The method identifier of the unrecognized RPC name.
"""
super(NoSuchMethodError, self).__init__()
self.group = group
self.method = method
def __repr__(self):
return "face.NoSuchMethodError(%s, %s)" % (
self.group,
self.method,
)
class Abortion(
collections.namedtuple(
"Abortion",
(
"kind",
"initial_metadata",
"terminal_metadata",
"code",
"details",
),
)
):
"""A value describing RPC abortion.
Attributes:
kind: A Kind value identifying how the RPC failed.
initial_metadata: The initial metadata from the other side of the RPC or
None if no initial metadata value was received.
terminal_metadata: The terminal metadata from the other side of the RPC or
None if no terminal metadata value was received.
code: The code value from the other side of the RPC or None if no code value
was received.
details: The details value from the other side of the RPC or None if no
details value was received.
"""
@enum.unique
class Kind(enum.Enum):
"""Types of RPC abortion."""
CANCELLED = "cancelled"
EXPIRED = "expired"
LOCAL_SHUTDOWN = "local shutdown"
REMOTE_SHUTDOWN = "remote shutdown"
NETWORK_FAILURE = "network failure"
LOCAL_FAILURE = "local failure"
REMOTE_FAILURE = "remote failure"
class AbortionError(Exception, metaclass=abc.ABCMeta):
"""Common super type for exceptions indicating RPC abortion.
initial_metadata: The initial metadata from the other side of the RPC or
None if no initial metadata value was received.
terminal_metadata: The terminal metadata from the other side of the RPC or
None if no terminal metadata value was received.
code: The code value from the other side of the RPC or None if no code value
was received.
details: The details value from the other side of the RPC or None if no
details value was received.
"""
def __init__(self, initial_metadata, terminal_metadata, code, details):
super(AbortionError, self).__init__()
self.initial_metadata = initial_metadata
self.terminal_metadata = terminal_metadata
self.code = code
self.details = details
def __str__(self):
return '%s(code=%s, details="%s")' % (
self.__class__.__name__,
self.code,
self.details,
)
class CancellationError(AbortionError):
"""Indicates that an RPC has been cancelled."""
class ExpirationError(AbortionError):
"""Indicates that an RPC has expired ("timed out")."""
class LocalShutdownError(AbortionError):
"""Indicates that an RPC has terminated due to local shutdown of RPCs."""
class RemoteShutdownError(AbortionError):
"""Indicates that an RPC has terminated due to remote shutdown of RPCs."""
class NetworkError(AbortionError):
"""Indicates that some error occurred on the network."""
class LocalError(AbortionError):
"""Indicates that an RPC has terminated due to a local defect."""
class RemoteError(AbortionError):
"""Indicates that an RPC has terminated due to a remote defect."""
class RpcContext(abc.ABC):
"""Provides RPC-related information and action."""
@abc.abstractmethod
def is_active(self):
"""Describes whether the RPC is active or has terminated."""
raise NotImplementedError()
@abc.abstractmethod
def time_remaining(self):
"""Describes the length of allowed time remaining for the RPC.
Returns:
A nonnegative float indicating the length of allowed time in seconds
remaining for the RPC to complete before it is considered to have timed
out.
"""
raise NotImplementedError()
@abc.abstractmethod
def add_abortion_callback(self, abortion_callback):
"""Registers a callback to be called if the RPC is aborted.
Args:
abortion_callback: A callable to be called and passed an Abortion value
in the event of RPC abortion.
"""
raise NotImplementedError()
@abc.abstractmethod
def cancel(self):
"""Cancels the RPC.
Idempotent and has no effect if the RPC has already terminated.
"""
raise NotImplementedError()
@abc.abstractmethod
def protocol_context(self):
"""Accesses a custom object specified by an implementation provider.
Returns:
A value specified by the provider of a Face interface implementation
affording custom state and behavior.
"""
raise NotImplementedError()
class Call(RpcContext, metaclass=abc.ABCMeta):
"""Invocation-side utility object for an RPC."""
@abc.abstractmethod
def initial_metadata(self):
"""Accesses the initial metadata from the service-side of the RPC.
This method blocks until the value is available or is known not to have been
emitted from the service-side of the RPC.
Returns:
The initial metadata object emitted by the service-side of the RPC, or
None if there was no such value.
"""
raise NotImplementedError()
@abc.abstractmethod
def terminal_metadata(self):
"""Accesses the terminal metadata from the service-side of the RPC.
This method blocks until the value is available or is known not to have been
emitted from the service-side of the RPC.
Returns:
The terminal metadata object emitted by the service-side of the RPC, or
None if there was no such value.
"""
raise NotImplementedError()
@abc.abstractmethod
def code(self):
"""Accesses the code emitted by the service-side of the RPC.
This method blocks until the value is available or is known not to have been
emitted from the service-side of the RPC.
Returns:
The code object emitted by the service-side of the RPC, or None if there
was no such value.
"""
raise NotImplementedError()
@abc.abstractmethod
def details(self):
"""Accesses the details value emitted by the service-side of the RPC.
This method blocks until the value is available or is known not to have been
emitted from the service-side of the RPC.
Returns:
The details value emitted by the service-side of the RPC, or None if there
was no such value.
"""
raise NotImplementedError()
class ServicerContext(RpcContext, metaclass=abc.ABCMeta):
"""A context object passed to method implementations."""
@abc.abstractmethod
def invocation_metadata(self):
"""Accesses the metadata from the invocation-side of the RPC.
This method blocks until the value is available or is known not to have been
emitted from the invocation-side of the RPC.
Returns:
The metadata object emitted by the invocation-side of the RPC, or None if
there was no such value.
"""
raise NotImplementedError()
@abc.abstractmethod
def initial_metadata(self, initial_metadata):
"""Accepts the service-side initial metadata value of the RPC.
This method need not be called by method implementations if they have no
service-side initial metadata to transmit.
Args:
initial_metadata: The service-side initial metadata value of the RPC to
be transmitted to the invocation side of the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def terminal_metadata(self, terminal_metadata):
"""Accepts the service-side terminal metadata value of the RPC.
This method need not be called by method implementations if they have no
service-side terminal metadata to transmit.
Args:
terminal_metadata: The service-side terminal metadata value of the RPC to
be transmitted to the invocation side of the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def code(self, code):
"""Accepts the service-side code of the RPC.
This method need not be called by method implementations if they have no
code to transmit.
Args:
code: The code of the RPC to be transmitted to the invocation side of the
RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def details(self, details):
"""Accepts the service-side details of the RPC.
This method need not be called by method implementations if they have no
service-side details to transmit.
Args:
details: The service-side details value of the RPC to be transmitted to
the invocation side of the RPC.
"""
raise NotImplementedError()
class ResponseReceiver(abc.ABC):
"""Invocation-side object used to accept the output of an RPC."""
@abc.abstractmethod
def initial_metadata(self, initial_metadata):
"""Receives the initial metadata from the service-side of the RPC.
Args:
initial_metadata: The initial metadata object emitted from the
service-side of the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def response(self, response):
"""Receives a response from the service-side of the RPC.
Args:
response: A response object emitted from the service-side of the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def complete(self, terminal_metadata, code, details):
"""Receives the completion values emitted from the service-side of the RPC.
Args:
terminal_metadata: The terminal metadata object emitted from the
service-side of the RPC.
code: The code object emitted from the service-side of the RPC.
details: The details object emitted from the service-side of the RPC.
"""
raise NotImplementedError()
class UnaryUnaryMultiCallable(abc.ABC):
"""Affords invoking a unary-unary RPC in any call style."""
@abc.abstractmethod
def __call__(
self,
request,
timeout,
metadata=None,
with_call=False,
protocol_options=None,
):
"""Synchronously invokes the underlying RPC.
Args:
request: The request value for the RPC.
timeout: A duration of time in seconds to allow for the RPC.
metadata: A metadata value to be passed to the service-side of
the RPC.
with_call: Whether or not to include return a Call for the RPC in addition
to the response.
protocol_options: A value specified by the provider of a Face interface
implementation affording custom state and behavior.
Returns:
The response value for the RPC, and a Call for the RPC if with_call was
set to True at invocation.
Raises:
AbortionError: Indicating that the RPC was aborted.
"""
raise NotImplementedError()
@abc.abstractmethod
def future(self, request, timeout, metadata=None, protocol_options=None):
"""Asynchronously invokes the underlying RPC.
Args:
request: The request value for the RPC.
timeout: A duration of time in seconds to allow for the RPC.
metadata: A metadata value to be passed to the service-side of
the RPC.
protocol_options: A value specified by the provider of a Face interface
implementation affording custom state and behavior.
Returns:
An object that is both a Call for the RPC and a future.Future. In the
event of RPC completion, the return Future's result value will be the
response value of the RPC. In the event of RPC abortion, the returned
Future's exception value will be an AbortionError.
"""
raise NotImplementedError()
@abc.abstractmethod
def event(
self,
request,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None,
):
"""Asynchronously invokes the underlying RPC.
Args:
request: The request value for the RPC.
receiver: A ResponseReceiver to be passed the response data of the RPC.
abortion_callback: A callback to be called and passed an Abortion value
in the event of RPC abortion.
timeout: A duration of time in seconds to allow for the RPC.
metadata: A metadata value to be passed to the service-side of
the RPC.
protocol_options: A value specified by the provider of a Face interface
implementation affording custom state and behavior.
Returns:
A Call for the RPC.
"""
raise NotImplementedError()
class UnaryStreamMultiCallable(abc.ABC):
"""Affords invoking a unary-stream RPC in any call style."""
@abc.abstractmethod
def __call__(self, request, timeout, metadata=None, protocol_options=None):
"""Invokes the underlying RPC.
Args:
request: The request value for the RPC.
timeout: A duration of time in seconds to allow for the RPC.
metadata: A metadata value to be passed to the service-side of
the RPC.
protocol_options: A value specified by the provider of a Face interface
implementation affording custom state and behavior.
Returns:
An object that is both a Call for the RPC and an iterator of response
values. Drawing response values from the returned iterator may raise
AbortionError indicating abortion of the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def event(
self,
request,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None,
):
"""Asynchronously invokes the underlying RPC.
Args:
request: The request value for the RPC.
receiver: A ResponseReceiver to be passed the response data of the RPC.
abortion_callback: A callback to be called and passed an Abortion value
in the event of RPC abortion.
timeout: A duration of time in seconds to allow for the RPC.
metadata: A metadata value to be passed to the service-side of
the RPC.
protocol_options: A value specified by the provider of a Face interface
implementation affording custom state and behavior.
Returns:
A Call object for the RPC.
"""
raise NotImplementedError()
class StreamUnaryMultiCallable(abc.ABC):
"""Affords invoking a stream-unary RPC in any call style."""
@abc.abstractmethod
def __call__(
self,
request_iterator,
timeout,
metadata=None,
with_call=False,
protocol_options=None,
):
"""Synchronously invokes the underlying RPC.
Args:
request_iterator: An iterator that yields request values for the RPC.
timeout: A duration of time in seconds to allow for the RPC.
metadata: A metadata value to be passed to the service-side of
the RPC.
with_call: Whether or not to include return a Call for the RPC in addition
to the response.
protocol_options: A value specified by the provider of a Face interface
implementation affording custom state and behavior.
Returns:
The response value for the RPC, and a Call for the RPC if with_call was
set to True at invocation.
Raises:
AbortionError: Indicating that the RPC was aborted.
"""
raise NotImplementedError()
@abc.abstractmethod
def future(
self, request_iterator, timeout, metadata=None, protocol_options=None
):
"""Asynchronously invokes the underlying RPC.
Args:
request_iterator: An iterator that yields request values for the RPC.
timeout: A duration of time in seconds to allow for the RPC.
metadata: A metadata value to be passed to the service-side of
the RPC.
protocol_options: A value specified by the provider of a Face interface
implementation affording custom state and behavior.
Returns:
An object that is both a Call for the RPC and a future.Future. In the
event of RPC completion, the return Future's result value will be the
response value of the RPC. In the event of RPC abortion, the returned
Future's exception value will be an AbortionError.
"""
raise NotImplementedError()
@abc.abstractmethod
def event(
self,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None,
):
"""Asynchronously invokes the underlying RPC.
Args:
receiver: A ResponseReceiver to be passed the response data of the RPC.
abortion_callback: A callback to be called and passed an Abortion value
in the event of RPC abortion.
timeout: A duration of time in seconds to allow for the RPC.
metadata: A metadata value to be passed to the service-side of
the RPC.
protocol_options: A value specified by the provider of a Face interface
implementation affording custom state and behavior.
Returns:
A single object that is both a Call object for the RPC and a
stream.Consumer to which the request values of the RPC should be passed.
"""
raise NotImplementedError()
class StreamStreamMultiCallable(abc.ABC):
"""Affords invoking a stream-stream RPC in any call style."""
@abc.abstractmethod
def __call__(
self, request_iterator, timeout, metadata=None, protocol_options=None
):
"""Invokes the underlying RPC.
Args:
request_iterator: An iterator that yields request values for the RPC.
timeout: A duration of time in seconds to allow for the RPC.
metadata: A metadata value to be passed to the service-side of
the RPC.
protocol_options: A value specified by the provider of a Face interface
implementation affording custom state and behavior.
Returns:
An object that is both a Call for the RPC and an iterator of response
values. Drawing response values from the returned iterator may raise
AbortionError indicating abortion of the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def event(
self,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None,
):
"""Asynchronously invokes the underlying RPC.
Args:
receiver: A ResponseReceiver to be passed the response data of the RPC.
abortion_callback: A callback to be called and passed an Abortion value
in the event of RPC abortion.
timeout: A duration of time in seconds to allow for the RPC.
metadata: A metadata value to be passed to the service-side of
the RPC.
protocol_options: A value specified by the provider of a Face interface
implementation affording custom state and behavior.
Returns:
A single object that is both a Call object for the RPC and a
stream.Consumer to which the request values of the RPC should be passed.
"""
raise NotImplementedError()
class MethodImplementation(abc.ABC):
"""A sum type that describes a method implementation.
Attributes:
cardinality: A cardinality.Cardinality value.
style: A style.Service value.
unary_unary_inline: The implementation of the method as a callable value
that takes a request value and a ServicerContext object and returns a
response value. Only non-None if cardinality is
cardinality.Cardinality.UNARY_UNARY and style is style.Service.INLINE.
unary_stream_inline: The implementation of the method as a callable value
that takes a request value and a ServicerContext object and returns an
iterator of response values. Only non-None if cardinality is
cardinality.Cardinality.UNARY_STREAM and style is style.Service.INLINE.
stream_unary_inline: The implementation of the method as a callable value
that takes an iterator of request values and a ServicerContext object and
returns a response value. Only non-None if cardinality is
cardinality.Cardinality.STREAM_UNARY and style is style.Service.INLINE.
stream_stream_inline: The implementation of the method as a callable value
that takes an iterator of request values and a ServicerContext object and
returns an iterator of response values. Only non-None if cardinality is
cardinality.Cardinality.STREAM_STREAM and style is style.Service.INLINE.
unary_unary_event: The implementation of the method as a callable value that
takes a request value, a response callback to which to pass the response
value of the RPC, and a ServicerContext. Only non-None if cardinality is
cardinality.Cardinality.UNARY_UNARY and style is style.Service.EVENT.
unary_stream_event: The implementation of the method as a callable value
that takes a request value, a stream.Consumer to which to pass the
response values of the RPC, and a ServicerContext. Only non-None if
cardinality is cardinality.Cardinality.UNARY_STREAM and style is
style.Service.EVENT.
stream_unary_event: The implementation of the method as a callable value
that takes a response callback to which to pass the response value of the
RPC and a ServicerContext and returns a stream.Consumer to which the
request values of the RPC should be passed. Only non-None if cardinality
is cardinality.Cardinality.STREAM_UNARY and style is style.Service.EVENT.
stream_stream_event: The implementation of the method as a callable value
that takes a stream.Consumer to which to pass the response values of the
RPC and a ServicerContext and returns a stream.Consumer to which the
request values of the RPC should be passed. Only non-None if cardinality
is cardinality.Cardinality.STREAM_STREAM and style is
style.Service.EVENT.
"""
class MultiMethodImplementation(abc.ABC):
"""A general type able to service many methods."""
@abc.abstractmethod
def service(self, group, method, response_consumer, context):
"""Services an RPC.
Args:
group: The group identifier of the RPC.
method: The method identifier of the RPC.
response_consumer: A stream.Consumer to be called to accept the response
values of the RPC.
context: a ServicerContext object.
Returns:
A stream.Consumer with which to accept the request values of the RPC. The
consumer returned from this method may or may not be invoked to
completion: in the case of RPC abortion, RPC Framework will simply stop
passing values to this object. Implementations must not assume that this
object will be called to completion of the request stream or even called
at all.
Raises:
abandonment.Abandoned: May or may not be raised when the RPC has been
aborted.
NoSuchMethodError: If this MultiMethod does not recognize the given group
and name for the RPC and is not able to service the RPC.
"""
raise NotImplementedError()
class GenericStub(abc.ABC):
"""Affords RPC invocation via generic methods."""
@abc.abstractmethod
def blocking_unary_unary(
self,
group,
method,
request,
timeout,
metadata=None,
with_call=False,
protocol_options=None,
):
"""Invokes a unary-request-unary-response method.
This method blocks until either returning the response value of the RPC
(in the event of RPC completion) or raising an exception (in the event of
RPC abortion).
Args:
group: The group identifier of the RPC.
method: The method identifier of the RPC.
request: The request value for the RPC.
timeout: A duration of time in seconds to allow for the RPC.
metadata: A metadata value to be passed to the service-side of the RPC.
with_call: Whether or not to include return a Call for the RPC in addition
to the response.
protocol_options: A value specified by the provider of a Face interface
implementation affording custom state and behavior.
Returns:
The response value for the RPC, and a Call for the RPC if with_call was
set to True at invocation.
Raises:
AbortionError: Indicating that the RPC was aborted.
"""
raise NotImplementedError()
@abc.abstractmethod
def future_unary_unary(
self,
group,
method,
request,
timeout,
metadata=None,
protocol_options=None,
):
"""Invokes a unary-request-unary-response method.
Args:
group: The group identifier of the RPC.
method: The method identifier of the RPC.
request: The request value for the RPC.
timeout: A duration of time in seconds to allow for the RPC.
metadata: A metadata value to be passed to the service-side of the RPC.
protocol_options: A value specified by the provider of a Face interface
implementation affording custom state and behavior.
Returns:
An object that is both a Call for the RPC and a future.Future. In the
event of RPC completion, the return Future's result value will be the
response value of the RPC. In the event of RPC abortion, the returned
Future's exception value will be an AbortionError.
"""
raise NotImplementedError()
@abc.abstractmethod
def inline_unary_stream(
self,
group,
method,
request,
timeout,
metadata=None,
protocol_options=None,
):
"""Invokes a unary-request-stream-response method.
Args:
group: The group identifier of the RPC.
method: The method identifier of the RPC.
request: The request value for the RPC.
timeout: A duration of time in seconds to allow for the RPC.
metadata: A metadata value to be passed to the service-side of the RPC.
protocol_options: A value specified by the provider of a Face interface
implementation affording custom state and behavior.
Returns:
An object that is both a Call for the RPC and an iterator of response
values. Drawing response values from the returned iterator may raise
AbortionError indicating abortion of the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def blocking_stream_unary(
self,
group,
method,
request_iterator,
timeout,
metadata=None,
with_call=False,
protocol_options=None,
):
"""Invokes a stream-request-unary-response method.
This method blocks until either returning the response value of the RPC
(in the event of RPC completion) or raising an exception (in the event of
RPC abortion).
Args:
group: The group identifier of the RPC.
method: The method identifier of the RPC.
request_iterator: An iterator that yields request values for the RPC.
timeout: A duration of time in seconds to allow for the RPC.
metadata: A metadata value to be passed to the service-side of the RPC.
with_call: Whether or not to include return a Call for the RPC in addition
to the response.
protocol_options: A value specified by the provider of a Face interface
implementation affording custom state and behavior.
Returns:
The response value for the RPC, and a Call for the RPC if with_call was
set to True at invocation.
Raises:
AbortionError: Indicating that the RPC was aborted.
"""
raise NotImplementedError()
@abc.abstractmethod
def future_stream_unary(
self,
group,
method,
request_iterator,
timeout,
metadata=None,
protocol_options=None,
):
"""Invokes a stream-request-unary-response method.
Args:
group: The group identifier of the RPC.
method: The method identifier of the RPC.
request_iterator: An iterator that yields request values for the RPC.
timeout: A duration of time in seconds to allow for the RPC.
metadata: A metadata value to be passed to the service-side of the RPC.
protocol_options: A value specified by the provider of a Face interface
implementation affording custom state and behavior.
Returns:
An object that is both a Call for the RPC and a future.Future. In the
event of RPC completion, the return Future's result value will be the
response value of the RPC. In the event of RPC abortion, the returned
Future's exception value will be an AbortionError.
"""
raise NotImplementedError()
@abc.abstractmethod
def inline_stream_stream(
self,
group,
method,
request_iterator,
timeout,
metadata=None,
protocol_options=None,
):
"""Invokes a stream-request-stream-response method.
Args:
group: The group identifier of the RPC.
method: The method identifier of the RPC.
request_iterator: An iterator that yields request values for the RPC.
timeout: A duration of time in seconds to allow for the RPC.
metadata: A metadata value to be passed to the service-side of the RPC.
protocol_options: A value specified by the provider of a Face interface
implementation affording custom state and behavior.
Returns:
An object that is both a Call for the RPC and an iterator of response
values. Drawing response values from the returned iterator may raise
AbortionError indicating abortion of the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def event_unary_unary(
self,
group,
method,
request,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None,
):
"""Event-driven invocation of a unary-request-unary-response method.
Args:
group: The group identifier of the RPC.
method: The method identifier of the RPC.
request: The request value for the RPC.
receiver: A ResponseReceiver to be passed the response data of the RPC.
abortion_callback: A callback to be called and passed an Abortion value
in the event of RPC abortion.
timeout: A duration of time in seconds to allow for the RPC.
metadata: A metadata value to be passed to the service-side of the RPC.
protocol_options: A value specified by the provider of a Face interface
implementation affording custom state and behavior.
Returns:
A Call for the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def event_unary_stream(
self,
group,
method,
request,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None,
):
"""Event-driven invocation of a unary-request-stream-response method.
Args:
group: The group identifier of the RPC.
method: The method identifier of the RPC.
request: The request value for the RPC.
receiver: A ResponseReceiver to be passed the response data of the RPC.
abortion_callback: A callback to be called and passed an Abortion value
in the event of RPC abortion.
timeout: A duration of time in seconds to allow for the RPC.
metadata: A metadata value to be passed to the service-side of the RPC.
protocol_options: A value specified by the provider of a Face interface
implementation affording custom state and behavior.
Returns:
A Call for the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def event_stream_unary(
self,
group,
method,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None,
):
"""Event-driven invocation of a unary-request-unary-response method.
Args:
group: The group identifier of the RPC.
method: The method identifier of the RPC.
receiver: A ResponseReceiver to be passed the response data of the RPC.
abortion_callback: A callback to be called and passed an Abortion value
in the event of RPC abortion.
timeout: A duration of time in seconds to allow for the RPC.
metadata: A metadata value to be passed to the service-side of the RPC.
protocol_options: A value specified by the provider of a Face interface
implementation affording custom state and behavior.
Returns:
A pair of a Call object for the RPC and a stream.Consumer to which the
request values of the RPC should be passed.
"""
raise NotImplementedError()
@abc.abstractmethod
def event_stream_stream(
self,
group,
method,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None,
):
"""Event-driven invocation of a unary-request-stream-response method.
Args:
group: The group identifier of the RPC.
method: The method identifier of the RPC.
receiver: A ResponseReceiver to be passed the response data of the RPC.
abortion_callback: A callback to be called and passed an Abortion value
in the event of RPC abortion.
timeout: A duration of time in seconds to allow for the RPC.
metadata: A metadata value to be passed to the service-side of the RPC.
protocol_options: A value specified by the provider of a Face interface
implementation affording custom state and behavior.
Returns:
A pair of a Call object for the RPC and a stream.Consumer to which the
request values of the RPC should be passed.
"""
raise NotImplementedError()
@abc.abstractmethod
def unary_unary(self, group, method):
"""Creates a UnaryUnaryMultiCallable for a unary-unary method.
Args:
group: The group identifier of the RPC.
method: The method identifier of the RPC.
Returns:
A UnaryUnaryMultiCallable value for the named unary-unary method.
"""
raise NotImplementedError()
@abc.abstractmethod
def unary_stream(self, group, method):
"""Creates a UnaryStreamMultiCallable for a unary-stream method.
Args:
group: The group identifier of the RPC.
method: The method identifier of the RPC.
Returns:
A UnaryStreamMultiCallable value for the name unary-stream method.
"""
raise NotImplementedError()
@abc.abstractmethod
def stream_unary(self, group, method):
"""Creates a StreamUnaryMultiCallable for a stream-unary method.
Args:
group: The group identifier of the RPC.
method: The method identifier of the RPC.
Returns:
A StreamUnaryMultiCallable value for the named stream-unary method.
"""
raise NotImplementedError()
@abc.abstractmethod
def stream_stream(self, group, method):
"""Creates a StreamStreamMultiCallable for a stream-stream method.
Args:
group: The group identifier of the RPC.
method: The method identifier of the RPC.
Returns:
A StreamStreamMultiCallable value for the named stream-stream method.
"""
raise NotImplementedError()
class DynamicStub(abc.ABC):
"""Affords RPC invocation via attributes corresponding to afforded methods.
Instances of this type may be scoped to a single group so that attribute
access is unambiguous.
Instances of this type respond to attribute access as follows: if the
requested attribute is the name of a unary-unary method, the value of the
attribute will be a UnaryUnaryMultiCallable with which to invoke an RPC; if
the requested attribute is the name of a unary-stream method, the value of the
attribute will be a UnaryStreamMultiCallable with which to invoke an RPC; if
the requested attribute is the name of a stream-unary method, the value of the
attribute will be a StreamUnaryMultiCallable with which to invoke an RPC; and
if the requested attribute is the name of a stream-stream method, the value of
the attribute will be a StreamStreamMultiCallable with which to invoke an RPC.
"""
| 39,700
| 35.590783
| 84
|
py
|
grpc
|
grpc-master/src/python/grpcio/grpc/framework/interfaces/face/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/proto/gen_build_yaml.py
|
#!/usr/bin/env python2.7
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generates the appropriate build.json data for all the proto files."""
import yaml
import collections
import os
import re
import sys
def update_deps(key, proto_filename, deps, deps_external, is_trans, visited):
if not proto_filename in visited:
visited.append(proto_filename)
with open(proto_filename) as inp:
for line in inp:
imp = re.search(r'import "([^"]*)"', line)
if not imp:
continue
imp_proto = imp.group(1)
# This indicates an external dependency, which we should handle
# differently and not traverse recursively
if imp_proto.startswith("google/"):
if key not in deps_external:
deps_external[key] = []
deps_external[key].append(imp_proto[:-6])
continue
# In case that the path is changed by copybara,
# revert the change to avoid file error.
if imp_proto.startswith("third_party/grpc"):
imp_proto = imp_proto[17:]
if key not in deps:
deps[key] = []
deps[key].append(imp_proto[:-6])
if is_trans:
update_deps(
key, imp_proto, deps, deps_external, is_trans, visited
)
def main():
proto_dir = os.path.abspath(os.path.dirname(sys.argv[0]))
os.chdir(os.path.join(proto_dir, "../.."))
deps = {}
deps_trans = {}
deps_external = {}
deps_external_trans = {}
for root, dirs, files in os.walk("src/proto"):
for f in files:
if f[-6:] != ".proto":
continue
look_at = os.path.join(root, f)
deps_for = look_at[:-6]
# First level deps
update_deps(deps_for, look_at, deps, deps_external, False, [])
# Transitive deps
update_deps(
deps_for, look_at, deps_trans, deps_external_trans, True, []
)
json = {
"proto_deps": deps,
"proto_transitive_deps": deps_trans,
"proto_external_deps": deps_external,
"proto_transitive_external_deps": deps_external_trans,
}
print(yaml.dump(json))
if __name__ == "__main__":
main()
| 2,961
| 33.44186
| 79
|
py
|
grpc
|
grpc-master/src/objective-c/change-comments.py
|
#!/usr/bin/env python2.7
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Change comments style of source files from // to /** */"""
import re
import sys
if len(sys.argv) < 2:
print("Please provide at least one source file name as argument.")
sys.exit()
for file_name in sys.argv[1:]:
print(
"Modifying format of {file} comments in place...".format(
file=file_name,
)
)
# Input
with open(file_name, "r") as input_file:
lines = input_file.readlines()
def peek():
return lines[0]
def read_line():
return lines.pop(0)
def more_input_available():
return lines
# Output
output_lines = []
def write(line):
output_lines.append(line)
def flush_output():
with open(file_name, "w") as output_file:
for line in output_lines:
output_file.write(line)
# Pattern matching
comment_regex = r"^(\s*)//\s(.*)$"
def is_comment(line):
return re.search(comment_regex, line)
def isnt_comment(line):
return not is_comment(line)
def next_line(predicate):
return more_input_available() and predicate(peek())
# Transformation
def indentation_of(line):
match = re.search(comment_regex, line)
return match.group(1)
def content(line):
match = re.search(comment_regex, line)
return match.group(2)
def format_as_block(comment_block):
if len(comment_block) == 0:
return []
indent = indentation_of(comment_block[0])
if len(comment_block) == 1:
return [indent + "/** " + content(comment_block[0]) + " */\n"]
block = (
["/**"]
+ [" * " + content(line) for line in comment_block]
+ [" */"]
)
return [indent + line.rstrip() + "\n" for line in block]
# Main algorithm
while more_input_available():
while next_line(isnt_comment):
write(read_line())
comment_block = []
# Get all lines in the same comment block. We could restrict the indentation
# to be the same as the first line of the block, but it's probably ok.
while next_line(is_comment):
comment_block.append(read_line())
for line in format_as_block(comment_block):
write(line)
flush_output()
| 2,922
| 25.098214
| 84
|
py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.