repo
stringlengths 2
99
| file
stringlengths 13
225
| code
stringlengths 0
18.3M
| file_length
int64 0
18.3M
| avg_line_length
float64 0
1.36M
| max_line_length
int64 0
4.26M
| extension_type
stringclasses 1
value |
|---|---|---|---|---|---|---|
grpc
|
grpc-master/src/python/grpcio_tests/tests/reflection/_reflection_servicer_test.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests of grpc_reflection.v1alpha.reflection."""
import sys
import unittest
from google.protobuf import descriptor_pb2
from google.protobuf import descriptor_pool
import grpc
from grpc_reflection.v1alpha import reflection
from grpc_reflection.v1alpha import reflection_pb2
from grpc_reflection.v1alpha import reflection_pb2_grpc
from src.proto.grpc.testing import empty_pb2
from src.proto.grpc.testing.proto2 import empty2_extensions_pb2
from src.proto.grpc.testing.proto2 import empty2_pb2
from tests.unit import test_common
# File and symbol names of the well-known test protos exercised by the
# reflection lookups below.
_EMPTY_PROTO_FILE_NAME = "src/proto/grpc/testing/empty.proto"
_EMPTY_PROTO_SYMBOL_NAME = "grpc.testing.Empty"
# Fake service names registered with the reflection servicer; ListServices
# only needs the names, not real service implementations.
_SERVICE_NAMES = (
    "Angstrom",
    "Bohr",
    "Curie",
    "Dyson",
    "Einstein",
    "Feynman",
    "Galilei",
)
_EMPTY_EXTENSIONS_SYMBOL_NAME = "grpc.testing.proto2.EmptyWithExtensions"
# Extension field numbers declared on EmptyWithExtensions in the proto2
# test protos; expected back from all_extension_numbers_of_type.
_EMPTY_EXTENSIONS_NUMBERS = (
    124,
    125,
    126,
    127,
    128,
)
def _file_descriptor_to_proto(descriptor):
    """Serialize a FileDescriptor to FileDescriptorProto wire-format bytes."""
    file_proto = descriptor_pb2.FileDescriptorProto()
    descriptor.CopyToProto(file_proto)
    return file_proto.SerializeToString()
@unittest.skipIf(
    sys.version_info[0] < 3, "ProtoBuf descriptor has moved on from Python2"
)
class ReflectionServicerTest(unittest.TestCase):
    """End-to-end tests of the reflection servicer over a real local channel."""

    def setUp(self):
        # Start an insecure server with reflection enabled for the fake
        # service names, then connect a reflection stub to it.
        self._server = test_common.test_server()
        reflection.enable_server_reflection(_SERVICE_NAMES, self._server)
        port = self._server.add_insecure_port("[::]:0")
        self._server.start()
        self._channel = grpc.insecure_channel("localhost:%d" % port)
        self._stub = reflection_pb2_grpc.ServerReflectionStub(self._channel)

    def tearDown(self):
        self._server.stop(None)
        self._channel.close()

    def testFileByName(self):
        """Lookup by file name: one existing file, one NOT_FOUND."""
        requests = (
            reflection_pb2.ServerReflectionRequest(
                file_by_filename=_EMPTY_PROTO_FILE_NAME
            ),
            reflection_pb2.ServerReflectionRequest(
                file_by_filename="i-donut-exist"
            ),
        )
        responses = tuple(self._stub.ServerReflectionInfo(iter(requests)))
        expected_responses = (
            reflection_pb2.ServerReflectionResponse(
                valid_host="",
                file_descriptor_response=reflection_pb2.FileDescriptorResponse(
                    file_descriptor_proto=(
                        _file_descriptor_to_proto(empty_pb2.DESCRIPTOR),
                    )
                ),
            ),
            reflection_pb2.ServerReflectionResponse(
                valid_host="",
                error_response=reflection_pb2.ErrorResponse(
                    error_code=grpc.StatusCode.NOT_FOUND.value[0],
                    error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
                ),
            ),
        )
        self.assertEqual(expected_responses, responses)

    def testFileBySymbol(self):
        """Lookup by fully-qualified symbol: one existing symbol, one NOT_FOUND."""
        requests = (
            reflection_pb2.ServerReflectionRequest(
                file_containing_symbol=_EMPTY_PROTO_SYMBOL_NAME
            ),
            reflection_pb2.ServerReflectionRequest(
                file_containing_symbol="i.donut.exist.co.uk.org.net.me.name.foo"
            ),
        )
        responses = tuple(self._stub.ServerReflectionInfo(iter(requests)))
        expected_responses = (
            reflection_pb2.ServerReflectionResponse(
                valid_host="",
                file_descriptor_response=reflection_pb2.FileDescriptorResponse(
                    file_descriptor_proto=(
                        _file_descriptor_to_proto(empty_pb2.DESCRIPTOR),
                    )
                ),
            ),
            reflection_pb2.ServerReflectionResponse(
                valid_host="",
                error_response=reflection_pb2.ErrorResponse(
                    error_code=grpc.StatusCode.NOT_FOUND.value[0],
                    error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
                ),
            ),
        )
        self.assertEqual(expected_responses, responses)

    def testFileContainingExtension(self):
        """Lookup of the file defining a given extension number on a type."""
        requests = (
            reflection_pb2.ServerReflectionRequest(
                file_containing_extension=reflection_pb2.ExtensionRequest(
                    containing_type=_EMPTY_EXTENSIONS_SYMBOL_NAME,
                    extension_number=125,
                ),
            ),
            reflection_pb2.ServerReflectionRequest(
                file_containing_extension=reflection_pb2.ExtensionRequest(
                    containing_type="i.donut.exist.co.uk.org.net.me.name.foo",
                    extension_number=55,
                ),
            ),
        )
        responses = tuple(self._stub.ServerReflectionInfo(iter(requests)))
        expected_responses = (
            reflection_pb2.ServerReflectionResponse(
                valid_host="",
                # Expect the defining file plus the file it depends on.
                file_descriptor_response=reflection_pb2.FileDescriptorResponse(
                    file_descriptor_proto=(
                        _file_descriptor_to_proto(
                            empty2_extensions_pb2.DESCRIPTOR
                        ),
                        _file_descriptor_to_proto(empty2_pb2.DESCRIPTOR),
                    )
                ),
            ),
            reflection_pb2.ServerReflectionResponse(
                valid_host="",
                error_response=reflection_pb2.ErrorResponse(
                    error_code=grpc.StatusCode.NOT_FOUND.value[0],
                    error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
                ),
            ),
        )
        self.assertEqual(expected_responses, responses)

    def testExtensionNumbersOfType(self):
        """All declared extension numbers are returned; unknown type is NOT_FOUND."""
        requests = (
            reflection_pb2.ServerReflectionRequest(
                all_extension_numbers_of_type=_EMPTY_EXTENSIONS_SYMBOL_NAME
            ),
            reflection_pb2.ServerReflectionRequest(
                all_extension_numbers_of_type="i.donut.exist.co.uk.net.name.foo"
            ),
        )
        responses = tuple(self._stub.ServerReflectionInfo(iter(requests)))
        expected_responses = (
            reflection_pb2.ServerReflectionResponse(
                valid_host="",
                all_extension_numbers_response=reflection_pb2.ExtensionNumberResponse(
                    base_type_name=_EMPTY_EXTENSIONS_SYMBOL_NAME,
                    extension_number=_EMPTY_EXTENSIONS_NUMBERS,
                ),
            ),
            reflection_pb2.ServerReflectionResponse(
                valid_host="",
                error_response=reflection_pb2.ErrorResponse(
                    error_code=grpc.StatusCode.NOT_FOUND.value[0],
                    error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
                ),
            ),
        )
        self.assertEqual(expected_responses, responses)

    def testListServices(self):
        """ListServices reports exactly the names registered in setUp."""
        requests = (
            reflection_pb2.ServerReflectionRequest(
                list_services="",
            ),
        )
        responses = tuple(self._stub.ServerReflectionInfo(iter(requests)))
        expected_responses = (
            reflection_pb2.ServerReflectionResponse(
                valid_host="",
                list_services_response=reflection_pb2.ListServiceResponse(
                    service=tuple(
                        reflection_pb2.ServiceResponse(name=name)
                        for name in _SERVICE_NAMES
                    )
                ),
            ),
        )
        self.assertEqual(expected_responses, responses)

    def testReflectionServiceName(self):
        self.assertEqual(
            reflection.SERVICE_NAME, "grpc.reflection.v1alpha.ServerReflection"
        )
if __name__ == "__main__":
    # Run the reflection servicer tests with verbose per-test output.
    unittest.main(verbosity=2)
| 8,298
| 35.559471
| 86
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/reflection/_reflection_client_test.py
|
# Copyright 2022 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests of grpc_reflection.v1alpha.reflection."""
import unittest
from google.protobuf.descriptor_pool import DescriptorPool
import grpc
from grpc_reflection.v1alpha import reflection
from grpc_reflection.v1alpha.proto_reflection_descriptor_database import (
ProtoReflectionDescriptorDatabase,
)
from src.proto.grpc.testing import test_pb2
# Needed to load the EmptyWithExtensions message
from src.proto.grpc.testing.proto2 import empty2_extensions_pb2
from tests.unit import test_common
# Names and file paths used by the descriptor-pool lookup tests below;
# the _INVALID_* entries are expected to produce KeyError.
_PROTO_PACKAGE_NAME = "grpc.testing"
_PROTO_FILE_NAME = "src/proto/grpc/testing/test.proto"
_EMPTY_PROTO_FILE_NAME = "src/proto/grpc/testing/empty.proto"
_INVALID_FILE_NAME = "i-do-not-exist.proto"
_EMPTY_PROTO_SYMBOL_NAME = "grpc.testing.Empty"
_INVALID_SYMBOL_NAME = "IDoNotExist"
_EMPTY_EXTENSIONS_SYMBOL_NAME = "grpc.testing.proto2.EmptyWithExtensions"
class ReflectionClientTest(unittest.TestCase):
    """Tests a DescriptorPool backed by ProtoReflectionDescriptorDatabase
    against a live server with reflection enabled."""

    def setUp(self):
        # Serve the real TestService plus reflection, then build a descriptor
        # pool whose backing database resolves through the reflection service.
        self._server = test_common.test_server()
        self._SERVICE_NAMES = (
            test_pb2.DESCRIPTOR.services_by_name["TestService"].full_name,
            reflection.SERVICE_NAME,
        )
        reflection.enable_server_reflection(self._SERVICE_NAMES, self._server)
        port = self._server.add_insecure_port("[::]:0")
        self._server.start()
        self._channel = grpc.insecure_channel("localhost:%d" % port)
        self._reflection_db = ProtoReflectionDescriptorDatabase(self._channel)
        self.desc_pool = DescriptorPool(self._reflection_db)

    def tearDown(self):
        self._server.stop(None)
        self._channel.close()

    def testListServices(self):
        """The reflection DB reports exactly the services registered in setUp."""
        services = self._reflection_db.get_services()
        self.assertCountEqual(self._SERVICE_NAMES, services)

    def testReflectionServiceName(self):
        self.assertEqual(
            reflection.SERVICE_NAME, "grpc.reflection.v1alpha.ServerReflection"
        )

    def testFindFile(self):
        """Files fetched by name expose package, services, and message types."""
        file_name = _PROTO_FILE_NAME
        file_desc = self.desc_pool.FindFileByName(file_name)
        self.assertEqual(file_name, file_desc.name)
        self.assertEqual(_PROTO_PACKAGE_NAME, file_desc.package)
        self.assertIn("TestService", file_desc.services_by_name)
        file_name = _EMPTY_PROTO_FILE_NAME
        file_desc = self.desc_pool.FindFileByName(file_name)
        self.assertEqual(file_name, file_desc.name)
        self.assertEqual(_PROTO_PACKAGE_NAME, file_desc.package)
        self.assertIn("Empty", file_desc.message_types_by_name)

    def testFindFileError(self):
        with self.assertRaises(KeyError):
            self.desc_pool.FindFileByName(_INVALID_FILE_NAME)

    def testFindMessage(self):
        message_name = _EMPTY_PROTO_SYMBOL_NAME
        message_desc = self.desc_pool.FindMessageTypeByName(message_name)
        self.assertEqual(message_name, message_desc.full_name)
        self.assertTrue(message_name.endswith(message_desc.name))

    def testFindMessageError(self):
        with self.assertRaises(KeyError):
            self.desc_pool.FindMessageTypeByName(_INVALID_SYMBOL_NAME)

    def testFindServiceFindMethod(self):
        """Service, file, method, and message descriptors are mutually consistent."""
        service_name = self._SERVICE_NAMES[0]
        service_desc = self.desc_pool.FindServiceByName(service_name)
        self.assertEqual(service_name, service_desc.full_name)
        self.assertTrue(service_name.endswith(service_desc.name))
        file_name = _PROTO_FILE_NAME
        file_desc = self.desc_pool.FindFileByName(file_name)
        self.assertIs(file_desc, service_desc.file)

        method_name = "EmptyCall"
        self.assertIn(method_name, service_desc.methods_by_name)
        method_desc = service_desc.FindMethodByName(method_name)
        self.assertIs(method_desc, service_desc.methods_by_name[method_name])
        self.assertIs(service_desc, method_desc.containing_service)
        self.assertEqual(method_name, method_desc.name)
        self.assertTrue(method_desc.full_name.endswith(method_name))

        # EmptyCall's request and response are both grpc.testing.Empty.
        empty_message_desc = self.desc_pool.FindMessageTypeByName(
            _EMPTY_PROTO_SYMBOL_NAME
        )
        self.assertEqual(empty_message_desc, method_desc.input_type)
        self.assertEqual(empty_message_desc, method_desc.output_type)

    def testFindServiceError(self):
        with self.assertRaises(KeyError):
            self.desc_pool.FindServiceByName(_INVALID_SYMBOL_NAME)

    def testFindMethodError(self):
        service_name = self._SERVICE_NAMES[0]
        service_desc = self.desc_pool.FindServiceByName(service_name)
        # FindMethodByName sometimes raises a KeyError, and sometimes returns None.
        # See https://github.com/protocolbuffers/protobuf/issues/9592
        with self.assertRaises(KeyError):
            res = service_desc.FindMethodByName(_INVALID_SYMBOL_NAME)
            if res is None:
                raise KeyError()

    def testFindExtensionNotImplemented(self):
        """
        Extensions aren't implemented in Protobuf for Python.
        For now, simply assert that indeed they don't work.
        """
        message_name = _EMPTY_EXTENSIONS_SYMBOL_NAME
        message_desc = self.desc_pool.FindMessageTypeByName(message_name)
        self.assertEqual(message_name, message_desc.full_name)
        self.assertTrue(message_name.endswith(message_desc.name))
        extension_field_descs = self.desc_pool.FindAllExtensions(message_desc)
        self.assertEqual(0, len(extension_field_descs))
        with self.assertRaises(KeyError):
            self.desc_pool.FindExtensionByName(message_name)
if __name__ == "__main__":
    # Run the reflection client tests with verbose per-test output.
    unittest.main(verbosity=2)
| 6,142
| 39.150327
| 83
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/reflection/__init__.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/qps/benchmark_server.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from src.proto.grpc.testing import benchmark_service_pb2_grpc
from src.proto.grpc.testing import messages_pb2
class BenchmarkServer(benchmark_service_pb2_grpc.BenchmarkServiceServicer):
    """Synchronous Server implementation for the Benchmark service."""

    @staticmethod
    def _zero_filled_response(size):
        # Build a SimpleResponse whose payload body is `size` zero bytes.
        payload = messages_pb2.Payload(body=b"\0" * size)
        return messages_pb2.SimpleResponse(payload=payload)

    def UnaryCall(self, request, context):
        """Answer one request with a zero-filled payload of the requested size."""
        return self._zero_filled_response(request.response_size)

    def StreamingCall(self, request_iterator, context):
        """Yield one zero-filled response per incoming request."""
        for incoming in request_iterator:
            yield self._zero_filled_response(incoming.response_size)
class GenericBenchmarkServer(
    benchmark_service_pb2_grpc.BenchmarkServiceServicer
):
    """Generic Server implementation for the Benchmark service."""

    def __init__(self, resp_size):
        # Canned zero-byte blob returned verbatim for every call.
        self._response = b"\0" * resp_size

    def UnaryCall(self, request, context):
        """Return the canned byte-buffer response."""
        return self._response

    def StreamingCall(self, request_iterator, context):
        """Yield the canned response once per incoming request."""
        for _ in request_iterator:
            yield self._response
| 1,698
| 35.934783
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/qps/client_runner.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines behavior for WHEN clients send requests.
Each client exposes a non-blocking send_request() method that the
ClientRunner invokes either periodically or in response to some event.
"""
import abc
import threading
import time
class ClientRunner(abc.ABC):
    """Abstract interface for sending requests from clients.

    Subclasses decide *when* the wrapped client's non-blocking
    send_request() is invoked (periodically, or in response to events).

    NOTE: the original declared ``__metaclass__ = abc.ABCMeta``, which is
    the Python 2 spelling and has no effect on Python 3, so the class was
    not actually abstract.  Inheriting from abc.ABC restores the intended
    abstract-method enforcement.
    """

    def __init__(self, client):
        # The benchmark client whose send_request() this runner drives.
        self._client = client

    @abc.abstractmethod
    def start(self):
        """Begin issuing requests through the client."""
        raise NotImplementedError()

    @abc.abstractmethod
    def stop(self):
        """Stop issuing requests and release the client."""
        raise NotImplementedError()
class OpenLoopClientRunner(ClientRunner):
    """Issues requests on a dedicated thread, paced by an interval generator."""

    def __init__(self, client, interval_generator):
        super().__init__(client)
        self._is_running = False
        self._interval_generator = interval_generator
        # Worker thread that paces the outgoing requests.
        self._dispatch_thread = threading.Thread(
            target=self._dispatch_requests, args=()
        )

    def start(self):
        """Mark the runner live, start the client, then begin dispatching."""
        self._is_running = True
        self._client.start()
        self._dispatch_thread.start()

    def stop(self):
        """Halt dispatching, stop the client, and wait for the worker to exit."""
        self._is_running = False
        self._client.stop()
        self._dispatch_thread.join()
        self._client = None

    def _dispatch_requests(self):
        # One request per iteration; the generator supplies the pause before
        # the next send.  Exits once stop() clears the running flag.
        while self._is_running:
            self._client.send_request()
            time.sleep(next(self._interval_generator))
class ClosedLoopClientRunner(ClientRunner):
    """Sends a fixed burst of requests and, unless disabled, one new request
    per response to keep the loop closed."""

    def __init__(self, client, request_count, no_ping_pong):
        super().__init__(client)
        self._is_running = False
        self._request_count = request_count
        # For server-streaming RPC, don't spawn new RPC after each responses.
        # This yield at most ~17% for single RPC scenarios.
        if not no_ping_pong:
            # Send a new request on each response for closed loop
            self._client.add_response_callback(self._send_request)

    def start(self):
        """Start the client and fire the initial burst of requests."""
        self._is_running = True
        self._client.start()
        for _ in range(self._request_count):
            self._client.send_request()

    def stop(self):
        """Stop the client and drop the reference to it."""
        self._is_running = False
        self._client.stop()
        self._client = None

    def _send_request(self, client, unused_response_time):
        # Response callback: immediately re-send while the runner is live.
        if self._is_running:
            client.send_request()
| 2,887
| 30.053763
| 77
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/qps/worker_server.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from concurrent import futures
import multiprocessing
import random
import threading
import time
try:
# The resource module is not available on Windows. While this server only
# supports Linux, we must still be _importable_ on Windows.
import resource
except ImportError:
pass
import grpc
from src.proto.grpc.testing import benchmark_service_pb2_grpc
from src.proto.grpc.testing import control_pb2
from src.proto.grpc.testing import stats_pb2
from src.proto.grpc.testing import worker_service_pb2_grpc
from tests.qps import benchmark_client
from tests.qps import benchmark_server
from tests.qps import client_runner
from tests.qps import histogram
from tests.unit import resources
from tests.unit import test_common
class Snapshotter:
    """Tracks wall-clock and CPU (user/system) time deltas between snapshots."""

    def __init__(self):
        # Each metric keeps a "last" baseline and a current snapshot value.
        self._start_time = self._end_time = 0.0
        self._last_utime = self._utime = 0.0
        self._last_stime = self._stime = 0.0

    def get_time_elapsed(self):
        """Wall-clock seconds between the last reset() and snapshot()."""
        return self._end_time - self._start_time

    def get_utime(self):
        """User CPU seconds consumed since the last reset()."""
        return self._utime - self._last_utime

    def get_stime(self):
        """System CPU seconds consumed since the last reset()."""
        return self._stime - self._last_stime

    def snapshot(self):
        """Capture current wall-clock time and this process's rusage counters."""
        self._end_time = time.time()
        usage = resource.getrusage(resource.RUSAGE_SELF)
        self._utime = usage.ru_utime
        self._stime = usage.ru_stime

    def reset(self):
        """Make the most recent snapshot the new measurement baseline."""
        self._start_time = self._end_time
        self._last_utime = self._utime
        self._last_stime = self._stime

    def stats(self):
        """Return elapsed/user/system deltas keyed for stats_pb2.ServerStats."""
        return {
            "time_elapsed": self.get_time_elapsed(),
            "time_user": self.get_utime(),
            "time_system": self.get_stime(),
        }
class WorkerServer(worker_service_pb2_grpc.WorkerServiceServicer):
    """Python Worker Server implementation."""

    def __init__(self, server_port=None):
        # server_port: fallback port for benchmark servers whose config does
        # not specify one (config.port == 0); may be None.
        self._quit_event = threading.Event()
        self._server_port = server_port
        self._snapshotter = Snapshotter()

    def RunServer(self, request_iterator, context):
        """Run a benchmark server; stream a stats snapshot per driver mark."""
        # The first message on the stream carries the server setup config.
        # pylint: disable=stop-iteration-return
        config = next(request_iterator).setup
        # pylint: enable=stop-iteration-return
        server, port = self._create_server(config)
        cores = multiprocessing.cpu_count()
        server.start()
        self._snapshotter.snapshot()
        self._snapshotter.reset()
        yield self._get_server_status(port, cores)

        # Subsequent messages are Mark requests: answer each with a status
        # snapshot, resetting the counters when the driver asks for it.
        for request in request_iterator:
            self._snapshotter.snapshot()
            status = self._get_server_status(port, cores)
            if request.mark.reset:
                self._snapshotter.reset()
            yield status
        server.stop(None)

    def _get_server_status(self, port, cores):
        # Package the snapshotter's deltas into a ServerStatus message.
        stats = stats_pb2.ServerStats(**self._snapshotter.stats())
        return control_pb2.ServerStatus(stats=stats, port=port, cores=cores)

    def _create_server(self, config):
        """Build (but do not start) a benchmark server per the driver config.

        Returns a (server, bound_port) tuple.  Raises for server types other
        than ASYNC_SERVER / ASYNC_GENERIC_SERVER.
        """
        if config.async_server_threads == 0:
            # This is the default concurrent.futures thread pool size, but
            # None doesn't seem to work
            server_threads = multiprocessing.cpu_count() * 5
        else:
            server_threads = config.async_server_threads
        server = test_common.test_server(max_workers=server_threads)
        if config.server_type == control_pb2.ASYNC_SERVER:
            servicer = benchmark_server.BenchmarkServer()
            benchmark_service_pb2_grpc.add_BenchmarkServiceServicer_to_server(
                servicer, server
            )
        elif config.server_type == control_pb2.ASYNC_GENERIC_SERVER:
            # Generic handlers (registered without generated stubs) for the
            # raw byte-buffer benchmark payloads.
            resp_size = config.payload_config.bytebuf_params.resp_size
            servicer = benchmark_server.GenericBenchmarkServer(resp_size)
            method_implementations = {
                "StreamingCall": grpc.stream_stream_rpc_method_handler(
                    servicer.StreamingCall
                ),
                "UnaryCall": grpc.unary_unary_rpc_method_handler(
                    servicer.UnaryCall
                ),
            }
            handler = grpc.method_handlers_generic_handler(
                "grpc.testing.BenchmarkService", method_implementations
            )
            server.add_generic_rpc_handlers((handler,))
        else:
            raise Exception(
                "Unsupported server type {}".format(config.server_type)
            )

        if self._server_port is not None and config.port == 0:
            # Config left the port unspecified: use the command-line override.
            server_port = self._server_port
        else:
            server_port = config.port

        if config.HasField("security_params"):  # Use SSL
            server_creds = grpc.ssl_server_credentials(
                ((resources.private_key(), resources.certificate_chain()),)
            )
            port = server.add_secure_port(
                "[::]:{}".format(server_port), server_creds
            )
        else:
            port = server.add_insecure_port("[::]:{}".format(server_port))

        return (server, port)

    def RunClient(self, request_iterator, context):
        """Run benchmark clients; stream a stats snapshot per driver mark."""
        # The first message on the stream carries the client setup config.
        # pylint: disable=stop-iteration-return
        config = next(request_iterator).setup
        # pylint: enable=stop-iteration-return
        client_runners = []
        # Shared latency histogram that all client runners feed into.
        qps_data = histogram.Histogram(
            config.histogram_params.resolution,
            config.histogram_params.max_possible,
        )
        self._snapshotter.snapshot()
        self._snapshotter.reset()

        # Create a client for each channel
        for i in range(config.client_channels):
            server = config.server_targets[i % len(config.server_targets)]
            runner = self._create_client_runner(server, config, qps_data)
            client_runners.append(runner)
            runner.start()

        self._snapshotter.snapshot()
        yield self._get_client_status(qps_data)

        # Respond to stat requests
        for request in request_iterator:
            self._snapshotter.snapshot()
            status = self._get_client_status(qps_data)
            if request.mark.reset:
                qps_data.reset()
                self._snapshotter.reset()
            yield status

        # Cleanup the clients
        for runner in client_runners:
            runner.stop()

    def _get_client_status(self, qps_data):
        # Package the latency histogram plus CPU/elapsed deltas.
        latencies = qps_data.get_data()
        stats = stats_pb2.ClientStats(
            latencies=latencies, **self._snapshotter.stats()
        )
        return control_pb2.ClientStatus(stats=stats)

    def _create_client_runner(self, server, config, qps_data):
        """Build the benchmark client + runner pair requested by the config."""
        no_ping_pong = False
        if config.client_type == control_pb2.SYNC_CLIENT:
            if config.rpc_type == control_pb2.UNARY:
                client = benchmark_client.UnarySyncBenchmarkClient(
                    server, config, qps_data
                )
            elif config.rpc_type == control_pb2.STREAMING:
                client = benchmark_client.StreamingSyncBenchmarkClient(
                    server, config, qps_data
                )
            elif config.rpc_type == control_pb2.STREAMING_FROM_SERVER:
                no_ping_pong = True
                client = benchmark_client.ServerStreamingSyncBenchmarkClient(
                    server, config, qps_data
                )
        elif config.client_type == control_pb2.ASYNC_CLIENT:
            if config.rpc_type == control_pb2.UNARY:
                client = benchmark_client.UnaryAsyncBenchmarkClient(
                    server, config, qps_data
                )
            else:
                raise Exception("Async streaming client not supported")
        else:
            raise Exception(
                "Unsupported client type {}".format(config.client_type)
            )

        # In multi-channel tests, we split the load across all channels
        load_factor = float(config.client_channels)
        if config.load_params.WhichOneof("load") == "closed_loop":
            runner = client_runner.ClosedLoopClientRunner(
                client, config.outstanding_rpcs_per_channel, no_ping_pong
            )
        else:  # Open loop Poisson
            alpha = config.load_params.poisson.offered_load / load_factor

            def poisson():
                # Infinite stream of exponentially-distributed intervals.
                while True:
                    yield random.expovariate(alpha)

            runner = client_runner.OpenLoopClientRunner(client, poisson())

        return runner

    def CoreCount(self, request, context):
        """Report the number of CPU cores available to this worker."""
        return control_pb2.CoreResponse(cores=multiprocessing.cpu_count())

    def QuitWorker(self, request, context):
        """Signal wait_for_quit() so the hosting process can shut down."""
        self._quit_event.set()
        return control_pb2.Void()

    def wait_for_quit(self):
        """Block until a QuitWorker RPC arrives."""
        self._quit_event.wait()
| 9,242
| 34.964981
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/qps/qps_worker.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The entry point for the qps worker."""
import argparse
import logging
import time
import grpc
from src.proto.grpc.testing import worker_service_pb2_grpc
from tests.qps import worker_server
from tests.unit import test_common
def run_worker_server(driver_port, server_port):
    """Serve the WorkerService until the driver sends QuitWorker.

    Args:
      driver_port: port exposed to the benchmark driver.
      server_port: optional fixed port for benchmark servers created by this
        worker (passed through to WorkerServer; may be None).
    """
    server = test_common.test_server()
    servicer = worker_server.WorkerServer(server_port)
    worker_service_pb2_grpc.add_WorkerServiceServicer_to_server(
        servicer, server
    )
    server.add_insecure_port("[::]:{}".format(driver_port))
    server.start()
    # Block until the driver's QuitWorker RPC, then shut down immediately.
    servicer.wait_for_quit()
    server.stop(0)
if __name__ == "__main__":
    # Verbose logging helps diagnose driver/worker communication issues.
    logging.basicConfig(level=logging.DEBUG)
    arg_parser = argparse.ArgumentParser(
        description="gRPC Python performance testing worker"
    )
    arg_parser.add_argument(
        "--driver_port",
        type=int,
        dest="driver_port",
        help="The port for the worker to expose for driver communication",
    )
    arg_parser.add_argument(
        "--server_port",
        type=int,
        default=None,
        dest="server_port",
        help=(
            "The port for the server if not specified by server config message"
        ),
    )
    parsed_args = arg_parser.parse_args()
    run_worker_server(parsed_args.driver_port, parsed_args.server_port)
| 1,851
| 28.870968
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/qps/benchmark_client.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines test client behaviors (UNARY/STREAMING) (SYNC/ASYNC)."""
import abc
from concurrent import futures
import queue
import threading
import time
import grpc
from src.proto.grpc.testing import benchmark_service_pb2_grpc
from src.proto.grpc.testing import messages_pb2
from tests.unit import resources
from tests.unit import test_common
# Effectively-infinite RPC deadline used by all benchmark calls (24h, seconds).
_TIMEOUT = 60 * 60 * 24
class GenericStub(object):
    """Hand-built stub exposing the BenchmarkService methods by path only."""

    def __init__(self, channel):
        # Multi-callables are created directly from the channel with just the
        # fully-qualified method path.
        self.UnaryCall = channel.unary_unary(
            "/grpc.testing.BenchmarkService/UnaryCall"
        )
        self.StreamingCall = channel.stream_stream(
            "/grpc.testing.BenchmarkService/StreamingCall"
        )
        self.StreamingFromServer = channel.unary_stream(
            "/grpc.testing.BenchmarkService/StreamingFromServer"
        )
class BenchmarkClient(abc.ABC):
    """Benchmark client interface that exposes a non-blocking send_request().

    NOTE: the original declared ``__metaclass__ = abc.ABCMeta`` — the
    Python 2 spelling, which is inert on Python 3, so send_request() was not
    actually enforced as abstract.  Inheriting from abc.ABC restores the
    intended enforcement; all concrete subclasses already implement it.
    """

    def __init__(self, server, config, hist):
        """Connect to `server` and prepare the per-call request per `config`.

        Args:
          server: target host:port string.
          config: client configuration proto (security + payload settings).
          hist: histogram accumulating per-query latencies (in nanoseconds).
        """
        # Create the stub
        if config.HasField("security_params"):
            creds = grpc.ssl_channel_credentials(
                resources.test_root_certificates()
            )
            channel = test_common.test_secure_channel(
                server, creds, config.security_params.server_host_override
            )
        else:
            channel = grpc.insecure_channel(server)

        # waits for the channel to be ready before we start sending messages
        grpc.channel_ready_future(channel).result()

        if config.payload_config.WhichOneof("payload") == "simple_params":
            # Protobuf payload: generated stub + SimpleRequest message.
            self._generic = False
            self._stub = benchmark_service_pb2_grpc.BenchmarkServiceStub(
                channel
            )
            payload = messages_pb2.Payload(
                body=bytes(b"\0" * config.payload_config.simple_params.req_size)
            )
            self._request = messages_pb2.SimpleRequest(
                payload=payload,
                response_size=config.payload_config.simple_params.resp_size,
            )
        else:
            # Raw byte-buffer payload: generic stub + plain zero bytes.
            self._generic = True
            self._stub = GenericStub(channel)
            self._request = bytes(
                b"\0" * config.payload_config.bytebuf_params.req_size
            )

        self._hist = hist
        self._response_callbacks = []

    def add_response_callback(self, callback):
        """callback will be invoked as callback(client, query_time)"""
        self._response_callbacks.append(callback)

    @abc.abstractmethod
    def send_request(self):
        """Non-blocking wrapper for a client's request operation."""
        raise NotImplementedError()

    def start(self):
        pass

    def stop(self):
        pass

    def _handle_response(self, client, query_time):
        """Record a completed query and fan out to registered callbacks."""
        self._hist.add(query_time * 1e9)  # Report times in nanoseconds
        for callback in self._response_callbacks:
            callback(client, query_time)
class UnarySyncBenchmarkClient(BenchmarkClient):
    """Blocking unary-unary client; a thread pool provides request concurrency."""

    def __init__(self, server, config, hist):
        super().__init__(server, config, hist)
        # One worker per allowed outstanding RPC.
        self._pool = futures.ThreadPoolExecutor(
            max_workers=config.outstanding_rpcs_per_channel
        )

    def send_request(self):
        # Send requests in separate threads to support multiple outstanding rpcs
        # (See src/proto/grpc/testing/control.proto)
        self._pool.submit(self._dispatch_request)

    def stop(self):
        """Drain in-flight work, then drop the stub."""
        self._pool.shutdown(wait=True)
        self._stub = None

    def _dispatch_request(self):
        # Time a single blocking UnaryCall and record its latency.
        started = time.time()
        self._stub.UnaryCall(self._request, _TIMEOUT)
        self._handle_response(self, time.time() - started)
class UnaryAsyncBenchmarkClient(BenchmarkClient):
    """Non-blocking unary client built on the stub's Future API."""

    def send_request(self):
        # Use the Future callback api to support multiple outstanding rpcs
        issued_at = time.time()
        response_future = self._stub.UnaryCall.future(self._request, _TIMEOUT)
        response_future.add_done_callback(
            lambda resp: self._response_received(issued_at, resp)
        )

    def _response_received(self, start_time, resp):
        # Surface any RPC error, then record the observed latency.
        resp.result()
        self._handle_response(self, time.time() - start_time)

    def stop(self):
        self._stub = None
class _SyncStream(object):
def __init__(self, stub, generic, request, handle_response):
self._stub = stub
self._generic = generic
self._request = request
self._handle_response = handle_response
self._is_streaming = False
self._request_queue = queue.Queue()
self._send_time_queue = queue.Queue()
def send_request(self):
self._send_time_queue.put(time.time())
self._request_queue.put(self._request)
def start(self):
self._is_streaming = True
response_stream = self._stub.StreamingCall(
self._request_generator(), _TIMEOUT
)
for _ in response_stream:
self._handle_response(
self, time.time() - self._send_time_queue.get_nowait()
)
def stop(self):
self._is_streaming = False
def _request_generator(self):
while self._is_streaming:
try:
request = self._request_queue.get(block=True, timeout=1.0)
yield request
except queue.Empty:
pass
class StreamingSyncBenchmarkClient(BenchmarkClient):
    """Round-robins requests across a pool of synchronous bidi streams."""

    def __init__(self, server, config, hist):
        super().__init__(server, config, hist)
        self._pool = futures.ThreadPoolExecutor(
            max_workers=config.outstanding_rpcs_per_channel
        )
        # One long-lived stream per allowed outstanding RPC.
        self._streams = [
            _SyncStream(
                self._stub, self._generic, self._request, self._handle_response
            )
            for _ in range(config.outstanding_rpcs_per_channel)
        ]
        self._curr_stream = 0

    def send_request(self):
        # Use a round_robin scheduler to determine what stream to send on
        self._streams[self._curr_stream].send_request()
        self._curr_stream = (self._curr_stream + 1) % len(self._streams)

    def start(self):
        """Run every stream's blocking start() on the pool."""
        for stream in self._streams:
            self._pool.submit(stream.start)

    def stop(self):
        """Stop all streams, drain the pool, then drop the stub."""
        for stream in self._streams:
            stream.stop()
        self._pool.shutdown(wait=True)
        self._stub = None
class ServerStreamingSyncBenchmarkClient(BenchmarkClient):
    """Client for server-streaming RPCs; each send_request opens a new stream."""

    def __init__(self, server, config, hist):
        super().__init__(server, config, hist)
        # With a single outstanding RPC a plain thread suffices; otherwise a
        # pool handles the concurrent streams.
        if config.outstanding_rpcs_per_channel == 1:
            self._pool = None
        else:
            self._pool = futures.ThreadPoolExecutor(
                max_workers=config.outstanding_rpcs_per_channel
            )
        self._rpcs = []
        self._sender = None

    def send_request(self):
        if self._pool is None:
            self._sender = threading.Thread(
                target=self._one_stream_streaming_rpc, daemon=True
            )
            self._sender.start()
        else:
            self._pool.submit(self._one_stream_streaming_rpc)

    def _one_stream_streaming_rpc(self):
        # Open the stream, then record one latency sample per response.
        call = self._stub.StreamingFromServer(self._request, _TIMEOUT)
        self._rpcs.append(call)
        mark = time.time()
        for _ in call:
            self._handle_response(self, time.time() - mark)
            mark = time.time()

    def stop(self):
        """Cancel outstanding calls, join workers, and drop the stub."""
        for call in self._rpcs:
            call.cancel()
        if self._sender is not None:
            self._sender.join()
        if self._pool is not None:
            self._pool.shutdown(wait=False)
        self._stub = None
| 8,456
| 32.035156
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/qps/__init__.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/qps/histogram.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import threading
from src.proto.grpc.testing import stats_pb2
class Histogram(object):
    """Thread-safe histogram for recording performance-test latencies.

    Observations are bucketed on a logarithmic scale whose base is
    ``1 + resolution``; values above ``max_possible`` are clamped into the
    final bucket.
    """

    def __init__(self, resolution, max_possible):
        self._lock = threading.Lock()
        self._resolution = resolution
        self._max_possible = max_possible
        self.multiplier = 1.0 + self._resolution
        # reset() establishes all of the accumulator state.
        self.reset()

    def reset(self):
        """Clear every recorded observation."""
        with self._lock:
            self._sum = 0
            self._sum_of_squares = 0
            self._count = 0
            self._min = self._max_possible
            self._max = 0
            self._buckets = [0] * (self._bucket_for(self._max_possible) + 1)

    def add(self, val):
        """Record a single observation."""
        with self._lock:
            self._sum += val
            self._sum_of_squares += val * val
            self._count += 1
            if val < self._min:
                self._min = val
            if val > self._max:
                self._max = val
            self._buckets[self._bucket_for(val)] += 1

    def get_data(self):
        """Return a stats_pb2.HistogramData snapshot of the current state."""
        with self._lock:
            data = stats_pb2.HistogramData()
            data.bucket.extend(self._buckets)
            data.min_seen = self._min
            data.max_seen = self._max
            data.sum = self._sum
            data.sum_of_squares = self._sum_of_squares
            data.count = self._count
            return data

    def merge(self, another_data):
        """Fold a HistogramData message produced elsewhere into this one."""
        with self._lock:
            for i in range(len(self._buckets)):
                self._buckets[i] += another_data.bucket[i]
            self._min = min(self._min, another_data.min_seen)
            self._max = max(self._max, another_data.max_seen)
            self._sum += another_data.sum
            self._sum_of_squares += another_data.sum_of_squares
            self._count += another_data.count

    def _bucket_for(self, val):
        # Clamp so out-of-range values land in the last bucket.
        val = min(val, self._max_possible)
        return int(math.log(val, self.multiplier))
| 2,736
| 32.790123
| 76
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/_sanity/_sanity_test.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import pkgutil
import unittest
import tests
class SanityTest(unittest.TestCase):
    """Checks that tests.json matches the autodiscovered test suites."""

    maxDiff = 32768

    TEST_PKG_MODULE_NAME = "tests"
    TEST_PKG_PATH = "tests"

    def testTestsJsonUpToDate(self):
        """Autodiscovers all test suites and checks that tests.json is up to date"""
        loader = tests.Loader()
        loader.loadTestsFromNames([self.TEST_PKG_MODULE_NAME])
        test_suite_names = sorted(
            {
                test_case_class.id().rsplit(".", 1)[0]
                for test_case_class in tests._loader.iterate_suite_cases(
                    loader.suite
                )
            }
        )
        tests_json_string = pkgutil.get_data(self.TEST_PKG_PATH, "tests.json")
        # Bug fix: parse the JSON document into a list before comparing.
        # Previously the raw decoded *string* was compared against the list of
        # suite names, which compares characters to names and always fails
        # (the module-level `import json` was left unused by mistake).
        tests_json = json.loads(tests_json_string.decode())
        self.assertSequenceEqual(tests_json, test_suite_names)
if __name__ == "__main__":
    # Run the sanity check directly, printing each test name as it runs.
    unittest.main(verbosity=2)
| 1,481
| 29.244898
| 84
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/_sanity/__init__.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/admin/test_admin.py
|
# Copyright 2021 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A test to ensure that admin services are registered correctly."""
from concurrent.futures import ThreadPoolExecutor
import logging
import sys
import unittest
import grpc
import grpc_admin
from grpc_channelz.v1 import channelz_pb2
from grpc_channelz.v1 import channelz_pb2_grpc
from grpc_csds import csds_pb2
from grpc_csds import csds_pb2_grpc
@unittest.skipIf(
    sys.version_info[0] < 3, "ProtoBuf descriptor has moved on from Python2"
)
class TestAdmin(unittest.TestCase):
    """Verifies that grpc_admin registers both the CSDS and Channelz services."""

    def setUp(self):
        self._server = grpc.server(ThreadPoolExecutor())
        port = self._server.add_insecure_port("localhost:0")
        grpc_admin.add_admin_servicers(self._server)
        self._server.start()
        self._channel = grpc.insecure_channel("localhost:%s" % port)

    def tearDown(self):
        self._channel.close()
        self._server.stop(0)

    def test_has_csds(self):
        stub = csds_pb2_grpc.ClientStatusDiscoveryServiceStub(self._channel)
        response = stub.FetchClientStatus(csds_pb2.ClientStatusRequest())
        # A non-empty config proves the CSDS service answered the RPC.
        self.assertGreater(len(response.config), 0)

    def test_has_channelz(self):
        stub = channelz_pb2_grpc.ChannelzStub(self._channel)
        response = stub.GetTopChannels(channelz_pb2.GetTopChannelsRequest())
        # A non-empty channel list proves Channelz answered the RPC.
        self.assertGreater(len(response.channel), 0)
if __name__ == "__main__":
    # Verbose logging aids debugging of admin-service registration failures.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 2,098
| 33.409836
| 76
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/http2/negative_http2_client.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python client used to test negative http2 conditions."""
import argparse
import time
import grpc
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
def _validate_payload_type_and_length(response, expected_type, expected_length):
if response.payload.type is not expected_type:
raise ValueError(
"expected payload type %s, got %s"
% (expected_type, type(response.payload.type))
)
elif len(response.payload.body) != expected_length:
raise ValueError(
"expected payload body size %d, got %d"
% (expected_length, len(response.payload.body))
)
def _expect_status_code(call, expected_code):
if call.code() != expected_code:
raise ValueError(
"expected code %s, got %s" % (expected_code, call.code())
)
def _expect_status_details(call, expected_details):
if call.details() != expected_details:
raise ValueError(
"expected message %s, got %s" % (expected_details, call.details())
)
def _validate_status_code_and_details(call, expected_code, expected_details):
_expect_status_code(call, expected_code)
_expect_status_details(call, expected_details)
# common requests
# Payload sizes mirror the gRPC interop spec: a 314159-byte request payload
# and a requested 271828-byte response payload.
_REQUEST_SIZE = 314159
_RESPONSE_SIZE = 271828
_SIMPLE_REQUEST = messages_pb2.SimpleRequest(
    response_type=messages_pb2.COMPRESSABLE,
    response_size=_RESPONSE_SIZE,
    payload=messages_pb2.Payload(body=b"\x00" * _REQUEST_SIZE),
)
def _goaway(stub):
    """Issue a call, wait through a GOAWAY, then verify a second call works."""
    response_before = stub.UnaryCall(_SIMPLE_REQUEST)
    _validate_payload_type_and_length(
        response_before, messages_pb2.COMPRESSABLE, _RESPONSE_SIZE
    )
    # Give the server time to emit the GOAWAY frame.
    time.sleep(1)
    response_after = stub.UnaryCall(_SIMPLE_REQUEST)
    _validate_payload_type_and_length(
        response_after, messages_pb2.COMPRESSABLE, _RESPONSE_SIZE
    )
def _rst_after_header(stub):
    """Expect INTERNAL when the server RSTs the stream right after headers."""
    call = stub.UnaryCall.future(_SIMPLE_REQUEST)
    _validate_status_code_and_details(
        call,
        grpc.StatusCode.INTERNAL,
        "Received RST_STREAM with error code 0",
    )
def _rst_during_data(stub):
    """Expect INTERNAL when the server RSTs the stream mid-message."""
    call = stub.UnaryCall.future(_SIMPLE_REQUEST)
    _validate_status_code_and_details(
        call,
        grpc.StatusCode.INTERNAL,
        "Received RST_STREAM with error code 0",
    )
def _rst_after_data(stub):
    """Expect INTERNAL when the server RSTs the stream after the full message."""
    call = stub.UnaryCall.future(_SIMPLE_REQUEST)
    _validate_status_code_and_details(
        call,
        grpc.StatusCode.INTERNAL,
        "Received RST_STREAM with error code 0",
    )
def _ping(stub):
    """A plain unary call; the server exercises HTTP/2 PING frames around it."""
    reply = stub.UnaryCall(_SIMPLE_REQUEST)
    _validate_payload_type_and_length(
        reply, messages_pb2.COMPRESSABLE, _RESPONSE_SIZE
    )
def _max_streams(stub):
    """Exercise the server's MAX_CONCURRENT_STREAMS limit."""
    # send one req to ensure server sets MAX_STREAMS
    warmup = stub.UnaryCall(_SIMPLE_REQUEST)
    _validate_payload_type_and_length(
        warmup, messages_pb2.COMPRESSABLE, _RESPONSE_SIZE
    )
    # Launch more concurrent calls than the advertised stream limit.
    pending = [stub.UnaryCall.future(_SIMPLE_REQUEST) for _ in range(15)]
    for future in pending:
        _validate_payload_type_and_length(
            future.result(), messages_pb2.COMPRESSABLE, _RESPONSE_SIZE
        )
def _run_test_case(test_case, stub):
if test_case == "goaway":
_goaway(stub)
elif test_case == "rst_after_header":
_rst_after_header(stub)
elif test_case == "rst_during_data":
_rst_during_data(stub)
elif test_case == "rst_after_data":
_rst_after_data(stub)
elif test_case == "ping":
_ping(stub)
elif test_case == "max_streams":
_max_streams(stub)
else:
raise ValueError("Invalid test case: %s" % test_case)
def _args():
parser = argparse.ArgumentParser()
parser.add_argument(
"--server_host",
help="the host to which to connect",
type=str,
default="127.0.0.1",
)
parser.add_argument(
"--server_port",
help="the port to which to connect",
type=int,
default="8080",
)
parser.add_argument(
"--test_case",
help="the test case to execute",
type=str,
default="goaway",
)
return parser.parse_args()
def _stub(server_host, server_port):
    """Create a TestService stub once the channel is connected."""
    address = "{}:{}".format(server_host, server_port)
    channel = grpc.insecure_channel(address)
    # Block until the channel is ready so every test starts from a known state.
    grpc.channel_ready_future(channel).result()
    return test_pb2_grpc.TestServiceStub(channel)
def main():
    """Entry point: run the selected negative-HTTP/2 test case."""
    args = _args()
    _run_test_case(args.test_case, _stub(args.server_host, args.server_port))
if __name__ == "__main__":
    # Run the negative HTTP/2 client from the command line.
    main()
| 5,277
| 27.376344
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/interop/resources.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Constants and functions for data used in interoperability testing."""
import argparse
import os
import pkgutil
# Package-relative paths of the test credentials shipped with the interop
# suite (loaded below via pkgutil.get_data).
_ROOT_CERTIFICATES_RESOURCE_PATH = "credentials/ca.pem"
_PRIVATE_KEY_RESOURCE_PATH = "credentials/server1.key"
_CERTIFICATE_CHAIN_RESOURCE_PATH = "credentials/server1.pem"
def test_root_certificates():
    """Return the PEM-encoded test CA certificate bytes."""
    return pkgutil.get_data(__name__, _ROOT_CERTIFICATES_RESOURCE_PATH)
def private_key():
    """Return the PEM-encoded test server private-key bytes."""
    return pkgutil.get_data(__name__, _PRIVATE_KEY_RESOURCE_PATH)
def certificate_chain():
    """Return the PEM-encoded test server certificate-chain bytes."""
    return pkgutil.get_data(__name__, _CERTIFICATE_CHAIN_RESOURCE_PATH)
def parse_bool(value):
    """argparse `type` converting the literal strings true/false to bool."""
    mapping = {"true": True, "false": False}
    if value in mapping:
        return mapping[value]
    raise argparse.ArgumentTypeError("Only true/false allowed")
| 1,334
| 30.046512
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/interop/_intraop_test_case.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common code for unit tests of the interoperability test code."""
from tests.interop import methods
class IntraopTestCase(object):
    """Unit test methods.

    This class must be mixed in with unittest.TestCase and a class that defines
    setUp and tearDown methods that manage a stub attribute.
    """

    def _run_interop_case(self, test_case):
        # Every case shares the same invocation shape; args is unused here.
        test_case.test_interoperability(self.stub, None)

    def testEmptyUnary(self):
        self._run_interop_case(methods.TestCase.EMPTY_UNARY)

    def testLargeUnary(self):
        self._run_interop_case(methods.TestCase.LARGE_UNARY)

    def testServerStreaming(self):
        self._run_interop_case(methods.TestCase.SERVER_STREAMING)

    def testClientStreaming(self):
        self._run_interop_case(methods.TestCase.CLIENT_STREAMING)

    def testPingPong(self):
        self._run_interop_case(methods.TestCase.PING_PONG)

    def testCancelAfterBegin(self):
        self._run_interop_case(methods.TestCase.CANCEL_AFTER_BEGIN)

    def testCancelAfterFirstResponse(self):
        self._run_interop_case(methods.TestCase.CANCEL_AFTER_FIRST_RESPONSE)

    def testTimeoutOnSleepingServer(self):
        self._run_interop_case(methods.TestCase.TIMEOUT_ON_SLEEPING_SERVER)
| 1,898
| 33.527273
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/interop/_secure_intraop_test.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Secure client-server interoperability as a unit test."""
import sys
import unittest
import grpc
from src.proto.grpc.testing import test_pb2_grpc
from tests.interop import _intraop_test_case
from tests.interop import resources
from tests.interop import service
from tests.unit import test_common
_SERVER_HOST_OVERRIDE = "foo.test.google.fr"
@unittest.skipIf(
    sys.version_info[0] < 3, "ProtoBuf descriptor has moved on from Python2"
)
class SecureIntraopTest(_intraop_test_case.IntraopTestCase, unittest.TestCase):
    """Runs the interop suite over a TLS channel within a single process."""

    def setUp(self):
        self.server = test_common.test_server()
        test_pb2_grpc.add_TestServiceServicer_to_server(
            service.TestService(), self.server
        )
        server_credentials = grpc.ssl_server_credentials(
            [(resources.private_key(), resources.certificate_chain())]
        )
        port = self.server.add_secure_port("[::]:0", server_credentials)
        self.server.start()
        channel_credentials = grpc.ssl_channel_credentials(
            resources.test_root_certificates()
        )
        # The test certificate is issued for foo.test.google.fr, so override
        # the target name that hostname verification checks against.
        channel = grpc.secure_channel(
            "localhost:{}".format(port),
            channel_credentials,
            (("grpc.ssl_target_name_override", _SERVER_HOST_OVERRIDE),),
        )
        self.stub = test_pb2_grpc.TestServiceStub(channel)

    def tearDown(self):
        self.server.stop(None)
if __name__ == "__main__":
    # Run the secure intraop suite directly, printing each test name.
    unittest.main(verbosity=2)
| 2,096
| 30.298507
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/interop/server.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python implementation of the GRPC interoperability test server."""
import argparse
from concurrent import futures
import logging
import grpc
from src.proto.grpc.testing import test_pb2_grpc
from tests.interop import resources
from tests.interop import service
from tests.unit import test_common
logging.basicConfig()
# Module-level logger for the interop server.
_LOGGER = logging.getLogger(__name__)
def parse_interop_server_arguments():
    """Parse the interop server's command-line flags."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--port", type=int, required=True, help="the port on which to serve"
    )
    # Both security flags share the same boolean-string converter.
    for flag, help_text in (
        ("--use_tls", "require a secure connection"),
        ("--use_alts", "require an ALTS connection"),
    ):
        parser.add_argument(
            flag, default=False, type=resources.parse_bool, help=help_text
        )
    return parser.parse_args()
def get_server_credentials(use_tls):
    """Build TLS server credentials when use_tls is set, ALTS otherwise."""
    if not use_tls:
        return grpc.alts_server_credentials()
    key_and_chain = (resources.private_key(), resources.certificate_chain())
    return grpc.ssl_server_credentials((key_and_chain,))
def serve():
    """Start the interop test server and block until it terminates."""
    args = parse_interop_server_arguments()
    server = test_common.test_server()
    test_pb2_grpc.add_TestServiceServicer_to_server(
        service.TestService(), server
    )
    address = "[::]:{}".format(args.port)
    if args.use_tls or args.use_alts:
        server.add_secure_port(address, get_server_credentials(args.use_tls))
    else:
        server.add_insecure_port(address)
    server.start()
    _LOGGER.info("Server serving.")
    server.wait_for_termination()
    _LOGGER.info("Server stopped; exiting.")
if __name__ == "__main__":
    # Run the interop server from the command line.
    serve()
| 2,378
| 28.37037
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/interop/_insecure_intraop_test.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Insecure client-server interoperability as a unit test."""
import sys
import unittest
import grpc
from src.proto.grpc.testing import test_pb2_grpc
from tests.interop import _intraop_test_case
from tests.interop import server
from tests.interop import service
from tests.unit import test_common
@unittest.skipIf(
    sys.version_info[0] < 3, "ProtoBuf descriptor has moved on from Python2"
)
class InsecureIntraopTest(
    _intraop_test_case.IntraopTestCase, unittest.TestCase
):
    """Runs the interop suite over a plaintext channel in a single process."""

    def setUp(self):
        self.server = test_common.test_server()
        test_pb2_grpc.add_TestServiceServicer_to_server(
            service.TestService(), self.server
        )
        bound_port = self.server.add_insecure_port("[::]:0")
        self.server.start()
        self.stub = test_pb2_grpc.TestServiceStub(
            grpc.insecure_channel("localhost:{}".format(bound_port))
        )

    def tearDown(self):
        self.server.stop(None)
if __name__ == "__main__":
    # Run the insecure intraop suite directly, printing each test name.
    unittest.main(verbosity=2)
| 1,570
| 29.803922
| 76
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/interop/service.py
|
# Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python implementation of the TestServicer."""
import time
import grpc
from src.proto.grpc.testing import empty_pb2
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
# Metadata keys the interop spec requires servers to echo back to clients.
_INITIAL_METADATA_KEY = "x-grpc-test-echo-initial"
_TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin"
# Conversion factor between microseconds and seconds.
_US_IN_A_SECOND = 1000 * 1000
def _maybe_echo_metadata(servicer_context):
"""Copies metadata from request to response if it is present."""
invocation_metadata = dict(servicer_context.invocation_metadata())
if _INITIAL_METADATA_KEY in invocation_metadata:
initial_metadatum = (
_INITIAL_METADATA_KEY,
invocation_metadata[_INITIAL_METADATA_KEY],
)
servicer_context.send_initial_metadata((initial_metadatum,))
if _TRAILING_METADATA_KEY in invocation_metadata:
trailing_metadatum = (
_TRAILING_METADATA_KEY,
invocation_metadata[_TRAILING_METADATA_KEY],
)
servicer_context.set_trailing_metadata((trailing_metadatum,))
def _maybe_echo_status_and_message(request, servicer_context):
"""Sets the response context code and details if the request asks for them"""
if request.HasField("response_status"):
servicer_context.set_code(request.response_status.code)
servicer_context.set_details(request.response_status.message)
class TestService(test_pb2_grpc.TestServiceServicer):
    """Reference implementation of the interop TestService."""

    def EmptyCall(self, request, context):
        _maybe_echo_metadata(context)
        return empty_pb2.Empty()

    def UnaryCall(self, request, context):
        _maybe_echo_metadata(context)
        _maybe_echo_status_and_message(request, context)
        payload = messages_pb2.Payload(
            type=messages_pb2.COMPRESSABLE,
            body=b"\x00" * request.response_size,
        )
        return messages_pb2.SimpleResponse(payload=payload)

    def StreamingOutputCall(self, request, context):
        _maybe_echo_status_and_message(request, context)
        for parameters in request.response_parameters:
            # A nonzero interval throttles the stream between messages.
            if parameters.interval_us != 0:
                time.sleep(parameters.interval_us / _US_IN_A_SECOND)
            yield messages_pb2.StreamingOutputCallResponse(
                payload=messages_pb2.Payload(
                    type=request.response_type,
                    body=b"\x00" * parameters.size,
                )
            )

    def StreamingInputCall(self, request_iterator, context):
        total_bytes = 0
        for request in request_iterator:
            if request.payload is not None and request.payload.body:
                total_bytes += len(request.payload.body)
        return messages_pb2.StreamingInputCallResponse(
            aggregated_payload_size=total_bytes
        )

    def FullDuplexCall(self, request_iterator, context):
        _maybe_echo_metadata(context)
        for request in request_iterator:
            _maybe_echo_status_and_message(request, context)
            for parameters in request.response_parameters:
                if parameters.interval_us != 0:
                    time.sleep(parameters.interval_us / _US_IN_A_SECOND)
                yield messages_pb2.StreamingOutputCallResponse(
                    payload=messages_pb2.Payload(
                        type=request.payload.type,
                        body=b"\x00" * parameters.size,
                    )
                )

    # NOTE(nathaniel): Apparently this is the same as the full-duplex call?
    # NOTE(atash): It isn't even called in the interop spec (Oct 22 2015)...
    def HalfDuplexCall(self, request_iterator, context):
        return self.FullDuplexCall(request_iterator, context)
| 4,407
| 39.440367
| 81
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/interop/client.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python implementation of the GRPC interoperability test client."""
import argparse
import os
from google import auth as google_auth
from google.auth import jwt as google_auth_jwt
import grpc
from src.proto.grpc.testing import test_pb2_grpc
from tests.interop import methods
from tests.interop import resources
def parse_interop_client_args():
    """Parse the interop client's command-line flags.

    Boolean flags use resources.parse_bool, which accepts only the literal
    strings "true" and "false".
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--server_host",
        default="localhost",
        type=str,
        help="the host to which to connect",
    )
    parser.add_argument(
        "--server_port",
        type=int,
        required=True,
        help="the port to which to connect",
    )
    parser.add_argument(
        "--test_case",
        default="large_unary",
        type=str,
        help="the test case to execute",
    )
    parser.add_argument(
        "--use_tls",
        default=False,
        type=resources.parse_bool,
        help="require a secure connection",
    )
    parser.add_argument(
        "--use_alts",
        default=False,
        type=resources.parse_bool,
        help="require an ALTS secure connection",
    )
    parser.add_argument(
        "--use_test_ca",
        default=False,
        type=resources.parse_bool,
        help="replace platform root CAs with ca.pem",
    )
    parser.add_argument(
        "--custom_credentials_type",
        choices=["compute_engine_channel_creds"],
        default=None,
        help="use google default credentials",
    )
    parser.add_argument(
        "--server_host_override",
        type=str,
        help="the server host to which to claim to connect",
    )
    parser.add_argument(
        "--oauth_scope", type=str, help="scope for OAuth tokens"
    )
    parser.add_argument(
        "--default_service_account",
        type=str,
        help="email address of the default service account",
    )
    parser.add_argument(
        "--grpc_test_use_grpclb_with_child_policy",
        type=str,
        help=(
            "If non-empty, set a static service config on channels created by "
            + "grpc::CreateTestChannel, that configures the grpclb LB policy "
            + "with a child policy being the value of this flag (e.g."
            " round_robin " + "or pick_first)."
        ),
    )
    return parser.parse_args()
def _create_call_credentials(args):
    """Build per-call credentials for the auth-related interop cases.

    Returns None for test cases that require no call credentials.
    """
    if args.test_case == "oauth2_auth_token":
        google_credentials, _ = google_auth.default(scopes=[args.oauth_scope])
        # Fetch a token eagerly and attach it as a static access token.
        google_credentials.refresh(google_auth.transport.requests.Request())
        return grpc.access_token_call_credentials(google_credentials.token)
    if args.test_case == "compute_engine_creds":
        google_credentials, _ = google_auth.default(scopes=[args.oauth_scope])
        return grpc.metadata_call_credentials(
            google_auth.transport.grpc.AuthMetadataPlugin(
                credentials=google_credentials,
                request=google_auth.transport.requests.Request(),
            )
        )
    if args.test_case == "jwt_token_creds":
        jwt_credentials = (
            google_auth_jwt.OnDemandCredentials.from_service_account_file(
                os.environ[google_auth.environment_vars.CREDENTIALS]
            )
        )
        return grpc.metadata_call_credentials(
            google_auth.transport.grpc.AuthMetadataPlugin(
                credentials=jwt_credentials, request=None
            )
        )
    return None
def get_secure_channel_parameters(args):
    """Build (channel_credentials, channel_options) from the parsed flags.

    NOTE(review): if none of custom_credentials_type, use_tls, or use_alts is
    set, `channel_credentials` is never bound and the final return raises
    NameError — callers appear to invoke this only when one of those flags is
    set; confirm before relying on it elsewhere.
    """
    call_credentials = _create_call_credentials(args)
    channel_opts = ()
    if args.grpc_test_use_grpclb_with_child_policy:
        # Inject a static service config selecting grpclb with the requested
        # child policy.
        channel_opts += (
            (
                "grpc.service_config",
                '{"loadBalancingConfig": [{"grpclb": {"childPolicy": [{"%s":'
                " {}}]}}]}" % args.grpc_test_use_grpclb_with_child_policy,
            ),
        )
    if args.custom_credentials_type is not None:
        if args.custom_credentials_type == "compute_engine_channel_creds":
            # compute_engine_channel_creds carries its own call credentials.
            assert call_credentials is None
            google_credentials, unused_project_id = google_auth.default(
                scopes=[args.oauth_scope]
            )
            call_creds = grpc.metadata_call_credentials(
                google_auth.transport.grpc.AuthMetadataPlugin(
                    credentials=google_credentials,
                    request=google_auth.transport.requests.Request(),
                )
            )
            channel_credentials = grpc.compute_engine_channel_credentials(
                call_creds
            )
        else:
            raise ValueError(
                "Unknown credentials type '{}'".format(
                    args.custom_credentials_type
                )
            )
    elif args.use_tls:
        if args.use_test_ca:
            root_certificates = resources.test_root_certificates()
        else:
            root_certificates = None  # will load default roots.
        channel_credentials = grpc.ssl_channel_credentials(root_certificates)
        if call_credentials is not None:
            channel_credentials = grpc.composite_channel_credentials(
                channel_credentials, call_credentials
            )
        if args.server_host_override:
            # The test certificate is not issued for the connect address, so
            # override the name used for hostname verification.
            channel_opts += (
                (
                    "grpc.ssl_target_name_override",
                    args.server_host_override,
                ),
            )
    elif args.use_alts:
        channel_credentials = grpc.alts_channel_credentials()
    return channel_credentials, channel_opts
def _create_channel(args):
    """Create a secure or insecure channel according to the parsed flags."""
    target = "{}:{}".format(args.server_host, args.server_port)
    needs_security = (
        args.use_tls
        or args.use_alts
        or args.custom_credentials_type is not None
    )
    if not needs_security:
        return grpc.insecure_channel(target)
    channel_credentials, options = get_secure_channel_parameters(args)
    return grpc.secure_channel(target, channel_credentials, options)
def create_stub(channel, args):
    """Return the stub appropriate for the requested test case."""
    if args.test_case == "unimplemented_service":
        return test_pb2_grpc.UnimplementedServiceStub(channel)
    return test_pb2_grpc.TestServiceStub(channel)
def _test_case_from_arg(test_case_arg):
    """Map a command-line test-case string to a methods.TestCase member."""
    for candidate in methods.TestCase:
        if candidate.value == test_case_arg:
            return candidate
    raise ValueError('No test case "%s"!' % test_case_arg)
def test_interoperability():
    """Entry point: parse flags, build a stub, and run the chosen case."""
    args = parse_interop_client_args()
    stub = create_stub(_create_channel(args), args)
    _test_case_from_arg(args.test_case).test_interoperability(stub, args)
if __name__ == "__main__":
    # Run the interop client from the command line.
    test_interoperability()
| 7,386
| 31.54185
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/interop/methods.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementations of interoperability test methods."""
# NOTE(lidiz) This module only exists in Bazel BUILD file, for more details
# please refer to comments in the "bazel_namespace_package_hack" module.
try:
    from tests import bazel_namespace_package_hack
    bazel_namespace_package_hack.sys_path_to_site_dir_hack()
except ImportError:
    # Outside Bazel the hack module does not exist; the normal
    # site-packages layout already works, so the failure is ignored.
    pass
import enum
import json
import os
import threading
import time
from google import auth as google_auth
from google.auth import environment_vars as google_auth_environment_vars
from google.auth.transport import grpc as google_auth_transport_grpc
from google.auth.transport import requests as google_auth_transport_requests
import grpc
from src.proto.grpc.testing import empty_pb2
from src.proto.grpc.testing import messages_pb2
# Metadata keys the interop server echoes back in initial/trailing
# metadata; the "-bin" suffix marks a binary-valued key.
_INITIAL_METADATA_KEY = "x-grpc-test-echo-initial"
_TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin"
def _expect_status_code(call, expected_code):
if call.code() != expected_code:
raise ValueError(
"expected code %s, got %s" % (expected_code, call.code())
)
def _expect_status_details(call, expected_details):
if call.details() != expected_details:
raise ValueError(
"expected message %s, got %s" % (expected_details, call.details())
)
def _validate_status_code_and_details(call, expected_code, expected_details):
    """Validate both the status code and status message of a finished call."""
    _expect_status_code(call, expected_code)
    _expect_status_details(call, expected_details)
def _validate_payload_type_and_length(response, expected_type, expected_length):
if response.payload.type is not expected_type:
raise ValueError(
"expected payload type %s, got %s"
% (expected_type, type(response.payload.type))
)
elif len(response.payload.body) != expected_length:
raise ValueError(
"expected payload body size %d, got %d"
% (expected_length, len(response.payload.body))
)
def _large_unary_common_behavior(
    stub, fill_username, fill_oauth_scope, call_credentials
):
    """Run a large UnaryCall and validate the response payload.

    Args:
      stub: TestService stub to issue the call through.
      fill_username: whether the server should echo the authenticated user.
      fill_oauth_scope: whether the server should echo the OAuth scope.
      call_credentials: optional per-call credentials (may be None).

    Returns:
      The validated SimpleResponse.
    """
    size = 314159
    request = messages_pb2.SimpleRequest(
        response_type=messages_pb2.COMPRESSABLE,
        response_size=size,
        payload=messages_pb2.Payload(body=b"\x00" * 271828),
        fill_username=fill_username,
        fill_oauth_scope=fill_oauth_scope,
    )
    response_future = stub.UnaryCall.future(
        request, credentials=call_credentials
    )
    response = response_future.result()
    _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, size)
    return response
def _empty_unary(stub):
    """Issue EmptyCall and check that the reply really is an Empty message."""
    response = stub.EmptyCall(empty_pb2.Empty())
    if isinstance(response, empty_pb2.Empty):
        return
    raise TypeError(
        'response is of type "%s", not empty_pb2.Empty!' % type(response)
    )
def _large_unary(stub):
    """Large unary RPC with no username/oauth echo and no call credentials."""
    _large_unary_common_behavior(stub, False, False, None)
def _client_streaming(stub):
    """Stream several payloads and check the server-reported aggregate size.

    The server replies with the total number of payload bytes it received,
    which must equal the sum of the request payload sizes.
    """
    payload_body_sizes = (
        27182,
        8,
        1828,
        45904,
    )
    # Derive the expected aggregate from the sizes themselves so the two
    # cannot drift apart (previously a hard-coded 74922).
    expected_aggregate = sum(payload_body_sizes)
    payloads = (
        messages_pb2.Payload(body=b"\x00" * size) for size in payload_body_sizes
    )
    requests = (
        messages_pb2.StreamingInputCallRequest(payload=payload)
        for payload in payloads
    )
    response = stub.StreamingInputCall(requests)
    if response.aggregated_payload_size != expected_aggregate:
        raise ValueError(
            "incorrect size %d!" % response.aggregated_payload_size
        )
def _server_streaming(stub):
    """Request four responses of known sizes and validate each in order."""
    sizes = (
        31415,
        9,
        2653,
        58979,
    )
    request = messages_pb2.StreamingOutputCallRequest(
        response_type=messages_pb2.COMPRESSABLE,
        response_parameters=tuple(
            messages_pb2.ResponseParameters(size=size) for size in sizes
        ),
    )
    for index, response in enumerate(stub.StreamingOutputCall(request)):
        _validate_payload_type_and_length(
            response, messages_pb2.COMPRESSABLE, sizes[index]
        )
class _Pipe(object):
def __init__(self):
self._condition = threading.Condition()
self._values = []
self._open = True
def __iter__(self):
return self
def __next__(self):
return self.next()
def next(self):
with self._condition:
while not self._values and self._open:
self._condition.wait()
if self._values:
return self._values.pop(0)
else:
raise StopIteration()
def add(self, value):
with self._condition:
self._values.append(value)
self._condition.notify()
def close(self):
with self._condition:
self._open = False
self._condition.notify()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
def _ping_pong(stub):
    """Alternate one request and one response over a full-duplex stream."""
    response_sizes = (31415, 9, 2653, 58979)
    payload_sizes = (27182, 8, 1828, 45904)
    with _Pipe() as pipe:
        response_iterator = stub.FullDuplexCall(pipe)
        for response_size, payload_size in zip(response_sizes, payload_sizes):
            pipe.add(
                messages_pb2.StreamingOutputCallRequest(
                    response_type=messages_pb2.COMPRESSABLE,
                    response_parameters=(
                        messages_pb2.ResponseParameters(size=response_size),
                    ),
                    payload=messages_pb2.Payload(body=b"\x00" * payload_size),
                )
            )
            _validate_payload_type_and_length(
                next(response_iterator),
                messages_pb2.COMPRESSABLE,
                response_size,
            )
def _cancel_after_begin(stub):
    """Cancel a client-streaming call before any request is sent."""
    with _Pipe() as pipe:
        response_future = stub.StreamingInputCall.future(pipe)
        response_future.cancel()
        if not response_future.cancelled():
            raise ValueError("expected cancelled method to return True")
        if response_future.code() is not grpc.StatusCode.CANCELLED:
            raise ValueError("expected status code CANCELLED")
def _cancel_after_first_response(stub):
    """Cancel a duplex call after receiving the first response.

    The subsequent ``next`` must fail with a CANCELLED RpcError.
    """
    request_response_sizes = (
        31415,
        9,
        2653,
        58979,
    )
    request_payload_sizes = (
        27182,
        8,
        1828,
        45904,
    )
    with _Pipe() as pipe:
        response_iterator = stub.FullDuplexCall(pipe)
        response_size = request_response_sizes[0]
        payload_size = request_payload_sizes[0]
        request = messages_pb2.StreamingOutputCallRequest(
            response_type=messages_pb2.COMPRESSABLE,
            response_parameters=(
                messages_pb2.ResponseParameters(size=response_size),
            ),
            payload=messages_pb2.Payload(body=b"\x00" * payload_size),
        )
        pipe.add(request)
        response = next(response_iterator)
        # We test the contents of `response` in the Ping Pong test - don't check
        # them here.
        response_iterator.cancel()
        try:
            next(response_iterator)
        except grpc.RpcError as rpc_error:
            # Any status other than CANCELLED is a real failure.
            if rpc_error.code() is not grpc.StatusCode.CANCELLED:
                raise
        else:
            raise ValueError("expected call to be cancelled")
def _timeout_on_sleeping_server(stub):
    """Expect DEADLINE_EXCEEDED from a duplex call with a 1 ms timeout."""
    request_payload_size = 27182
    with _Pipe() as pipe:
        response_iterator = stub.FullDuplexCall(pipe, timeout=0.001)
        request = messages_pb2.StreamingOutputCallRequest(
            response_type=messages_pb2.COMPRESSABLE,
            payload=messages_pb2.Payload(body=b"\x00" * request_payload_size),
        )
        pipe.add(request)
        try:
            next(response_iterator)
        except grpc.RpcError as rpc_error:
            # Only DEADLINE_EXCEEDED is acceptable here.
            if rpc_error.code() is not grpc.StatusCode.DEADLINE_EXCEEDED:
                raise
        else:
            raise ValueError("expected call to exceed deadline")
def _empty_stream(stub):
    """Close a duplex stream without sending anything; expect no responses."""
    with _Pipe() as pipe:
        response_iterator = stub.FullDuplexCall(pipe)
        pipe.close()
        try:
            next(response_iterator)
        except StopIteration:
            return
        raise ValueError("expected exactly 0 responses")
def _status_code_and_message(stub):
    """Ask the server to fail with a specific status; verify code & message."""
    details = "test status message"
    code = 2
    status = grpc.StatusCode.UNKNOWN  # code = 2
    # Test with a UnaryCall
    request = messages_pb2.SimpleRequest(
        response_type=messages_pb2.COMPRESSABLE,
        response_size=1,
        payload=messages_pb2.Payload(body=b"\x00"),
        response_status=messages_pb2.EchoStatus(code=code, message=details),
    )
    response_future = stub.UnaryCall.future(request)
    _validate_status_code_and_details(response_future, status, details)
    # Test with a FullDuplexCall
    with _Pipe() as pipe:
        response_iterator = stub.FullDuplexCall(pipe)
        request = messages_pb2.StreamingOutputCallRequest(
            response_type=messages_pb2.COMPRESSABLE,
            response_parameters=(messages_pb2.ResponseParameters(size=1),),
            payload=messages_pb2.Payload(body=b"\x00"),
            response_status=messages_pb2.EchoStatus(code=code, message=details),
        )
        pipe.add(request)  # sends the initial request.
        try:
            next(response_iterator)
        except grpc.RpcError as rpc_error:
            assert rpc_error.code() == status
    # Dropping out of with block closes the pipe
    _validate_status_code_and_details(response_iterator, status, details)
def _unimplemented_method(test_service_stub):
    """Call an unimplemented method on an otherwise-implemented service."""
    future = test_service_stub.UnimplementedCall.future(empty_pb2.Empty())
    _expect_status_code(future, grpc.StatusCode.UNIMPLEMENTED)
def _unimplemented_service(unimplemented_service_stub):
    """Call a method on a service the server does not implement at all."""
    future = unimplemented_service_stub.UnimplementedCall.future(
        empty_pb2.Empty()
    )
    _expect_status_code(future, grpc.StatusCode.UNIMPLEMENTED)
def _custom_metadata(stub):
    """Send custom initial/trailing metadata; verify the server echoes it."""
    initial_metadata_value = "test_initial_metadata_value"
    trailing_metadata_value = b"\x0a\x0b\x0a\x0b\x0a\x0b"
    metadata = (
        (_INITIAL_METADATA_KEY, initial_metadata_value),
        (_TRAILING_METADATA_KEY, trailing_metadata_value),
    )
    def _validate_metadata(response):
        # Both the initial and the trailing metadata must round-trip intact.
        initial_metadata = dict(response.initial_metadata())
        if initial_metadata[_INITIAL_METADATA_KEY] != initial_metadata_value:
            raise ValueError(
                "expected initial metadata %s, got %s"
                % (
                    initial_metadata_value,
                    initial_metadata[_INITIAL_METADATA_KEY],
                )
            )
        trailing_metadata = dict(response.trailing_metadata())
        if trailing_metadata[_TRAILING_METADATA_KEY] != trailing_metadata_value:
            raise ValueError(
                "expected trailing metadata %s, got %s"
                % (
                    trailing_metadata_value,
                    trailing_metadata[_TRAILING_METADATA_KEY],
                )
            )
    # Testing with UnaryCall
    request = messages_pb2.SimpleRequest(
        response_type=messages_pb2.COMPRESSABLE,
        response_size=1,
        payload=messages_pb2.Payload(body=b"\x00"),
    )
    response_future = stub.UnaryCall.future(request, metadata=metadata)
    _validate_metadata(response_future)
    # Testing with FullDuplexCall
    with _Pipe() as pipe:
        response_iterator = stub.FullDuplexCall(pipe, metadata=metadata)
        request = messages_pb2.StreamingOutputCallRequest(
            response_type=messages_pb2.COMPRESSABLE,
            response_parameters=(messages_pb2.ResponseParameters(size=1),),
        )
        pipe.add(request)  # Sends the request
        next(response_iterator)  # Causes server to send trailing metadata
    # Dropping out of the with block closes the pipe
    _validate_metadata(response_iterator)
def _compute_engine_creds(stub, args):
    """Verify the echoed username when using Compute Engine credentials."""
    response = _large_unary_common_behavior(stub, True, True, None)
    expected_username = args.default_service_account
    if expected_username != response.username:
        raise ValueError(
            "expected username %s, got %s"
            % (expected_username, response.username)
        )
def _oauth2_auth_token(stub, args):
    """Verify username and OAuth scope when using an OAuth2 access token.

    Reads the service-account JSON file named by the Google credentials
    environment variable to learn the expected client email.
    """
    json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS]
    # Use a context manager so the key file is closed promptly (the
    # original left the file object for the garbage collector).
    with open(json_key_filename, "r") as json_key_file:
        wanted_email = json.load(json_key_file)["client_email"]
    response = _large_unary_common_behavior(stub, True, True, None)
    if wanted_email != response.username:
        raise ValueError(
            "expected username %s, got %s" % (wanted_email, response.username)
        )
    if args.oauth_scope.find(response.oauth_scope) == -1:
        raise ValueError(
            'expected to find oauth scope "{}" in received "{}"'.format(
                response.oauth_scope, args.oauth_scope
            )
        )
def _jwt_token_creds(stub, args):
    """Verify the echoed username when authenticating with a JWT token."""
    json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS]
    # Close the key file deterministically instead of leaking the handle.
    with open(json_key_filename, "r") as json_key_file:
        wanted_email = json.load(json_key_file)["client_email"]
    response = _large_unary_common_behavior(stub, True, False, None)
    if wanted_email != response.username:
        raise ValueError(
            "expected username %s, got %s" % (wanted_email, response.username)
        )
def _per_rpc_creds(stub, args):
    """Verify the echoed username when supplying per-RPC call credentials."""
    json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS]
    # Close the key file deterministically instead of leaking the handle.
    with open(json_key_filename, "r") as json_key_file:
        wanted_email = json.load(json_key_file)["client_email"]
    google_credentials, unused_project_id = google_auth.default(
        scopes=[args.oauth_scope]
    )
    # Wrap the Google credentials in a gRPC call-credentials plugin.
    call_credentials = grpc.metadata_call_credentials(
        google_auth_transport_grpc.AuthMetadataPlugin(
            credentials=google_credentials,
            request=google_auth_transport_requests.Request(),
        )
    )
    response = _large_unary_common_behavior(stub, True, False, call_credentials)
    if wanted_email != response.username:
        raise ValueError(
            "expected username %s, got %s" % (wanted_email, response.username)
        )
def _special_status_message(stub, args):
    """Verify status details containing whitespace and non-BMP Unicode."""
    details = (
        b"\t\ntest with whitespace\r\nand Unicode BMP \xe2\x98\xba and non-BMP"
        b" \xf0\x9f\x98\x88\t\n".decode("utf-8")
    )
    code = 2
    status = grpc.StatusCode.UNKNOWN  # code = 2
    # Test with a UnaryCall
    request = messages_pb2.SimpleRequest(
        response_type=messages_pb2.COMPRESSABLE,
        response_size=1,
        payload=messages_pb2.Payload(body=b"\x00"),
        response_status=messages_pb2.EchoStatus(code=code, message=details),
    )
    response_future = stub.UnaryCall.future(request)
    _validate_status_code_and_details(response_future, status, details)
@enum.unique
class TestCase(enum.Enum):
    """All interop test cases; values match the ``--test_case`` flag."""

    EMPTY_UNARY = "empty_unary"
    LARGE_UNARY = "large_unary"
    SERVER_STREAMING = "server_streaming"
    CLIENT_STREAMING = "client_streaming"
    PING_PONG = "ping_pong"
    CANCEL_AFTER_BEGIN = "cancel_after_begin"
    CANCEL_AFTER_FIRST_RESPONSE = "cancel_after_first_response"
    EMPTY_STREAM = "empty_stream"
    STATUS_CODE_AND_MESSAGE = "status_code_and_message"
    UNIMPLEMENTED_METHOD = "unimplemented_method"
    UNIMPLEMENTED_SERVICE = "unimplemented_service"
    CUSTOM_METADATA = "custom_metadata"
    COMPUTE_ENGINE_CREDS = "compute_engine_creds"
    OAUTH2_AUTH_TOKEN = "oauth2_auth_token"
    JWT_TOKEN_CREDS = "jwt_token_creds"
    PER_RPC_CREDS = "per_rpc_creds"
    TIMEOUT_ON_SLEEPING_SERVER = "timeout_on_sleeping_server"
    SPECIAL_STATUS_MESSAGE = "special_status_message"

    def test_interoperability(self, stub, args):
        """Dispatch to the implementation function for this test case.

        Args:
          stub: TestService (or UnimplementedService) stub.
          args: parsed client flags; only credential cases read them.

        Raises:
          NotImplementedError: if no implementation exists for this member.
        """
        if self is TestCase.EMPTY_UNARY:
            _empty_unary(stub)
        elif self is TestCase.LARGE_UNARY:
            _large_unary(stub)
        elif self is TestCase.SERVER_STREAMING:
            _server_streaming(stub)
        elif self is TestCase.CLIENT_STREAMING:
            _client_streaming(stub)
        elif self is TestCase.PING_PONG:
            _ping_pong(stub)
        elif self is TestCase.CANCEL_AFTER_BEGIN:
            _cancel_after_begin(stub)
        elif self is TestCase.CANCEL_AFTER_FIRST_RESPONSE:
            _cancel_after_first_response(stub)
        elif self is TestCase.TIMEOUT_ON_SLEEPING_SERVER:
            _timeout_on_sleeping_server(stub)
        elif self is TestCase.EMPTY_STREAM:
            _empty_stream(stub)
        elif self is TestCase.STATUS_CODE_AND_MESSAGE:
            _status_code_and_message(stub)
        elif self is TestCase.UNIMPLEMENTED_METHOD:
            _unimplemented_method(stub)
        elif self is TestCase.UNIMPLEMENTED_SERVICE:
            _unimplemented_service(stub)
        elif self is TestCase.CUSTOM_METADATA:
            _custom_metadata(stub)
        elif self is TestCase.COMPUTE_ENGINE_CREDS:
            _compute_engine_creds(stub, args)
        elif self is TestCase.OAUTH2_AUTH_TOKEN:
            _oauth2_auth_token(stub, args)
        elif self is TestCase.JWT_TOKEN_CREDS:
            _jwt_token_creds(stub, args)
        elif self is TestCase.PER_RPC_CREDS:
            _per_rpc_creds(stub, args)
        elif self is TestCase.SPECIAL_STATUS_MESSAGE:
            _special_status_message(stub, args)
        else:
            raise NotImplementedError(
                'Test case "%s" not implemented!' % self.name
            )
| 18,222
| 33.061682
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/interop/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/fork/_fork_interop_test.py
|
# Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client-side fork interop tests as a unit test."""
import os
import subprocess
import sys
import tempfile
import threading
import time
import unittest
from grpc._cython import cygrpc
from tests.fork import methods
def _dump_streams(name, streams):
assert len(streams) == 2
for stream_name, stream in zip(("STDOUT", "STDERR"), streams):
stream.seek(0)
sys.stderr.write(
"{} {}:\n{}\n".format(
name, stream_name, stream.read().decode("ascii")
)
)
stream.close()
sys.stderr.flush()
# New instance of multiprocessing.Process using fork without exec can and will
# freeze if the Python process has any other threads running. This includes the
# additional thread spawned by our _runner.py class. So in order to test our
# compatibility with multiprocessing, we first fork+exec a new process to ensure
# we don't have any conflicting background threads.
_CLIENT_FORK_SCRIPT_TEMPLATE = """if True:
import os
from grpc._cython import cygrpc
from tests.fork import methods
from src.python.grpcio_tests.tests.fork import native_debug
native_debug.install_failure_signal_handler()
cygrpc._GRPC_ENABLE_FORK_SUPPORT = True
os.environ['GRPC_POLL_STRATEGY'] = 'epoll1'
os.environ['GRPC_ENABLE_FORK_SUPPORT'] = 'true'
methods.TestCase.%s.run_test({
'server_host': 'localhost',
'server_port': %d,
'use_tls': False
})
"""
_SUBPROCESS_TIMEOUT_S = 80
_GDB_TIMEOUT_S = 60
@unittest.skipUnless(
    sys.platform.startswith("linux"),
    "not supported on windows, and fork+exec networking blocked on mac",
)
@unittest.skipUnless(
    os.getenv("GRPC_ENABLE_FORK_SUPPORT") is not None,
    "Core must be built with fork support to run this test.",
)
class ForkInteropTest(unittest.TestCase):
    """Runs each fork test case against an interop server subprocess."""

    def setUp(self):
        """Start the interop server in a subprocess and learn its port."""
        self._port = None
        start_server_script = """if True:
            import sys
            import time
            import grpc
            from src.proto.grpc.testing import test_pb2_grpc
            from tests.interop import service as interop_service
            from tests.unit import test_common
            server = test_common.test_server()
            test_pb2_grpc.add_TestServiceServicer_to_server(
                interop_service.TestService(), server)
            port = server.add_insecure_port('[::]:0')
            server.start()
            print(port)
            sys.stdout.flush()
            while True:
                time.sleep(1)
        """
        # Capture the server's stdout/stderr in temp files for later dumps.
        self._streams = tuple(tempfile.TemporaryFile() for _ in range(2))
        self._server_process = subprocess.Popen(
            [sys.executable, "-c", start_server_script],
            stdout=self._streams[0],
            stderr=self._streams[1],
        )
        # Kill the server if it never reports a port within the timeout.
        timer = threading.Timer(
            _SUBPROCESS_TIMEOUT_S, self._server_process.kill
        )
        interval_secs = 2.0
        cumulative_secs = 0.0
        try:
            timer.start()
            # Poll the server's stdout until it prints the bound port.
            while cumulative_secs < _SUBPROCESS_TIMEOUT_S:
                self._streams[0].seek(0)
                s = self._streams[0].readline()
                if s:
                    self._port = int(s)
                    break
                time.sleep(interval_secs)
                cumulative_secs += interval_secs
            if self._port is None:
                # Timeout
                self._streams[0].seek(0)
                sys.stderr.write(
                    "Server STDOUT:\n{}\n".format(self._streams[0].read())
                )
                self._streams[1].seek(0)
                sys.stderr.write(
                    "Server STDERR:\n{}\n".format(self._streams[1].read())
                )
                sys.stderr.flush()
                raise Exception("Failed to get port from server.")
        except ValueError:
            # int(s) failed: the server printed something that isn't a port.
            raise Exception("Failed to get port from server")
        finally:
            timer.cancel()

    def testConnectivityWatch(self):
        self._verifyTestCase(methods.TestCase.CONNECTIVITY_WATCH)

    def testCloseChannelBeforeFork(self):
        self._verifyTestCase(methods.TestCase.CLOSE_CHANNEL_BEFORE_FORK)

    def testAsyncUnarySameChannel(self):
        self._verifyTestCase(methods.TestCase.ASYNC_UNARY_SAME_CHANNEL)

    def testAsyncUnaryNewChannel(self):
        self._verifyTestCase(methods.TestCase.ASYNC_UNARY_NEW_CHANNEL)

    def testBlockingUnarySameChannel(self):
        self._verifyTestCase(methods.TestCase.BLOCKING_UNARY_SAME_CHANNEL)

    def testBlockingUnaryNewChannel(self):
        self._verifyTestCase(methods.TestCase.BLOCKING_UNARY_NEW_CHANNEL)

    def testInProgressBidiContinueCall(self):
        self._verifyTestCase(methods.TestCase.IN_PROGRESS_BIDI_CONTINUE_CALL)

    def testInProgressBidiSameChannelAsyncCall(self):
        self._verifyTestCase(
            methods.TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_ASYNC_CALL
        )

    def testInProgressBidiSameChannelBlockingCall(self):
        self._verifyTestCase(
            methods.TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_BLOCKING_CALL
        )

    def testInProgressBidiNewChannelAsyncCall(self):
        self._verifyTestCase(
            methods.TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_ASYNC_CALL
        )

    def testInProgressBidiNewChannelBlockingCall(self):
        self._verifyTestCase(
            methods.TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_BLOCKING_CALL
        )

    def tearDown(self):
        """Kill the server subprocess and release its capture files."""
        self._server_process.kill()
        for stream in self._streams:
            stream.close()

    def _print_backtraces(self, pid):
        """Attach gdb to ``pid`` and dump all thread stacks for diagnosis."""
        cmd = [
            "gdb",
            "-ex",
            "set confirm off",
            "-ex",
            "echo attaching",
            "-ex",
            "attach {}".format(pid),
            "-ex",
            "echo print_backtrace",
            "-ex",
            "thread apply all bt",
            "-ex",
            "echo printed_backtrace",
            "-ex",
            "quit",
        ]
        streams = tuple(tempfile.TemporaryFile() for _ in range(2))
        sys.stderr.write("Invoking gdb\n")
        sys.stderr.flush()
        process = subprocess.Popen(cmd, stdout=streams[0], stderr=streams[1])
        try:
            process.wait(timeout=_GDB_TIMEOUT_S)
        except subprocess.TimeoutExpired:
            sys.stderr.write("gdb stacktrace generation timed out.\n")
        finally:
            _dump_streams("gdb", streams)

    def _verifyTestCase(self, test_case):
        """Run ``test_case`` in a fork+exec'd client and check it exits 0."""
        script = _CLIENT_FORK_SCRIPT_TEMPLATE % (test_case.name, self._port)
        streams = tuple(tempfile.TemporaryFile() for _ in range(2))
        process = subprocess.Popen(
            [sys.executable, "-c", script], stdout=streams[0], stderr=streams[1]
        )
        try:
            process.wait(timeout=_SUBPROCESS_TIMEOUT_S)
            self.assertEqual(0, process.returncode)
        except subprocess.TimeoutExpired:
            # Grab stacks before killing so a hang is diagnosable.
            self._print_backtraces(process.pid)
            process.kill()
            raise AssertionError("Parent process timed out.")
        finally:
            _dump_streams("Parent", streams)
            _dump_streams("Server", self._streams)
if __name__ == "__main__":
    # Verbose output helps diagnose flaky fork interactions in CI logs.
    unittest.main(verbosity=2)
| 7,811
| 32.527897
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/fork/client.py
|
# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python implementation of the GRPC interoperability test client."""
import argparse
import logging
import sys
from tests.fork import methods
def _args():
    """Parse command-line flags for the fork test client.

    Returns:
      An argparse.Namespace with server_host, server_port, test_case,
      and use_tls attributes.
    """

    def parse_bool(value):
        # argparse ``type`` callable: only the literal strings pass.
        if value == "true":
            return True
        if value == "false":
            return False
        raise argparse.ArgumentTypeError("Only true/false allowed")

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--server_host",
        default="localhost",
        type=str,
        help="the host to which to connect",
    )
    parser.add_argument(
        "--server_port",
        type=int,
        required=True,
        help="the port to which to connect",
    )
    parser.add_argument(
        "--test_case",
        default="large_unary",
        type=str,
        help="the test case to execute",
    )
    parser.add_argument(
        "--use_tls",
        default=False,
        type=parse_bool,
        help="require a secure connection",
    )
    return parser.parse_args()
def _test_case_from_arg(test_case_arg):
    """Map a ``--test_case`` string onto the matching TestCase member."""
    matches = [tc for tc in methods.TestCase if tc.value == test_case_arg]
    if not matches:
        raise ValueError('No test case "%s"!' % test_case_arg)
    return matches[0]
def test_fork():
    """Run one named fork test case, or every case when given ``all``."""
    logging.basicConfig(level=logging.INFO)
    args = vars(_args())
    if args["test_case"] == "all":
        for test_case in methods.TestCase:
            test_case.run_test(args)
    else:
        test_case = _test_case_from_arg(args["test_case"])
        test_case.run_test(args)
if __name__ == "__main__":
    test_fork()
| 2,169
| 26.125
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/fork/methods.py
|
# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementations of fork support test methods."""
import enum
import json
import logging
import multiprocessing
import os
import queue
import subprocess
import sys
import tempfile
import threading
import time
import traceback
import grpc
from src.proto.grpc.testing import empty_pb2
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
_LOGGER = logging.getLogger(__name__)
# Per-RPC deadline; how long to wait for a forked child; gdb attach budget.
_RPC_TIMEOUT_S = 10
_CHILD_FINISH_TIMEOUT_S = 20
_GDB_TIMEOUT_S = 60
def _channel(args):
    """Create a (possibly TLS-secured) channel from a fork-test args dict."""
    target = "{}:{}".format(args["server_host"], args["server_port"])
    if not args["use_tls"]:
        return grpc.insecure_channel(target)
    return grpc.secure_channel(target, grpc.ssl_channel_credentials())
def _validate_payload_type_and_length(response, expected_type, expected_length):
if response.payload.type is not expected_type:
raise ValueError(
"expected payload type %s, got %s"
% (expected_type, type(response.payload.type))
)
elif len(response.payload.body) != expected_length:
raise ValueError(
"expected payload body size %d, got %d"
% (expected_length, len(response.payload.body))
)
def _async_unary(stub):
    """Issue one UnaryCall via the future API and validate the reply."""
    expected_size = 314159
    request = messages_pb2.SimpleRequest(
        response_type=messages_pb2.COMPRESSABLE,
        response_size=expected_size,
        payload=messages_pb2.Payload(body=b"\x00" * 271828),
    )
    response = stub.UnaryCall.future(request, timeout=_RPC_TIMEOUT_S).result()
    _validate_payload_type_and_length(
        response, messages_pb2.COMPRESSABLE, expected_size
    )
def _blocking_unary(stub):
    """Issue one blocking UnaryCall and validate the reply payload."""
    expected_size = 314159
    request = messages_pb2.SimpleRequest(
        response_type=messages_pb2.COMPRESSABLE,
        response_size=expected_size,
        payload=messages_pb2.Payload(body=b"\x00" * 271828),
    )
    response = stub.UnaryCall(request, timeout=_RPC_TIMEOUT_S)
    _validate_payload_type_and_length(
        response, messages_pb2.COMPRESSABLE, expected_size
    )
class _Pipe(object):
def __init__(self):
self._condition = threading.Condition()
self._values = []
self._open = True
def __iter__(self):
return self
def __next__(self):
return self.next()
def next(self):
with self._condition:
while not self._values and self._open:
self._condition.wait()
if self._values:
return self._values.pop(0)
else:
raise StopIteration()
def add(self, value):
with self._condition:
self._values.append(value)
self._condition.notify()
def close(self):
with self._condition:
self._open = False
self._condition.notify()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
class _ChildProcess(object):
    """Runs ``task(*args)`` in an os.fork()'d child and surfaces failures.

    Exceptions raised by the task are shipped back over a multiprocessing
    queue; ``finish()`` re-raises them (and non-zero exits) in the parent.
    """

    def __init__(self, task, args=None):
        if args is None:
            args = ()
        self._exceptions = multiprocessing.Queue()
        # NOTE(review): these temp-file paths are never used after creation;
        # child output goes to the inherited stdio — looks vestigial.
        self._stdout_path = tempfile.mkstemp()[1]
        self._stderr_path = tempfile.mkstemp()[1]
        self._child_pid = None
        self._rc = None
        self._args = args
        self._task = task

    def _child_main(self):
        """Body executed in the child process right after fork."""
        import faulthandler

        faulthandler.enable(all_threads=True)
        try:
            self._task(*self._args)
        except grpc.RpcError as rpc_error:
            traceback.print_exc()
            # RpcError objects may not pickle cleanly; ship a string instead.
            self._exceptions.put("RpcError: %s" % rpc_error)
        except Exception as e:  # pylint: disable=broad-except
            traceback.print_exc()
            self._exceptions.put(e)
        sys.exit(0)

    def _orchestrate_child_gdb(self):
        """Attach gdb to this process, following the child across the fork."""
        cmd = [
            "gdb",
            "-ex",
            "set confirm off",
            "-ex",
            "attach {}".format(os.getpid()),
            "-ex",
            "set follow-fork-mode child",
            "-ex",
            "continue",
            "-ex",
            "bt",
        ]
        # NOTE(review): ``streams`` is created but never passed to Popen —
        # gdb output goes to our stderr; these temp files appear unused.
        streams = tuple(tempfile.TemporaryFile() for _ in range(2))
        sys.stderr.write("Invoking gdb\n")
        sys.stderr.flush()
        process = subprocess.Popen(cmd, stdout=sys.stderr, stderr=sys.stderr)
        time.sleep(5)

    def start(self):
        """Fork; the child runs the task and exits, the parent records its pid."""
        # NOTE: Try uncommenting the following line if the child is segfaulting.
        # self._orchestrate_child_gdb()
        ret = os.fork()
        if ret == 0:
            self._child_main()
        else:
            self._child_pid = ret

    def wait(self, timeout):
        """Poll (non-blocking waitpid) for child exit; True if it terminated."""
        total = 0.0
        wait_interval = 1.0
        while total < timeout:
            ret, termination = os.waitpid(self._child_pid, os.WNOHANG)
            if ret == self._child_pid:
                self._rc = termination
                return True
            time.sleep(wait_interval)
            total += wait_interval
        else:
            return False

    def _print_backtraces(self):
        """Attach gdb to the child and dump every thread's stack."""
        cmd = [
            "gdb",
            "-ex",
            "set confirm off",
            "-ex",
            "echo attaching",
            "-ex",
            "attach {}".format(self._child_pid),
            "-ex",
            "echo print_backtrace",
            "-ex",
            "thread apply all bt",
            "-ex",
            "echo printed_backtrace",
            "-ex",
            "quit",
        ]
        streams = tuple(tempfile.TemporaryFile() for _ in range(2))
        sys.stderr.write("Invoking gdb\n")
        sys.stderr.flush()
        process = subprocess.Popen(cmd, stdout=streams[0], stderr=streams[1])
        try:
            process.wait(timeout=_GDB_TIMEOUT_S)
        except subprocess.TimeoutExpired:
            sys.stderr.write("gdb stacktrace generation timed out.\n")
        finally:
            for stream_name, stream in zip(("STDOUT", "STDERR"), streams):
                stream.seek(0)
                sys.stderr.write(
                    "gdb {}:\n{}\n".format(
                        stream_name, stream.read().decode("ascii")
                    )
                )
                stream.close()
            sys.stderr.flush()

    def finish(self):
        """Wait for the child; raise if it hung, exited non-zero, or errored."""
        terminated = self.wait(_CHILD_FINISH_TIMEOUT_S)
        sys.stderr.write("Exit code: {}\n".format(self._rc))
        if not terminated:
            self._print_backtraces()
            raise RuntimeError("Child process did not terminate")
        if self._rc != 0:
            raise ValueError("Child process failed with exitcode %d" % self._rc)
        try:
            exception = self._exceptions.get(block=False)
            raise ValueError(
                'Child process failed: "%s": "%s"'
                % (repr(exception), exception)
            )
        except queue.Empty:
            # No exception queued: the child finished cleanly.
            pass
def _async_unary_same_channel(channel):
    """Child must not be able to reuse the parent's channel after fork."""

    def child_target():
        try:
            _async_unary(stub)
            raise Exception(
                "Child should not be able to re-use channel after fork"
            )
        except ValueError as expected_value_error:
            # Expected: the inherited channel is unusable post-fork.
            pass

    stub = test_pb2_grpc.TestServiceStub(channel)
    _async_unary(stub)
    child_process = _ChildProcess(child_target)
    child_process.start()
    _async_unary(stub)
    child_process.finish()
def _async_unary_new_channel(channel, args):
    """Fork, then run an async unary RPC on a fresh channel in the child.

    The parent keeps using its pre-fork channel before and after the fork;
    the child must open its own channel.
    """

    def child_target():
        # The ``with`` block closes the child channel on exit, so the
        # previous explicit ``close()`` inside it was redundant (and
        # inconsistent with _blocking_unary_new_channel).
        with _channel(args) as child_channel:
            child_stub = test_pb2_grpc.TestServiceStub(child_channel)
            _async_unary(child_stub)

    stub = test_pb2_grpc.TestServiceStub(channel)
    _async_unary(stub)
    child_process = _ChildProcess(child_target)
    child_process.start()
    _async_unary(stub)
    child_process.finish()
def _blocking_unary_same_channel(channel):
    """Child must not be able to reuse the parent's channel (blocking RPC)."""

    def child_target():
        try:
            _blocking_unary(stub)
            raise Exception(
                "Child should not be able to re-use channel after fork"
            )
        except ValueError as expected_value_error:
            # Expected: the inherited channel is unusable post-fork.
            pass

    stub = test_pb2_grpc.TestServiceStub(channel)
    _blocking_unary(stub)
    child_process = _ChildProcess(child_target)
    child_process.start()
    child_process.finish()
def _blocking_unary_new_channel(channel, args):
    """Fork, then run a blocking unary RPC on a fresh channel in the child."""

    def child_target():
        with _channel(args) as child_channel:
            child_stub = test_pb2_grpc.TestServiceStub(child_channel)
            _blocking_unary(child_stub)

    stub = test_pb2_grpc.TestServiceStub(channel)
    _blocking_unary(stub)
    child_process = _ChildProcess(child_target)
    child_process.start()
    _blocking_unary(stub)
    child_process.finish()
# Verify that the fork channel registry can handle already closed channels
def _close_channel_before_fork(channel, args):
    """Close channels pre-fork, then RPC from both parent and child."""

    def child_target():
        # Closing a channel inherited from the parent must be safe post-fork.
        new_channel.close()
        with _channel(args) as child_channel:
            child_stub = test_pb2_grpc.TestServiceStub(child_channel)
            _blocking_unary(child_stub)

    stub = test_pb2_grpc.TestServiceStub(channel)
    _blocking_unary(stub)
    channel.close()
    with _channel(args) as new_channel:
        new_stub = test_pb2_grpc.TestServiceStub(new_channel)
        child_process = _ChildProcess(child_target)
        child_process.start()
        _blocking_unary(new_stub)
        child_process.finish()
def _connectivity_watch(channel, args):
    """Verify parent/child connectivity subscriptions stay independent."""
    parent_states = []
    parent_channel_ready_event = threading.Event()

    def child_target():
        child_channel_ready_event = threading.Event()

        def child_connectivity_callback(state):
            if state is grpc.ChannelConnectivity.READY:
                child_channel_ready_event.set()

        with _channel(args) as child_channel:
            child_stub = test_pb2_grpc.TestServiceStub(child_channel)
            child_channel.subscribe(child_connectivity_callback)
            _async_unary(child_stub)
            if not child_channel_ready_event.wait(timeout=_RPC_TIMEOUT_S):
                raise ValueError("Channel did not move to READY")
            # The parent's subscription must not fire inside the child.
            if len(parent_states) > 1:
                raise ValueError(
                    "Received connectivity updates on parent callback",
                    parent_states,
                )
            child_channel.unsubscribe(child_connectivity_callback)

    def parent_connectivity_callback(state):
        parent_states.append(state)
        if state is grpc.ChannelConnectivity.READY:
            parent_channel_ready_event.set()

    channel.subscribe(parent_connectivity_callback)
    stub = test_pb2_grpc.TestServiceStub(channel)
    child_process = _ChildProcess(child_target)
    child_process.start()
    _async_unary(stub)
    if not parent_channel_ready_event.wait(timeout=_RPC_TIMEOUT_S):
        raise ValueError("Channel did not move to READY")
    channel.unsubscribe(parent_connectivity_callback)
    child_process.finish()
def _ping_pong_with_child_processes_after_first_response(
    channel, args, child_target, run_after_close=True
):
    """Run a bidi ping-pong RPC, forking child processes mid-stream.

    For each request/response exchange after the first response arrives, a
    child process running ``child_target(parent_bidi_call, channel, args)``
    is forked both before and after reading the response. When
    ``run_after_close`` is true, one more child is forked after the request
    stream is closed.
    """
    request_response_sizes = (
        31415,
        9,
        2653,
        58979,
    )
    request_payload_sizes = (
        27182,
        8,
        1828,
        45904,
    )
    stub = test_pb2_grpc.TestServiceStub(channel)
    pipe = _Pipe()
    parent_bidi_call = stub.FullDuplexCall(pipe)
    child_processes = []
    first_message_received = False
    for response_size, payload_size in zip(
        request_response_sizes, request_payload_sizes
    ):
        request = messages_pb2.StreamingOutputCallRequest(
            response_type=messages_pb2.COMPRESSABLE,
            response_parameters=(
                messages_pb2.ResponseParameters(size=response_size),
            ),
            payload=messages_pb2.Payload(body=b"\x00" * payload_size),
        )
        pipe.add(request)
        # Only fork before reading once at least one response has arrived,
        # so the fork always happens with the stream genuinely in progress.
        if first_message_received:
            child_process = _ChildProcess(
                child_target, (parent_bidi_call, channel, args)
            )
            child_process.start()
            child_processes.append(child_process)
        response = next(parent_bidi_call)
        first_message_received = True
        # Fork again right after reading the response.
        child_process = _ChildProcess(
            child_target, (parent_bidi_call, channel, args)
        )
        child_process.start()
        child_processes.append(child_process)
        _validate_payload_type_and_length(
            response, messages_pb2.COMPRESSABLE, response_size
        )
    pipe.close()
    if run_after_close:
        child_process = _ChildProcess(
            child_target, (parent_bidi_call, channel, args)
        )
        child_process.start()
        child_processes.append(child_process)
    # Propagate any child failure to the caller.
    for child_process in child_processes:
        child_process.finish()
def _in_progress_bidi_continue_call(channel):
    """Fork mid-bidi-stream; the child inspects the inherited call's status.

    The inherited channel must reject new RPCs, and the inherited bidi call
    must report CANCELLED with the fork-specific details string.
    """

    def child_target(parent_bidi_call, parent_channel, args):
        inherited_stub = test_pb2_grpc.TestServiceStub(parent_channel)
        try:
            _async_unary(inherited_stub)
        except ValueError:
            # Expected: the inherited channel is unusable post-fork.
            pass
        else:
            raise Exception(
                "Child should not be able to re-use channel after fork"
            )
        inherited_code = parent_bidi_call.code()
        inherited_details = parent_bidi_call.details()
        if inherited_code != grpc.StatusCode.CANCELLED:
            raise ValueError(
                "Expected inherited code CANCELLED, got %s" % inherited_code
            )
        if inherited_details != "Channel closed due to fork":
            raise ValueError(
                "Expected inherited details Channel closed due to fork, got %s"
                % inherited_details
            )

    # Don't run child_target after closing the parent call, as the call may have
    # received a status from the server before fork occurs.
    _ping_pong_with_child_processes_after_first_response(
        channel, None, child_target, run_after_close=False
    )
def _in_progress_bidi_same_channel_async_call(channel):
    """Fork mid-bidi-stream; the child tries an async unary RPC on the
    inherited channel, which must fail with ValueError."""

    def child_target(parent_bidi_call, parent_channel, args):
        inherited_stub = test_pb2_grpc.TestServiceStub(parent_channel)
        try:
            _async_unary(inherited_stub)
        except ValueError:
            # Expected: re-using the parent's channel in the child fails.
            pass
        else:
            raise Exception(
                "Child should not be able to re-use channel after fork"
            )

    _ping_pong_with_child_processes_after_first_response(
        channel, None, child_target
    )
def _in_progress_bidi_same_channel_blocking_call(channel):
    """Fork mid-bidi-stream; the child tries a blocking unary RPC on the
    inherited channel, which must fail with ValueError."""

    def child_target(parent_bidi_call, parent_channel, args):
        inherited_stub = test_pb2_grpc.TestServiceStub(parent_channel)
        try:
            _blocking_unary(inherited_stub)
        except ValueError:
            # Expected: re-using the parent's channel in the child fails.
            pass
        else:
            raise Exception(
                "Child should not be able to re-use channel after fork"
            )

    _ping_pong_with_child_processes_after_first_response(
        channel, None, child_target
    )
def _in_progress_bidi_new_channel_async_call(channel, args):
    """Fork mid-bidi-stream; the child issues an async unary RPC over a
    freshly created channel of its own."""

    def child_target(parent_bidi_call, parent_channel, args):
        with _channel(args) as fresh_channel:
            _async_unary(test_pb2_grpc.TestServiceStub(fresh_channel))

    _ping_pong_with_child_processes_after_first_response(
        channel, args, child_target
    )
def _in_progress_bidi_new_channel_blocking_call(channel, args):
    """Fork mid-bidi-stream; the child issues a blocking unary RPC over a
    freshly created channel of its own."""

    def child_target(parent_bidi_call, parent_channel, args):
        with _channel(args) as fresh_channel:
            _blocking_unary(test_pb2_grpc.TestServiceStub(fresh_channel))

    _ping_pong_with_child_processes_after_first_response(
        channel, args, child_target
    )
@enum.unique
class TestCase(enum.Enum):
    """Enumerates the fork test scenarios; value doubles as the CLI name."""

    CONNECTIVITY_WATCH = "connectivity_watch"
    CLOSE_CHANNEL_BEFORE_FORK = "close_channel_before_fork"
    ASYNC_UNARY_SAME_CHANNEL = "async_unary_same_channel"
    ASYNC_UNARY_NEW_CHANNEL = "async_unary_new_channel"
    BLOCKING_UNARY_SAME_CHANNEL = "blocking_unary_same_channel"
    BLOCKING_UNARY_NEW_CHANNEL = "blocking_unary_new_channel"
    IN_PROGRESS_BIDI_CONTINUE_CALL = "in_progress_bidi_continue_call"
    IN_PROGRESS_BIDI_SAME_CHANNEL_ASYNC_CALL = (
        "in_progress_bidi_same_channel_async_call"
    )
    IN_PROGRESS_BIDI_SAME_CHANNEL_BLOCKING_CALL = (
        "in_progress_bidi_same_channel_blocking_call"
    )
    IN_PROGRESS_BIDI_NEW_CHANNEL_ASYNC_CALL = (
        "in_progress_bidi_new_channel_async_call"
    )
    IN_PROGRESS_BIDI_NEW_CHANNEL_BLOCKING_CALL = (
        "in_progress_bidi_new_channel_blocking_call"
    )

    def run_test(self, args):
        """Open a channel, dispatch to this case's scenario, then close."""
        _LOGGER.info("Running %s", self)
        channel = _channel(args)
        # Scenarios that only need the channel.
        channel_only = {
            TestCase.ASYNC_UNARY_SAME_CHANNEL: _async_unary_same_channel,
            TestCase.BLOCKING_UNARY_SAME_CHANNEL: _blocking_unary_same_channel,
            TestCase.IN_PROGRESS_BIDI_CONTINUE_CALL: (
                _in_progress_bidi_continue_call
            ),
            TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_ASYNC_CALL: (
                _in_progress_bidi_same_channel_async_call
            ),
            TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_BLOCKING_CALL: (
                _in_progress_bidi_same_channel_blocking_call
            ),
        }
        # Scenarios that also need the parsed command-line args.
        channel_and_args = {
            TestCase.ASYNC_UNARY_NEW_CHANNEL: _async_unary_new_channel,
            TestCase.BLOCKING_UNARY_NEW_CHANNEL: _blocking_unary_new_channel,
            TestCase.CLOSE_CHANNEL_BEFORE_FORK: _close_channel_before_fork,
            TestCase.CONNECTIVITY_WATCH: _connectivity_watch,
            TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_ASYNC_CALL: (
                _in_progress_bidi_new_channel_async_call
            ),
            TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_BLOCKING_CALL: (
                _in_progress_bidi_new_channel_blocking_call
            ),
        }
        if self in channel_only:
            channel_only[self](channel)
        elif self in channel_and_args:
            channel_and_args[self](channel, args)
        else:
            raise NotImplementedError(
                'Test case "%s" not implemented!' % self.name
            )
        channel.close()
# Useful if needing to find a block of code from an address in an SO.
def dump_object_map():
    """Write this process's /proc/self/maps contents to stderr (Linux only)."""
    with open("/proc/self/maps", "r") as maps_file:
        stderr = sys.stderr
        stderr.write("=============== /proc/self/maps ===============\n")
        stderr.write(maps_file.read())
        stderr.write("\n")
        stderr.flush()
| 19,084
| 32.076256
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/fork/__init__.py
|
# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/__init__.py
|
# Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from tests import _loader
from tests import _runner
# Re-export the shared test loader and runner so this package exposes the
# same entry points as the "tests" package.
Loader = _loader.Loader
Runner = _runner.Runner
| 719
| 31.727273
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/channel_test.py
|
# Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests behavior of the grpc.aio.Channel class."""
import logging
import os
import unittest
import grpc
from grpc.experimental import aio
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
from tests.unit.framework.common import test_constants
from tests_aio.unit._constants import UNARY_CALL_WITH_SLEEP_VALUE
from tests_aio.unit._constants import UNREACHABLE_TARGET
from tests_aio.unit._test_base import AioTestBase
from tests_aio.unit._test_server import start_test_server
# Fully-qualified method paths used to construct raw multicallables.
_UNARY_CALL_METHOD = "/grpc.testing.TestService/UnaryCall"
_UNARY_CALL_METHOD_WITH_SLEEP = "/grpc.testing.TestService/UnaryCallWithSleep"
_STREAMING_OUTPUT_CALL_METHOD = "/grpc.testing.TestService/StreamingOutputCall"
# Invocation metadata pairs (one text, one binary) — defined here but not
# referenced in this visible portion of the file.
_INVOCATION_METADATA = (
    ("x-grpc-test-echo-initial", "initial-md-value"),
    ("x-grpc-test-echo-trailing-bin", b"\x00\x02"),
)
# Stream length and payload sizes shared by the streaming test cases below.
_NUM_STREAM_RESPONSES = 5
_REQUEST_PAYLOAD_SIZE = 7
_RESPONSE_PAYLOAD_SIZE = 42
class TestChannel(AioTestBase):
    """Exercises grpc.aio.Channel across the four RPC flavors and timeouts."""

    async def setUp(self):
        # Fresh test server per test; target is the "host:port" string.
        self._server_target, self._server = await start_test_server()
    async def tearDown(self):
        await self._server.stop(None)
    async def test_async_context(self):
        # The channel must be usable inside an ``async with`` block.
        async with aio.insecure_channel(self._server_target) as channel:
            hi = channel.unary_unary(
                _UNARY_CALL_METHOD,
                request_serializer=messages_pb2.SimpleRequest.SerializeToString,
                response_deserializer=messages_pb2.SimpleResponse.FromString,
            )
            await hi(messages_pb2.SimpleRequest())
    async def test_unary_unary(self):
        async with aio.insecure_channel(self._server_target) as channel:
            hi = channel.unary_unary(
                _UNARY_CALL_METHOD,
                request_serializer=messages_pb2.SimpleRequest.SerializeToString,
                response_deserializer=messages_pb2.SimpleResponse.FromString,
            )
            response = await hi(messages_pb2.SimpleRequest())
            self.assertIsInstance(response, messages_pb2.SimpleResponse)
    async def test_unary_call_times_out(self):
        # Half the handler's sleep time as the deadline forces
        # DEADLINE_EXCEEDED.
        async with aio.insecure_channel(self._server_target) as channel:
            hi = channel.unary_unary(
                _UNARY_CALL_METHOD_WITH_SLEEP,
                request_serializer=messages_pb2.SimpleRequest.SerializeToString,
                response_deserializer=messages_pb2.SimpleResponse.FromString,
            )
            with self.assertRaises(grpc.RpcError) as exception_context:
                await hi(
                    messages_pb2.SimpleRequest(),
                    timeout=UNARY_CALL_WITH_SLEEP_VALUE / 2,
                )
            # StatusCode member values unpack to (code, details-string).
            (
                _,
                details,
            ) = (
                grpc.StatusCode.DEADLINE_EXCEEDED.value
            )  # pylint: disable=unused-variable
            self.assertEqual(
                grpc.StatusCode.DEADLINE_EXCEEDED,
                exception_context.exception.code(),
            )
            self.assertEqual(
                details.title(), exception_context.exception.details()
            )
            self.assertIsNotNone(exception_context.exception.initial_metadata())
            self.assertIsNotNone(
                exception_context.exception.trailing_metadata()
            )
    @unittest.skipIf(
        os.name == "nt", "TODO: https://github.com/grpc/grpc/issues/21658"
    )
    async def test_unary_call_does_not_times_out(self):
        # A deadline comfortably larger than the handler's sleep succeeds.
        async with aio.insecure_channel(self._server_target) as channel:
            hi = channel.unary_unary(
                _UNARY_CALL_METHOD_WITH_SLEEP,
                request_serializer=messages_pb2.SimpleRequest.SerializeToString,
                response_deserializer=messages_pb2.SimpleResponse.FromString,
            )
            call = hi(
                messages_pb2.SimpleRequest(),
                timeout=UNARY_CALL_WITH_SLEEP_VALUE * 5,
            )
            self.assertEqual(await call.code(), grpc.StatusCode.OK)
    async def test_unary_stream(self):
        channel = aio.insecure_channel(self._server_target)
        stub = test_pb2_grpc.TestServiceStub(channel)
        # Prepares the request
        request = messages_pb2.StreamingOutputCallRequest()
        for _ in range(_NUM_STREAM_RESPONSES):
            request.response_parameters.append(
                messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
            )
        # Invokes the actual RPC
        call = stub.StreamingOutputCall(request)
        # Validates the responses
        response_cnt = 0
        async for response in call:
            response_cnt += 1
            self.assertIs(
                type(response), messages_pb2.StreamingOutputCallResponse
            )
            self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
        self.assertEqual(_NUM_STREAM_RESPONSES, response_cnt)
        self.assertEqual(await call.code(), grpc.StatusCode.OK)
        await channel.close()
    async def test_stream_unary_using_write(self):
        channel = aio.insecure_channel(self._server_target)
        stub = test_pb2_grpc.TestServiceStub(channel)
        # Invokes the actual RPC
        call = stub.StreamingInputCall()
        # Prepares the request
        payload = messages_pb2.Payload(body=b"\0" * _REQUEST_PAYLOAD_SIZE)
        request = messages_pb2.StreamingInputCallRequest(payload=payload)
        # Sends out requests
        for _ in range(_NUM_STREAM_RESPONSES):
            await call.write(request)
        await call.done_writing()
        # Validates the responses
        response = await call
        self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
        self.assertEqual(
            _NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
            response.aggregated_payload_size,
        )
        self.assertEqual(await call.code(), grpc.StatusCode.OK)
        await channel.close()
    async def test_stream_unary_using_async_gen(self):
        channel = aio.insecure_channel(self._server_target)
        stub = test_pb2_grpc.TestServiceStub(channel)
        # Prepares the request
        payload = messages_pb2.Payload(body=b"\0" * _REQUEST_PAYLOAD_SIZE)
        request = messages_pb2.StreamingInputCallRequest(payload=payload)
        # Request stream supplied as an async generator instead of write().
        async def gen():
            for _ in range(_NUM_STREAM_RESPONSES):
                yield request
        # Invokes the actual RPC
        call = stub.StreamingInputCall(gen())
        # Validates the responses
        response = await call
        self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
        self.assertEqual(
            _NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
            response.aggregated_payload_size,
        )
        self.assertEqual(await call.code(), grpc.StatusCode.OK)
        await channel.close()
    async def test_stream_stream_using_read_write(self):
        channel = aio.insecure_channel(self._server_target)
        stub = test_pb2_grpc.TestServiceStub(channel)
        # Invokes the actual RPC
        call = stub.FullDuplexCall()
        # Prepares the request
        request = messages_pb2.StreamingOutputCallRequest()
        request.response_parameters.append(
            messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
        )
        # Alternate one write with one read per iteration (ping-pong).
        for _ in range(_NUM_STREAM_RESPONSES):
            await call.write(request)
            response = await call.read()
            self.assertIsInstance(
                response, messages_pb2.StreamingOutputCallResponse
            )
            self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
        await call.done_writing()
        self.assertEqual(grpc.StatusCode.OK, await call.code())
        await channel.close()
    async def test_stream_stream_using_async_gen(self):
        channel = aio.insecure_channel(self._server_target)
        stub = test_pb2_grpc.TestServiceStub(channel)
        # Prepares the request
        request = messages_pb2.StreamingOutputCallRequest()
        request.response_parameters.append(
            messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
        )
        async def gen():
            for _ in range(_NUM_STREAM_RESPONSES):
                yield request
        # Invokes the actual RPC
        call = stub.FullDuplexCall(gen())
        async for response in call:
            self.assertIsInstance(
                response, messages_pb2.StreamingOutputCallResponse
            )
            self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
        self.assertEqual(grpc.StatusCode.OK, await call.code())
        await channel.close()
if __name__ == "__main__":
    # Run this module's tests directly with verbose debug logging.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 9,380
| 35.501946
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/server_interceptor_test.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test the functionality of server interceptors."""
import asyncio
import functools
import logging
from typing import Any, Awaitable, Callable, Tuple
import unittest
import grpc
from grpc.experimental import aio
from grpc.experimental import wrap_server_method_handler
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
from tests_aio.unit._test_base import AioTestBase
from tests_aio.unit._test_server import start_test_server
# Stream lengths and payload sizes shared by the streaming interceptor tests.
_NUM_STREAM_RESPONSES = 5
_REQUEST_PAYLOAD_SIZE = 7
_RESPONSE_PAYLOAD_SIZE = 42
class _LoggingInterceptor(aio.ServerInterceptor):
    """Records '<tag>:intercept_service' in a shared list, then delegates."""

    def __init__(self, tag: str, record: list) -> None:
        self.tag = tag
        self.record = record

    async def intercept_service(
        self,
        continuation: Callable[
            [grpc.HandlerCallDetails], Awaitable[grpc.RpcMethodHandler]
        ],
        handler_call_details: grpc.HandlerCallDetails,
    ) -> grpc.RpcMethodHandler:
        # Note the invocation, then pass the call through unchanged.
        entry = self.tag + ":intercept_service"
        self.record.append(entry)
        return await continuation(handler_call_details)
class _GenericInterceptor(aio.ServerInterceptor):
    """Adapts a plain coroutine function into a server interceptor object."""

    def __init__(
        self,
        fn: Callable[
            [
                Callable[
                    [grpc.HandlerCallDetails], Awaitable[grpc.RpcMethodHandler]
                ],
                grpc.HandlerCallDetails,
            ],
            Any,
        ],
    ) -> None:
        # The wrapped coroutine receives (continuation, handler_call_details).
        self._fn = fn
    async def intercept_service(
        self,
        continuation: Callable[
            [grpc.HandlerCallDetails], Awaitable[grpc.RpcMethodHandler]
        ],
        handler_call_details: grpc.HandlerCallDetails,
    ) -> grpc.RpcMethodHandler:
        # Delegate entirely to the wrapped coroutine function.
        return await self._fn(continuation, handler_call_details)
def _filter_server_interceptor(
    condition: Callable, interceptor: aio.ServerInterceptor
) -> aio.ServerInterceptor:
    """Wrap *interceptor* so it only runs when *condition* matches the call.

    Calls whose ``handler_call_details`` fail the condition bypass the
    wrapped interceptor entirely.
    """

    async def _maybe_intercept(
        continuation: Callable[
            [grpc.HandlerCallDetails], Awaitable[grpc.RpcMethodHandler]
        ],
        handler_call_details: grpc.HandlerCallDetails,
    ) -> grpc.RpcMethodHandler:
        if not condition(handler_call_details):
            # Condition not met: skip the wrapped interceptor.
            return await continuation(handler_call_details)
        return await interceptor.intercept_service(
            continuation, handler_call_details
        )

    return _GenericInterceptor(_maybe_intercept)
class _CacheInterceptor(aio.ServerInterceptor):
    """An interceptor that caches response based on request message.

    Only unary-unary handlers are wrapped; streaming handlers pass through
    untouched. Responses are keyed by ``request.response_size``.
    """

    def __init__(self, cache_store=None):
        # Maps request.response_size -> cached SimpleResponse.
        self.cache_store = cache_store or {}

    async def intercept_service(
        self,
        continuation: Callable[
            [grpc.HandlerCallDetails], Awaitable[grpc.RpcMethodHandler]
        ],
        handler_call_details: grpc.HandlerCallDetails,
    ) -> grpc.RpcMethodHandler:
        # Get the actual handler
        handler = await continuation(handler_call_details)
        # Only intercept unary call RPCs
        if handler and (
            handler.request_streaming
            or handler.response_streaming  # pytype: disable=attribute-error
        ):  # pytype: disable=attribute-error
            return handler

        # Fix: the original nested pair was named "wrapper"/"wrapper", the
        # inner shadowing the outer; both are renamed for clarity. Behavior
        # is unchanged.
        def _wrap_behavior(
            behavior: Callable[
                [messages_pb2.SimpleRequest, aio.ServicerContext],
                messages_pb2.SimpleResponse,
            ]
        ):
            @functools.wraps(behavior)
            async def _cached_behavior(
                request: messages_pb2.SimpleRequest,
                context: aio.ServicerContext,
            ) -> messages_pb2.SimpleResponse:
                # Cache miss: run the real handler and remember its result.
                if request.response_size not in self.cache_store:
                    self.cache_store[request.response_size] = await behavior(
                        request, context
                    )
                return self.cache_store[request.response_size]

            return _cached_behavior

        return wrap_server_method_handler(_wrap_behavior, handler)
async def _create_server_stub_pair(
    *interceptors: aio.ServerInterceptor,
) -> Tuple[aio.Server, test_pb2_grpc.TestServiceStub]:
    """Creates a server-stub pair with given interceptors.

    Returning the server object to protect it from being garbage collected.
    """
    target, server = await start_test_server(interceptors=interceptors)
    stub = test_pb2_grpc.TestServiceStub(aio.insecure_channel(target))
    return server, stub
class TestServerInterceptor(AioTestBase):
    """End-to-end tests for aio server interceptors: ordering, filtering,
    caching, and all four RPC flavors."""

    async def test_invalid_interceptor(self):
        # An object that is not an aio.ServerInterceptor must be rejected.
        class InvalidInterceptor:
            """Just an invalid Interceptor"""
        with self.assertRaises(ValueError):
            server_target, _ = await start_test_server(
                interceptors=(InvalidInterceptor(),)
            )
    async def test_executed_right_order(self):
        record = []
        server_target, _ = await start_test_server(
            interceptors=(
                _LoggingInterceptor("log1", record),
                _LoggingInterceptor("log2", record),
            )
        )
        async with aio.insecure_channel(server_target) as channel:
            multicallable = channel.unary_unary(
                "/grpc.testing.TestService/UnaryCall",
                request_serializer=messages_pb2.SimpleRequest.SerializeToString,
                response_deserializer=messages_pb2.SimpleResponse.FromString,
            )
            call = multicallable(messages_pb2.SimpleRequest())
            response = await call
            # Check that all interceptors were executed, and were executed
            # in the right order.
            self.assertSequenceEqual(
                [
                    "log1:intercept_service",
                    "log2:intercept_service",
                ],
                record,
            )
            self.assertIsInstance(response, messages_pb2.SimpleResponse)
    async def test_response_ok(self):
        record = []
        server_target, _ = await start_test_server(
            interceptors=(_LoggingInterceptor("log1", record),)
        )
        async with aio.insecure_channel(server_target) as channel:
            multicallable = channel.unary_unary(
                "/grpc.testing.TestService/UnaryCall",
                request_serializer=messages_pb2.SimpleRequest.SerializeToString,
                response_deserializer=messages_pb2.SimpleResponse.FromString,
            )
            call = multicallable(messages_pb2.SimpleRequest())
            response = await call
            code = await call.code()
            self.assertSequenceEqual(["log1:intercept_service"], record)
            self.assertIsInstance(response, messages_pb2.SimpleResponse)
            self.assertEqual(code, grpc.StatusCode.OK)
    async def test_apply_different_interceptors_by_metadata(self):
        record = []
        # log3 only runs when the invocation metadata carries ("secret","42").
        conditional_interceptor = _filter_server_interceptor(
            lambda x: ("secret", "42") in x.invocation_metadata,
            _LoggingInterceptor("log3", record),
        )
        server_target, _ = await start_test_server(
            interceptors=(
                _LoggingInterceptor("log1", record),
                conditional_interceptor,
                _LoggingInterceptor("log2", record),
            )
        )
        async with aio.insecure_channel(server_target) as channel:
            multicallable = channel.unary_unary(
                "/grpc.testing.TestService/UnaryCall",
                request_serializer=messages_pb2.SimpleRequest.SerializeToString,
                response_deserializer=messages_pb2.SimpleResponse.FromString,
            )
            # Without the secret header, log3 is skipped.
            metadata = aio.Metadata(
                ("key", "value"),
            )
            call = multicallable(
                messages_pb2.SimpleRequest(), metadata=metadata
            )
            await call
            self.assertSequenceEqual(
                [
                    "log1:intercept_service",
                    "log2:intercept_service",
                ],
                record,
            )
            record.clear()
            # With the secret header, log3 runs in its registered position.
            metadata = aio.Metadata(("key", "value"), ("secret", "42"))
            call = multicallable(
                messages_pb2.SimpleRequest(), metadata=metadata
            )
            await call
            self.assertSequenceEqual(
                [
                    "log1:intercept_service",
                    "log3:intercept_service",
                    "log2:intercept_service",
                ],
                record,
            )
    async def test_response_caching(self):
        # Prepares a preset value to help testing
        interceptor = _CacheInterceptor(
            {
                42: messages_pb2.SimpleResponse(
                    payload=messages_pb2.Payload(body=b"\x42")
                )
            }
        )
        # Constructs a server with the cache interceptor
        server, stub = await _create_server_stub_pair(interceptor)
        # Tests if the cache store is used
        response = await stub.UnaryCall(
            messages_pb2.SimpleRequest(response_size=42)
        )
        self.assertEqual(1, len(interceptor.cache_store[42].payload.body))
        self.assertEqual(interceptor.cache_store[42], response)
        # Tests response can be cached
        response = await stub.UnaryCall(
            messages_pb2.SimpleRequest(response_size=1337)
        )
        self.assertEqual(1337, len(interceptor.cache_store[1337].payload.body))
        self.assertEqual(interceptor.cache_store[1337], response)
        response = await stub.UnaryCall(
            messages_pb2.SimpleRequest(response_size=1337)
        )
        self.assertEqual(interceptor.cache_store[1337], response)
    async def test_interceptor_unary_stream(self):
        record = []
        server, stub = await _create_server_stub_pair(
            _LoggingInterceptor("log_unary_stream", record)
        )
        # Prepares the request
        request = messages_pb2.StreamingOutputCallRequest()
        for _ in range(_NUM_STREAM_RESPONSES):
            request.response_parameters.append(
                messages_pb2.ResponseParameters(
                    size=_RESPONSE_PAYLOAD_SIZE,
                )
            )
        # Tests if the cache store is used
        call = stub.StreamingOutputCall(request)
        # Ensures the RPC goes fine
        async for response in call:
            self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
        self.assertEqual(await call.code(), grpc.StatusCode.OK)
        self.assertSequenceEqual(
            [
                "log_unary_stream:intercept_service",
            ],
            record,
        )
    async def test_interceptor_stream_unary(self):
        record = []
        server, stub = await _create_server_stub_pair(
            _LoggingInterceptor("log_stream_unary", record)
        )
        # Invokes the actual RPC
        call = stub.StreamingInputCall()
        # Prepares the request
        payload = messages_pb2.Payload(body=b"\0" * _REQUEST_PAYLOAD_SIZE)
        request = messages_pb2.StreamingInputCallRequest(payload=payload)
        # Sends out requests
        for _ in range(_NUM_STREAM_RESPONSES):
            await call.write(request)
        await call.done_writing()
        # Validates the responses
        response = await call
        self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
        self.assertEqual(
            _NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
            response.aggregated_payload_size,
        )
        self.assertEqual(await call.code(), grpc.StatusCode.OK)
        self.assertSequenceEqual(
            [
                "log_stream_unary:intercept_service",
            ],
            record,
        )
    async def test_interceptor_stream_stream(self):
        record = []
        server, stub = await _create_server_stub_pair(
            _LoggingInterceptor("log_stream_stream", record)
        )
        # Prepares the request
        payload = messages_pb2.Payload(body=b"\0" * _REQUEST_PAYLOAD_SIZE)
        request = messages_pb2.StreamingInputCallRequest(payload=payload)
        async def gen():
            for _ in range(_NUM_STREAM_RESPONSES):
                yield request
        # Invokes the actual RPC
        call = stub.StreamingInputCall(gen())
        # Validates the responses
        response = await call
        self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
        self.assertEqual(
            _NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
            response.aggregated_payload_size,
        )
        self.assertEqual(await call.code(), grpc.StatusCode.OK)
        self.assertSequenceEqual(
            [
                "log_stream_stream:intercept_service",
            ],
            record,
        )
if __name__ == "__main__":
    # Run this module's tests directly with verbose debug logging.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 13,545
| 33.207071
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/close_channel_test.py
|
# Copyright 2020 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests behavior of closing a grpc.aio.Channel."""
import asyncio
import logging
import unittest
import grpc
from grpc.aio import _base_call
from grpc.experimental import aio
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
from tests_aio.unit._test_base import AioTestBase
from tests_aio.unit._test_server import start_test_server
# Method path for the server handler that sleeps before replying.
_UNARY_CALL_METHOD_WITH_SLEEP = "/grpc.testing.TestService/UnaryCallWithSleep"
# Grace period (seconds) chosen to be long enough that a graceful close
# should never actually hit it in these tests.
_LONG_TIMEOUT_THAT_SHOULD_NOT_EXPIRE = 60
class TestCloseChannel(AioTestBase):
    """Verifies grpc.aio.Channel.close(): grace periods, cancellation of
    in-flight calls of every flavor, and per-channel isolation."""

    async def setUp(self):
        self._server_target, self._server = await start_test_server()
    async def tearDown(self):
        await self._server.stop(None)
    async def test_graceful_close(self):
        # With a long grace period, an in-flight call is allowed to finish.
        channel = aio.insecure_channel(self._server_target)
        UnaryCallWithSleep = channel.unary_unary(
            _UNARY_CALL_METHOD_WITH_SLEEP,
            request_serializer=messages_pb2.SimpleRequest.SerializeToString,
            response_deserializer=messages_pb2.SimpleResponse.FromString,
        )
        call = UnaryCallWithSleep(messages_pb2.SimpleRequest())
        await channel.close(grace=_LONG_TIMEOUT_THAT_SHOULD_NOT_EXPIRE)
        self.assertEqual(grpc.StatusCode.OK, await call.code())
    async def test_none_graceful_close(self):
        # With no grace period, the in-flight call is cancelled immediately.
        channel = aio.insecure_channel(self._server_target)
        UnaryCallWithSleep = channel.unary_unary(
            _UNARY_CALL_METHOD_WITH_SLEEP,
            request_serializer=messages_pb2.SimpleRequest.SerializeToString,
            response_deserializer=messages_pb2.SimpleResponse.FromString,
        )
        call = UnaryCallWithSleep(messages_pb2.SimpleRequest())
        await channel.close(None)
        self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
    async def test_close_unary_unary(self):
        channel = aio.insecure_channel(self._server_target)
        stub = test_pb2_grpc.TestServiceStub(channel)
        calls = [stub.UnaryCall(messages_pb2.SimpleRequest()) for _ in range(2)]
        await channel.close()
        for call in calls:
            self.assertTrue(call.cancelled())
    async def test_close_unary_stream(self):
        channel = aio.insecure_channel(self._server_target)
        stub = test_pb2_grpc.TestServiceStub(channel)
        request = messages_pb2.StreamingOutputCallRequest()
        calls = [stub.StreamingOutputCall(request) for _ in range(2)]
        await channel.close()
        for call in calls:
            self.assertTrue(call.cancelled())
    async def test_close_stream_unary(self):
        channel = aio.insecure_channel(self._server_target)
        stub = test_pb2_grpc.TestServiceStub(channel)
        calls = [stub.StreamingInputCall() for _ in range(2)]
        await channel.close()
        for call in calls:
            self.assertTrue(call.cancelled())
    async def test_close_stream_stream(self):
        channel = aio.insecure_channel(self._server_target)
        stub = test_pb2_grpc.TestServiceStub(channel)
        calls = [stub.FullDuplexCall() for _ in range(2)]
        await channel.close()
        for call in calls:
            self.assertTrue(call.cancelled())
    async def test_close_async_context(self):
        # Leaving the ``async with`` block closes the channel; the pending
        # calls must then report cancelled.
        async with aio.insecure_channel(self._server_target) as channel:
            stub = test_pb2_grpc.TestServiceStub(channel)
            calls = [
                stub.UnaryCall(messages_pb2.SimpleRequest()) for _ in range(2)
            ]
        for call in calls:
            self.assertTrue(call.cancelled())
    async def test_channel_isolation(self):
        # Closing channel2 (inner context exit) must not affect channel1's
        # still-open call.
        async with aio.insecure_channel(self._server_target) as channel1:
            async with aio.insecure_channel(self._server_target) as channel2:
                stub1 = test_pb2_grpc.TestServiceStub(channel1)
                stub2 = test_pb2_grpc.TestServiceStub(channel2)
                call1 = stub1.UnaryCall(messages_pb2.SimpleRequest())
                call2 = stub2.UnaryCall(messages_pb2.SimpleRequest())
            self.assertFalse(call1.cancelled())
            self.assertTrue(call2.cancelled())
if __name__ == "__main__":
    # Run this module's tests directly with verbose debug logging.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 4,813
| 33.633094
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/client_stream_stream_interceptor_test.py
|
# Copyright 2020 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import unittest
import grpc
from grpc.experimental import aio
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
from tests_aio.unit._common import CountingRequestIterator
from tests_aio.unit._common import CountingResponseIterator
from tests_aio.unit._test_base import AioTestBase
from tests_aio.unit._test_server import start_test_server
# Stream dimensions shared by the stream-stream interceptor tests below.
_NUM_STREAM_RESPONSES = 5
_NUM_STREAM_REQUESTS = 5
_RESPONSE_PAYLOAD_SIZE = 7
class _StreamStreamInterceptorEmpty(aio.StreamStreamClientInterceptor):
    """A no-op stream-stream client interceptor: forwards the call as-is."""

    async def intercept_stream_stream(
        self, continuation, client_call_details, request_iterator
    ):
        # Pass the call straight through without wrapping either direction.
        call = await continuation(client_call_details, request_iterator)
        return call

    def assert_in_final_state(self, test: unittest.TestCase):
        # Nothing to verify for the pass-through interceptor.
        pass
class _StreamStreamInterceptorWithRequestAndResponseIterator(
    aio.StreamStreamClientInterceptor
):
    """Interceptor that wraps both directions with counting iterators."""

    async def intercept_stream_stream(
        self, continuation, client_call_details, request_iterator
    ):
        # Wrap the outgoing request stream so each message is counted.
        self.request_iterator = CountingRequestIterator(request_iterator)
        call = await continuation(client_call_details, self.request_iterator)
        # Wrap the call so each incoming response is counted as it is read.
        self.response_iterator = CountingResponseIterator(call)
        return self.response_iterator

    def assert_in_final_state(self, test: unittest.TestCase):
        # Every request and response must have flowed through the wrappers.
        test.assertEqual(
            _NUM_STREAM_REQUESTS, self.request_iterator.request_cnt
        )
        test.assertEqual(
            _NUM_STREAM_RESPONSES, self.response_iterator.response_cnt
        )
class TestStreamStreamClientInterceptor(AioTestBase):
    """End-to-end tests for stream-stream client interceptors.

    Each test is run once per interceptor class via ``subTest`` so the
    empty (pass-through) and counting variants share the same scenarios.
    """

    async def setUp(self):
        # Fresh test server per test case.
        self._server_target, self._server = await start_test_server()

    async def tearDown(self):
        await self._server.stop(None)

    async def test_intercepts(self):
        """Drives the RPC with a request iterator and async iteration."""
        for interceptor_class in (
            _StreamStreamInterceptorEmpty,
            _StreamStreamInterceptorWithRequestAndResponseIterator,
        ):
            with self.subTest(name=interceptor_class):
                interceptor = interceptor_class()
                channel = aio.insecure_channel(
                    self._server_target, interceptors=[interceptor]
                )
                stub = test_pb2_grpc.TestServiceStub(channel)

                # Prepares the request
                request = messages_pb2.StreamingOutputCallRequest()
                request.response_parameters.append(
                    messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
                )

                async def request_iterator():
                    for _ in range(_NUM_STREAM_REQUESTS):
                        yield request

                call = stub.FullDuplexCall(request_iterator())

                await call.wait_for_connection()

                response_cnt = 0
                async for response in call:
                    response_cnt += 1
                    self.assertIsInstance(
                        response, messages_pb2.StreamingOutputCallResponse
                    )
                    self.assertEqual(
                        _RESPONSE_PAYLOAD_SIZE, len(response.payload.body)
                    )

                self.assertEqual(response_cnt, _NUM_STREAM_RESPONSES)
                self.assertEqual(await call.code(), grpc.StatusCode.OK)
                self.assertEqual(await call.initial_metadata(), aio.Metadata())
                self.assertEqual(await call.trailing_metadata(), aio.Metadata())
                self.assertEqual(await call.details(), "")
                self.assertEqual(await call.debug_error_string(), "")
                # cancel() on an already-finished call is a no-op.
                self.assertEqual(call.cancel(), False)
                self.assertEqual(call.cancelled(), False)
                self.assertEqual(call.done(), True)

                interceptor.assert_in_final_state(self)

                await channel.close()

    async def test_intercepts_using_write_and_read(self):
        """Drives the RPC with explicit write()/read() calls instead."""
        for interceptor_class in (
            _StreamStreamInterceptorEmpty,
            _StreamStreamInterceptorWithRequestAndResponseIterator,
        ):
            with self.subTest(name=interceptor_class):
                interceptor = interceptor_class()
                channel = aio.insecure_channel(
                    self._server_target, interceptors=[interceptor]
                )
                stub = test_pb2_grpc.TestServiceStub(channel)

                # Prepares the request
                request = messages_pb2.StreamingOutputCallRequest()
                request.response_parameters.append(
                    messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
                )

                call = stub.FullDuplexCall()

                # One write is immediately followed by one read each round.
                for _ in range(_NUM_STREAM_RESPONSES):
                    await call.write(request)
                    response = await call.read()
                    self.assertIsInstance(
                        response, messages_pb2.StreamingOutputCallResponse
                    )
                    self.assertEqual(
                        _RESPONSE_PAYLOAD_SIZE, len(response.payload.body)
                    )

                await call.done_writing()

                self.assertEqual(await call.code(), grpc.StatusCode.OK)
                self.assertEqual(await call.initial_metadata(), aio.Metadata())
                self.assertEqual(await call.trailing_metadata(), aio.Metadata())
                self.assertEqual(await call.details(), "")
                self.assertEqual(await call.debug_error_string(), "")
                self.assertEqual(call.cancel(), False)
                self.assertEqual(call.cancelled(), False)
                self.assertEqual(call.done(), True)

                interceptor.assert_in_final_state(self)

                await channel.close()

    async def test_multiple_interceptors_request_iterator(self):
        """Same scenario, but with two chained interceptor instances."""
        for interceptor_class in (
            _StreamStreamInterceptorEmpty,
            _StreamStreamInterceptorWithRequestAndResponseIterator,
        ):
            with self.subTest(name=interceptor_class):
                interceptors = [interceptor_class(), interceptor_class()]
                channel = aio.insecure_channel(
                    self._server_target, interceptors=interceptors
                )
                stub = test_pb2_grpc.TestServiceStub(channel)

                # Prepares the request
                request = messages_pb2.StreamingOutputCallRequest()
                request.response_parameters.append(
                    messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
                )

                call = stub.FullDuplexCall()

                for _ in range(_NUM_STREAM_RESPONSES):
                    await call.write(request)
                    response = await call.read()
                    self.assertIsInstance(
                        response, messages_pb2.StreamingOutputCallResponse
                    )
                    self.assertEqual(
                        _RESPONSE_PAYLOAD_SIZE, len(response.payload.body)
                    )

                await call.done_writing()

                self.assertEqual(await call.code(), grpc.StatusCode.OK)
                self.assertEqual(await call.initial_metadata(), aio.Metadata())
                self.assertEqual(await call.trailing_metadata(), aio.Metadata())
                self.assertEqual(await call.details(), "")
                self.assertEqual(await call.debug_error_string(), "")
                self.assertEqual(call.cancel(), False)
                self.assertEqual(call.cancelled(), False)
                self.assertEqual(call.done(), True)

                # Both interceptors in the chain must have seen all traffic.
                for interceptor in interceptors:
                    interceptor.assert_in_final_state(self)

                await channel.close()
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
| 8,480
| 38.630841
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/metadata_test.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests behavior around the metadata mechanism."""
import asyncio
import logging
import platform
import random
import unittest
import grpc
from grpc.experimental import aio
from tests_aio.unit import _common
from tests_aio.unit._test_base import AioTestBase
_TEST_CLIENT_TO_SERVER = "/test/TestClientToServer"
_TEST_SERVER_TO_CLIENT = "/test/TestServerToClient"
_TEST_TRAILING_METADATA = "/test/TestTrailingMetadata"
_TEST_ECHO_INITIAL_METADATA = "/test/TestEchoInitialMetadata"
_TEST_GENERIC_HANDLER = "/test/TestGenericHandler"
_TEST_UNARY_STREAM = "/test/TestUnaryStream"
_TEST_STREAM_UNARY = "/test/TestStreamUnary"
_TEST_STREAM_STREAM = "/test/TestStreamStream"
_TEST_INSPECT_CONTEXT = "/test/TestInspectContext"
_REQUEST = b"\x00\x00\x00"
_RESPONSE = b"\x01\x01\x01"
_INITIAL_METADATA_FROM_CLIENT_TO_SERVER = aio.Metadata(
("client-to-server", "question"),
("client-to-server-bin", b"\x07\x07\x07"),
)
_INITIAL_METADATA_FROM_SERVER_TO_CLIENT = aio.Metadata(
("server-to-client", "answer"),
("server-to-client-bin", b"\x06\x06\x06"),
)
_TRAILING_METADATA = aio.Metadata(
("a-trailing-metadata", "stack-trace"),
("a-trailing-metadata-bin", b"\x05\x05\x05"),
)
_INITIAL_METADATA_FOR_GENERIC_HANDLER = aio.Metadata(
("a-must-have-key", "secret"),
)
_INVALID_METADATA_TEST_CASES = (
(
TypeError,
((42, 42),),
),
(
TypeError,
(({}, {}),),
),
(
TypeError,
((None, {}),),
),
(
TypeError,
(({}, {}),),
),
(
TypeError,
(("normal", object()),),
),
)
_NON_OK_CODE = grpc.StatusCode.NOT_FOUND
_DETAILS = "Test details!"
class _TestGenericHandlerForMethods(grpc.GenericRpcHandler):
    """Generic handler that routes each test method path to its handler."""

    def __init__(self):
        # Static routing table: method path -> RPC method handler.
        self._routing_table = {
            _TEST_CLIENT_TO_SERVER: grpc.unary_unary_rpc_method_handler(
                self._test_client_to_server
            ),
            _TEST_SERVER_TO_CLIENT: grpc.unary_unary_rpc_method_handler(
                self._test_server_to_client
            ),
            _TEST_TRAILING_METADATA: grpc.unary_unary_rpc_method_handler(
                self._test_trailing_metadata
            ),
            _TEST_UNARY_STREAM: grpc.unary_stream_rpc_method_handler(
                self._test_unary_stream
            ),
            _TEST_STREAM_UNARY: grpc.stream_unary_rpc_method_handler(
                self._test_stream_unary
            ),
            _TEST_STREAM_STREAM: grpc.stream_stream_rpc_method_handler(
                self._test_stream_stream
            ),
            _TEST_INSPECT_CONTEXT: grpc.unary_unary_rpc_method_handler(
                self._test_inspect_context
            ),
        }

    @staticmethod
    async def _test_client_to_server(request, context):
        # Server must observe the metadata the client attached to the call.
        assert _REQUEST == request
        assert _common.seen_metadata(
            _INITIAL_METADATA_FROM_CLIENT_TO_SERVER,
            context.invocation_metadata(),
        )
        return _RESPONSE

    @staticmethod
    async def _test_server_to_client(request, context):
        # Sends initial metadata back to the client before responding.
        assert _REQUEST == request
        await context.send_initial_metadata(
            _INITIAL_METADATA_FROM_SERVER_TO_CLIENT
        )
        return _RESPONSE

    @staticmethod
    async def _test_trailing_metadata(request, context):
        # Attaches trailing metadata that the client can assert on.
        assert _REQUEST == request
        context.set_trailing_metadata(_TRAILING_METADATA)
        return _RESPONSE

    @staticmethod
    async def _test_unary_stream(request, context):
        # Checks inbound metadata, then wraps a single streamed response
        # with initial and trailing metadata.
        assert _REQUEST == request
        assert _common.seen_metadata(
            _INITIAL_METADATA_FROM_CLIENT_TO_SERVER,
            context.invocation_metadata(),
        )
        await context.send_initial_metadata(
            _INITIAL_METADATA_FROM_SERVER_TO_CLIENT
        )
        yield _RESPONSE
        context.set_trailing_metadata(_TRAILING_METADATA)

    @staticmethod
    async def _test_stream_unary(request_iterator, context):
        assert _common.seen_metadata(
            _INITIAL_METADATA_FROM_CLIENT_TO_SERVER,
            context.invocation_metadata(),
        )
        await context.send_initial_metadata(
            _INITIAL_METADATA_FROM_SERVER_TO_CLIENT
        )
        # Drain the request stream; every message must equal _REQUEST.
        async for request in request_iterator:
            assert _REQUEST == request
        context.set_trailing_metadata(_TRAILING_METADATA)
        return _RESPONSE

    @staticmethod
    async def _test_stream_stream(request_iterator, context):
        assert _common.seen_metadata(
            _INITIAL_METADATA_FROM_CLIENT_TO_SERVER,
            context.invocation_metadata(),
        )
        await context.send_initial_metadata(
            _INITIAL_METADATA_FROM_SERVER_TO_CLIENT
        )
        # Drain the request stream, then emit a single response.
        async for request in request_iterator:
            assert _REQUEST == request
        yield _RESPONSE
        context.set_trailing_metadata(_TRAILING_METADATA)

    @staticmethod
    async def _test_inspect_context(request, context):
        assert _REQUEST == request
        context.set_code(_NON_OK_CODE)
        context.set_details(_DETAILS)
        context.set_trailing_metadata(_TRAILING_METADATA)

        # ensure that we can read back the data we set on the context
        assert context.get_code() == _NON_OK_CODE
        assert context.get_details() == _DETAILS
        assert context.get_trailing_metadata() == _TRAILING_METADATA
        return _RESPONSE

    def service(self, handler_call_details):
        """Return the handler for the requested method path, or None."""
        return self._routing_table.get(handler_call_details.method)
class _TestGenericHandlerItself(grpc.GenericRpcHandler):
    """Verifies that service() itself receives the invocation metadata."""

    @staticmethod
    async def _method(request, unused_context):
        assert _REQUEST == request
        return _RESPONSE

    def service(self, handler_call_details):
        # The client's metadata must already be visible during routing,
        # before any method handler runs.
        assert _common.seen_metadata(
            _INITIAL_METADATA_FOR_GENERIC_HANDLER,
            handler_call_details.invocation_metadata,
        )
        return grpc.unary_unary_rpc_method_handler(self._method)
async def _start_test_server():
    """Boot an aio server with both generic handlers on an ephemeral port.

    Returns a ``(target, server)`` pair where ``target`` is the
    ``localhost:<port>`` address clients should dial.
    """
    test_server = aio.server()
    bound_port = test_server.add_insecure_port("[::]:0")
    handlers = (
        _TestGenericHandlerForMethods(),
        _TestGenericHandlerItself(),
    )
    test_server.add_generic_rpc_handlers(handlers)
    await test_server.start()
    return "localhost:%d" % bound_port, test_server
class TestMetadata(AioTestBase):
    """End-to-end tests of metadata propagation in both directions."""

    async def setUp(self):
        # One server and one client channel per test case.
        address, self._server = await _start_test_server()
        self._client = aio.insecure_channel(address)

    async def tearDown(self):
        await self._client.close()
        await self._server.stop(None)

    async def test_from_client_to_server(self):
        # Server-side metadata assertions live in _test_client_to_server.
        multicallable = self._client.unary_unary(_TEST_CLIENT_TO_SERVER)
        call = multicallable(
            _REQUEST, metadata=_INITIAL_METADATA_FROM_CLIENT_TO_SERVER
        )
        self.assertEqual(_RESPONSE, await call)
        self.assertEqual(grpc.StatusCode.OK, await call.code())

    async def test_from_server_to_client(self):
        multicallable = self._client.unary_unary(_TEST_SERVER_TO_CLIENT)
        call = multicallable(_REQUEST)

        self.assertEqual(
            _INITIAL_METADATA_FROM_SERVER_TO_CLIENT,
            await call.initial_metadata(),
        )
        self.assertEqual(_RESPONSE, await call)
        self.assertEqual(grpc.StatusCode.OK, await call.code())

    async def test_trailing_metadata(self):
        multicallable = self._client.unary_unary(_TEST_TRAILING_METADATA)
        call = multicallable(_REQUEST)
        self.assertEqual(_TRAILING_METADATA, await call.trailing_metadata())
        self.assertEqual(_RESPONSE, await call)
        self.assertEqual(grpc.StatusCode.OK, await call.code())

    async def test_from_client_to_server_with_list(self):
        # Plain lists must be accepted wherever aio.Metadata is.
        multicallable = self._client.unary_unary(_TEST_CLIENT_TO_SERVER)
        call = multicallable(
            _REQUEST, metadata=list(_INITIAL_METADATA_FROM_CLIENT_TO_SERVER)
        )  # pytype: disable=wrong-arg-types
        self.assertEqual(_RESPONSE, await call)
        self.assertEqual(grpc.StatusCode.OK, await call.code())

    @unittest.skipIf(
        platform.system() == "Windows",
        "https://github.com/grpc/grpc/issues/21943",
    )
    async def test_invalid_metadata(self):
        # Each malformed metadata value must raise its expected exception.
        multicallable = self._client.unary_unary(_TEST_CLIENT_TO_SERVER)
        for exception_type, metadata in _INVALID_METADATA_TEST_CASES:
            with self.subTest(metadata=metadata):
                with self.assertRaises(exception_type):
                    call = multicallable(_REQUEST, metadata=metadata)
                    await call

    async def test_generic_handler(self):
        multicallable = self._client.unary_unary(_TEST_GENERIC_HANDLER)
        call = multicallable(
            _REQUEST, metadata=_INITIAL_METADATA_FOR_GENERIC_HANDLER
        )
        self.assertEqual(_RESPONSE, await call)
        self.assertEqual(grpc.StatusCode.OK, await call.code())

    async def test_unary_stream(self):
        multicallable = self._client.unary_stream(_TEST_UNARY_STREAM)
        call = multicallable(
            _REQUEST, metadata=_INITIAL_METADATA_FROM_CLIENT_TO_SERVER
        )
        self.assertTrue(
            _common.seen_metadata(
                _INITIAL_METADATA_FROM_SERVER_TO_CLIENT,
                await call.initial_metadata(),
            )
        )
        self.assertSequenceEqual(
            [_RESPONSE], [request async for request in call]
        )
        self.assertEqual(_TRAILING_METADATA, await call.trailing_metadata())
        self.assertEqual(grpc.StatusCode.OK, await call.code())

    async def test_stream_unary(self):
        multicallable = self._client.stream_unary(_TEST_STREAM_UNARY)
        call = multicallable(metadata=_INITIAL_METADATA_FROM_CLIENT_TO_SERVER)
        await call.write(_REQUEST)
        await call.done_writing()

        self.assertTrue(
            _common.seen_metadata(
                _INITIAL_METADATA_FROM_SERVER_TO_CLIENT,
                await call.initial_metadata(),
            )
        )
        self.assertEqual(_RESPONSE, await call)
        self.assertEqual(_TRAILING_METADATA, await call.trailing_metadata())
        self.assertEqual(grpc.StatusCode.OK, await call.code())

    async def test_stream_stream(self):
        multicallable = self._client.stream_stream(_TEST_STREAM_STREAM)
        call = multicallable(metadata=_INITIAL_METADATA_FROM_CLIENT_TO_SERVER)
        await call.write(_REQUEST)
        await call.done_writing()

        self.assertTrue(
            _common.seen_metadata(
                _INITIAL_METADATA_FROM_SERVER_TO_CLIENT,
                await call.initial_metadata(),
            )
        )
        self.assertSequenceEqual(
            [_RESPONSE], [request async for request in call]
        )
        self.assertEqual(_TRAILING_METADATA, await call.trailing_metadata())
        self.assertEqual(grpc.StatusCode.OK, await call.code())

    async def test_compatibility_with_tuple(self):
        # aio.Metadata compares equal to plain tuples and supports "+".
        metadata_obj = aio.Metadata(("key", "42"), ("key-2", "value"))
        self.assertEqual(metadata_obj, tuple(metadata_obj))
        self.assertEqual(tuple(metadata_obj), metadata_obj)

        expected_sum = tuple(metadata_obj) + (("third", "3"),)
        self.assertEqual(expected_sum, metadata_obj + (("third", "3"),))
        self.assertEqual(
            expected_sum, metadata_obj + aio.Metadata(("third", "3"))
        )

    async def test_inspect_context(self):
        # The server handler sets a non-OK code; the client must see it
        # surface as an RpcError.
        multicallable = self._client.unary_unary(_TEST_INSPECT_CONTEXT)
        call = multicallable(_REQUEST)
        with self.assertRaises(grpc.RpcError) as exc_data:
            await call

        err = exc_data.exception
        self.assertEqual(_NON_OK_CODE, err.code())
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
| 12,361
| 32.961538
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/abort_test.py
|
# Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import gc
import logging
import time
import unittest
import grpc
from grpc.experimental import aio
from tests.unit.framework.common import test_constants
from tests_aio.unit._test_base import AioTestBase
_UNARY_UNARY_ABORT = "/test/UnaryUnaryAbort"
_SUPPRESS_ABORT = "/test/SuppressAbort"
_REPLACE_ABORT = "/test/ReplaceAbort"
_ABORT_AFTER_REPLY = "/test/AbortAfterReply"
_REQUEST = b"\x00\x00\x00"
_RESPONSE = b"\x01\x01\x01"
_NUM_STREAM_RESPONSES = 5
_ABORT_CODE = grpc.StatusCode.RESOURCE_EXHAUSTED
_ABORT_DETAILS = "Phony error details"
class _GenericHandler(grpc.GenericRpcHandler):
    """Routes test methods that exercise server-side abort semantics.

    Fix: the ``except aio.AbortError as e`` clauses bound ``e`` without
    ever using it (flake8 F841); the bindings are dropped.
    """

    @staticmethod
    async def _unary_unary_abort(unused_request, context):
        # abort() raises, so the RPC terminates here with _ABORT_CODE.
        await context.abort(_ABORT_CODE, _ABORT_DETAILS)
        raise RuntimeError("This line should not be executed")

    @staticmethod
    async def _suppress_abort(unused_request, context):
        # Swallowing AbortError must not resurrect the RPC; the companion
        # test asserts the client still observes the abort status.
        try:
            await context.abort(_ABORT_CODE, _ABORT_DETAILS)
        except aio.AbortError:
            pass
        return _RESPONSE

    @staticmethod
    async def _replace_abort(unused_request, context):
        # A second abort() inside the AbortError handler; the companion
        # test asserts the original status is the one the client sees.
        try:
            await context.abort(_ABORT_CODE, _ABORT_DETAILS)
        except aio.AbortError:
            await context.abort(
                grpc.StatusCode.INVALID_ARGUMENT, "Override abort!"
            )

    @staticmethod
    async def _abort_after_reply(unused_request, context):
        # Streams one response, then aborts the call mid-stream.
        yield _RESPONSE
        await context.abort(_ABORT_CODE, _ABORT_DETAILS)
        raise RuntimeError("This line should not be executed")

    def service(self, handler_details):
        """Return the RPC handler for the requested test method."""
        if handler_details.method == _UNARY_UNARY_ABORT:
            return grpc.unary_unary_rpc_method_handler(self._unary_unary_abort)
        if handler_details.method == _SUPPRESS_ABORT:
            return grpc.unary_unary_rpc_method_handler(self._suppress_abort)
        if handler_details.method == _REPLACE_ABORT:
            return grpc.unary_unary_rpc_method_handler(self._replace_abort)
        if handler_details.method == _ABORT_AFTER_REPLY:
            return grpc.unary_stream_rpc_method_handler(self._abort_after_reply)
async def _start_test_server():
    """Boot an aio server exposing _GenericHandler on an ephemeral port.

    Returns a ``(target, server)`` pair; ``target`` is the address to dial.
    """
    test_server = aio.server()
    chosen_port = test_server.add_insecure_port("[::]:0")
    test_server.add_generic_rpc_handlers((_GenericHandler(),))
    await test_server.start()
    return "localhost:%d" % chosen_port, test_server
class TestAbort(AioTestBase):
    """Client-observed behavior of server-side ``context.abort()``."""

    async def setUp(self):
        address, self._server = await _start_test_server()
        self._channel = aio.insecure_channel(address)

    async def tearDown(self):
        await self._channel.close()
        await self._server.stop(None)

    async def test_unary_unary_abort(self):
        method = self._channel.unary_unary(_UNARY_UNARY_ABORT)
        call = method(_REQUEST)

        # Status is observable via the call object...
        self.assertEqual(_ABORT_CODE, await call.code())
        self.assertEqual(_ABORT_DETAILS, await call.details())

        # ...and awaiting the aborted call raises with the same status.
        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call

        rpc_error = exception_context.exception
        self.assertEqual(_ABORT_CODE, rpc_error.code())
        self.assertEqual(_ABORT_DETAILS, rpc_error.details())

    async def test_suppress_abort(self):
        # Even though the handler swallows AbortError, the client still
        # sees the abort status rather than a normal response.
        method = self._channel.unary_unary(_SUPPRESS_ABORT)
        call = method(_REQUEST)

        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call

        rpc_error = exception_context.exception
        self.assertEqual(_ABORT_CODE, rpc_error.code())
        self.assertEqual(_ABORT_DETAILS, rpc_error.details())

    async def test_replace_abort(self):
        # A second abort() in the handler does not replace the first status.
        method = self._channel.unary_unary(_REPLACE_ABORT)
        call = method(_REQUEST)

        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call

        rpc_error = exception_context.exception
        self.assertEqual(_ABORT_CODE, rpc_error.code())
        self.assertEqual(_ABORT_DETAILS, rpc_error.details())

    async def test_abort_after_reply(self):
        method = self._channel.unary_stream(_ABORT_AFTER_REPLY)
        call = method(_REQUEST)

        with self.assertRaises(aio.AioRpcError) as exception_context:
            # First read yields the streamed response; the second hits the
            # mid-stream abort.
            await call.read()
            await call.read()

        rpc_error = exception_context.exception
        self.assertEqual(_ABORT_CODE, rpc_error.code())
        self.assertEqual(_ABORT_DETAILS, rpc_error.details())

        self.assertEqual(_ABORT_CODE, await call.code())
        self.assertEqual(_ABORT_DETAILS, await call.details())
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
| 5,194
| 33.177632
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/client_unary_unary_interceptor_test.py
|
# Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import logging
import unittest
import grpc
from grpc.experimental import aio
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
from tests_aio.unit import _common
from tests_aio.unit import _constants
from tests_aio.unit._test_base import AioTestBase
from tests_aio.unit._test_server import _INITIAL_METADATA_KEY
from tests_aio.unit._test_server import _TRAILING_METADATA_KEY
from tests_aio.unit._test_server import start_test_server
_LOCAL_CANCEL_DETAILS_EXPECTATION = "Locally cancelled by application!"
_INITIAL_METADATA_TO_INJECT = aio.Metadata(
(_INITIAL_METADATA_KEY, "extra info"),
(_TRAILING_METADATA_KEY, b"\x13\x37"),
)
_TIMEOUT_CHECK_IF_CALLBACK_WAS_CALLED = 1.0
class TestUnaryUnaryClientInterceptor(AioTestBase):
    """Behavioral tests for unary-unary client interceptors."""

    async def setUp(self):
        # Fresh test server per test case.
        self._server_target, self._server = await start_test_server()

    async def tearDown(self):
        await self._server.stop(None)

    def test_invalid_interceptor(self):
        # Objects that do not implement an interceptor ABC are rejected
        # at channel construction time.
        class InvalidInterceptor:
            """Just an invalid Interceptor"""

        with self.assertRaises(ValueError):
            aio.insecure_channel("", interceptors=[InvalidInterceptor()])

    async def test_executed_right_order(self):
        interceptors_executed = []

        class Interceptor(aio.UnaryUnaryClientInterceptor):
            """Interceptor used for testing if the interceptor is being called"""

            async def intercept_unary_unary(
                self, continuation, client_call_details, request
            ):
                # Record execution order before delegating.
                interceptors_executed.append(self)
                call = await continuation(client_call_details, request)
                return call

        interceptors = [Interceptor() for i in range(2)]

        async with aio.insecure_channel(
            self._server_target, interceptors=interceptors
        ) as channel:
            multicallable = channel.unary_unary(
                "/grpc.testing.TestService/UnaryCall",
                request_serializer=messages_pb2.SimpleRequest.SerializeToString,
                response_deserializer=messages_pb2.SimpleResponse.FromString,
            )
            call = multicallable(messages_pb2.SimpleRequest())
            response = await call

            # Check that all interceptors were executed, and were executed
            # in the right order.
            self.assertSequenceEqual(interceptors_executed, interceptors)

            self.assertIsInstance(response, messages_pb2.SimpleResponse)

    @unittest.expectedFailure
    # TODO(https://github.com/grpc/grpc/issues/20144) Once metadata support is
    # implemented in the client-side, this test must be implemented.
    def test_modify_metadata(self):
        raise NotImplementedError()

    @unittest.expectedFailure
    # TODO(https://github.com/grpc/grpc/issues/20532) Once credentials support is
    # implemented in the client-side, this test must be implemented.
    def test_modify_credentials(self):
        raise NotImplementedError()

    async def test_status_code_Ok(self):
        class StatusCodeOkInterceptor(aio.UnaryUnaryClientInterceptor):
            """Interceptor used for observing status code Ok returned by the RPC"""

            def __init__(self):
                self.status_code_Ok_observed = False

            async def intercept_unary_unary(
                self, continuation, client_call_details, request
            ):
                call = await continuation(client_call_details, request)
                code = await call.code()
                if code == grpc.StatusCode.OK:
                    self.status_code_Ok_observed = True

                return call

        interceptor = StatusCodeOkInterceptor()

        async with aio.insecure_channel(
            self._server_target, interceptors=[interceptor]
        ) as channel:
            # when no error StatusCode.OK must be observed
            multicallable = channel.unary_unary(
                "/grpc.testing.TestService/UnaryCall",
                request_serializer=messages_pb2.SimpleRequest.SerializeToString,
                response_deserializer=messages_pb2.SimpleResponse.FromString,
            )
            await multicallable(messages_pb2.SimpleRequest())

            self.assertTrue(interceptor.status_code_Ok_observed)

    async def test_add_timeout(self):
        class TimeoutInterceptor(aio.UnaryUnaryClientInterceptor):
            """Interceptor used for adding a timeout to the RPC"""

            async def intercept_unary_unary(
                self, continuation, client_call_details, request
            ):
                # Rebuild the call details with a timeout shorter than the
                # server's sleep, so the RPC must exceed its deadline.
                new_client_call_details = aio.ClientCallDetails(
                    method=client_call_details.method,
                    timeout=_constants.UNARY_CALL_WITH_SLEEP_VALUE / 2,
                    metadata=client_call_details.metadata,
                    credentials=client_call_details.credentials,
                    wait_for_ready=client_call_details.wait_for_ready,
                )
                return await continuation(new_client_call_details, request)

        interceptor = TimeoutInterceptor()

        async with aio.insecure_channel(
            self._server_target, interceptors=[interceptor]
        ) as channel:
            multicallable = channel.unary_unary(
                "/grpc.testing.TestService/UnaryCallWithSleep",
                request_serializer=messages_pb2.SimpleRequest.SerializeToString,
                response_deserializer=messages_pb2.SimpleResponse.FromString,
            )

            call = multicallable(messages_pb2.SimpleRequest())

            with self.assertRaises(aio.AioRpcError) as exception_context:
                await call

            self.assertEqual(
                exception_context.exception.code(),
                grpc.StatusCode.DEADLINE_EXCEEDED,
            )

            self.assertTrue(call.done())
            self.assertEqual(
                grpc.StatusCode.DEADLINE_EXCEEDED, await call.code()
            )

    async def test_retry(self):
        class RetryInterceptor(aio.UnaryUnaryClientInterceptor):
            """Simulates a Retry Interceptor which ends up by making
            two RPC calls."""

            def __init__(self):
                self.calls = []

            async def intercept_unary_unary(
                self, continuation, client_call_details, request
            ):
                # First attempt: deliberately too-short timeout, expected
                # to fail with DEADLINE_EXCEEDED.
                new_client_call_details = aio.ClientCallDetails(
                    method=client_call_details.method,
                    timeout=_constants.UNARY_CALL_WITH_SLEEP_VALUE / 2,
                    metadata=client_call_details.metadata,
                    credentials=client_call_details.credentials,
                    wait_for_ready=client_call_details.wait_for_ready,
                )

                try:
                    call = await continuation(new_client_call_details, request)
                    await call
                except grpc.RpcError:
                    pass

                self.calls.append(call)

                # Second attempt: no timeout, expected to succeed.
                new_client_call_details = aio.ClientCallDetails(
                    method=client_call_details.method,
                    timeout=None,
                    metadata=client_call_details.metadata,
                    credentials=client_call_details.credentials,
                    wait_for_ready=client_call_details.wait_for_ready,
                )

                call = await continuation(new_client_call_details, request)
                self.calls.append(call)
                return call

        interceptor = RetryInterceptor()

        async with aio.insecure_channel(
            self._server_target, interceptors=[interceptor]
        ) as channel:
            multicallable = channel.unary_unary(
                "/grpc.testing.TestService/UnaryCallWithSleep",
                request_serializer=messages_pb2.SimpleRequest.SerializeToString,
                response_deserializer=messages_pb2.SimpleResponse.FromString,
            )

            call = multicallable(messages_pb2.SimpleRequest())

            await call

            self.assertEqual(grpc.StatusCode.OK, await call.code())

            # Check that two calls were made, first one finishing with
            # a deadline and second one finishing ok..
            self.assertEqual(len(interceptor.calls), 2)
            self.assertEqual(
                await interceptor.calls[0].code(),
                grpc.StatusCode.DEADLINE_EXCEEDED,
            )
            self.assertEqual(
                await interceptor.calls[1].code(), grpc.StatusCode.OK
            )

    async def test_retry_with_multiple_interceptors(self):
        class RetryInterceptor(aio.UnaryUnaryClientInterceptor):
            async def intercept_unary_unary(
                self, continuation, client_call_details, request
            ):
                # Simulate retry twice
                for _ in range(2):
                    call = await continuation(client_call_details, request)
                    result = await call
                return result

        class AnotherInterceptor(aio.UnaryUnaryClientInterceptor):
            def __init__(self):
                self.called_times = 0

            async def intercept_unary_unary(
                self, continuation, client_call_details, request
            ):
                self.called_times += 1
                call = await continuation(client_call_details, request)
                result = await call
                return result

        # Create two interceptors, the retry interceptor will call another interceptor.
        retry_interceptor = RetryInterceptor()
        another_interceptor = AnotherInterceptor()
        async with aio.insecure_channel(
            self._server_target,
            interceptors=[retry_interceptor, another_interceptor],
        ) as channel:
            multicallable = channel.unary_unary(
                "/grpc.testing.TestService/UnaryCallWithSleep",
                request_serializer=messages_pb2.SimpleRequest.SerializeToString,
                response_deserializer=messages_pb2.SimpleResponse.FromString,
            )

            call = multicallable(messages_pb2.SimpleRequest())

            await call

            self.assertEqual(grpc.StatusCode.OK, await call.code())
            # Each retry attempt passes through the downstream interceptor.
            self.assertEqual(another_interceptor.called_times, 2)

    async def test_rpcresponse(self):
        class Interceptor(aio.UnaryUnaryClientInterceptor):
            """Raw responses are seen as regular calls"""

            async def intercept_unary_unary(
                self, continuation, client_call_details, request
            ):
                call = await continuation(client_call_details, request)
                response = await call
                return call

        class ResponseInterceptor(aio.UnaryUnaryClientInterceptor):
            """Return a raw response"""

            response = messages_pb2.SimpleResponse()

            async def intercept_unary_unary(
                self, continuation, client_call_details, request
            ):
                # Short-circuit: never invokes the continuation.
                return ResponseInterceptor.response

        interceptor, interceptor_response = Interceptor(), ResponseInterceptor()

        async with aio.insecure_channel(
            self._server_target,
            interceptors=[interceptor, interceptor_response],
        ) as channel:
            multicallable = channel.unary_unary(
                "/grpc.testing.TestService/UnaryCall",
                request_serializer=messages_pb2.SimpleRequest.SerializeToString,
                response_deserializer=messages_pb2.SimpleResponse.FromString,
            )

            call = multicallable(messages_pb2.SimpleRequest())
            response = await call

            # Check that the response returned is the one returned by the
            # interceptor
            self.assertEqual(id(response), id(ResponseInterceptor.response))

            # Check all of the UnaryUnaryCallResponse attributes
            self.assertTrue(call.done())
            self.assertFalse(call.cancel())
            self.assertFalse(call.cancelled())
            self.assertEqual(await call.code(), grpc.StatusCode.OK)
            self.assertEqual(await call.details(), "")
            self.assertEqual(await call.initial_metadata(), None)
            self.assertEqual(await call.trailing_metadata(), None)
            self.assertEqual(await call.debug_error_string(), None)
class TestInterceptedUnaryUnaryCall(AioTestBase):
    async def setUp(self):
        # Fresh test server for each test case.
        self._server_target, self._server = await start_test_server()
    async def tearDown(self):
        # Immediate shutdown; None grace period cancels in-flight RPCs.
        await self._server.stop(None)
    async def test_call_ok(self):
        """A pass-through interceptor leaves a successful call intact."""

        class Interceptor(aio.UnaryUnaryClientInterceptor):
            async def intercept_unary_unary(
                self, continuation, client_call_details, request
            ):
                # Delegate without awaiting the call itself.
                call = await continuation(client_call_details, request)
                return call

        async with aio.insecure_channel(
            self._server_target, interceptors=[Interceptor()]
        ) as channel:
            multicallable = channel.unary_unary(
                "/grpc.testing.TestService/UnaryCall",
                request_serializer=messages_pb2.SimpleRequest.SerializeToString,
                response_deserializer=messages_pb2.SimpleResponse.FromString,
            )
            call = multicallable(messages_pb2.SimpleRequest())
            response = await call

            self.assertTrue(call.done())
            self.assertFalse(call.cancelled())
            self.assertEqual(type(response), messages_pb2.SimpleResponse)
            self.assertEqual(await call.code(), grpc.StatusCode.OK)
            self.assertEqual(await call.details(), "")
            self.assertEqual(await call.initial_metadata(), aio.Metadata())
            self.assertEqual(await call.trailing_metadata(), aio.Metadata())
    async def test_call_ok_awaited(self):
        """Same as test_call_ok, but the interceptor awaits the call first."""

        class Interceptor(aio.UnaryUnaryClientInterceptor):
            async def intercept_unary_unary(
                self, continuation, client_call_details, request
            ):
                call = await continuation(client_call_details, request)
                # Consuming the response inside the interceptor must not
                # break the caller's view of the call.
                await call
                return call

        async with aio.insecure_channel(
            self._server_target, interceptors=[Interceptor()]
        ) as channel:
            multicallable = channel.unary_unary(
                "/grpc.testing.TestService/UnaryCall",
                request_serializer=messages_pb2.SimpleRequest.SerializeToString,
                response_deserializer=messages_pb2.SimpleResponse.FromString,
            )
            call = multicallable(messages_pb2.SimpleRequest())
            response = await call

            self.assertTrue(call.done())
            self.assertFalse(call.cancelled())
            self.assertEqual(type(response), messages_pb2.SimpleResponse)
            self.assertEqual(await call.code(), grpc.StatusCode.OK)
            self.assertEqual(await call.details(), "")
            self.assertEqual(await call.initial_metadata(), aio.Metadata())
            self.assertEqual(await call.trailing_metadata(), aio.Metadata())
    async def test_call_rpc_error(self):
        """A DEADLINE_EXCEEDED error propagates through a pass-through
        interceptor that returns the call without awaiting it."""

        class Interceptor(aio.UnaryUnaryClientInterceptor):
            async def intercept_unary_unary(
                self, continuation, client_call_details, request
            ):
                call = await continuation(client_call_details, request)
                return call

        async with aio.insecure_channel(
            self._server_target, interceptors=[Interceptor()]
        ) as channel:
            multicallable = channel.unary_unary(
                "/grpc.testing.TestService/UnaryCallWithSleep",
                request_serializer=messages_pb2.SimpleRequest.SerializeToString,
                response_deserializer=messages_pb2.SimpleResponse.FromString,
            )
            # Timeout shorter than the server-side sleep forces the deadline.
            call = multicallable(
                messages_pb2.SimpleRequest(),
                timeout=_constants.UNARY_CALL_WITH_SLEEP_VALUE / 2,
            )
            with self.assertRaises(aio.AioRpcError) as exception_context:
                await call
            self.assertTrue(call.done())
            self.assertFalse(call.cancelled())
            self.assertEqual(
                await call.code(), grpc.StatusCode.DEADLINE_EXCEEDED
            )
            self.assertEqual(await call.details(), "Deadline Exceeded")
            self.assertEqual(await call.initial_metadata(), aio.Metadata())
            self.assertEqual(await call.trailing_metadata(), aio.Metadata())
    async def test_call_rpc_error_awaited(self):
        """Same as test_call_rpc_error, but the interceptor awaits the
        failing call itself; the error must still surface to the caller
        with the full status intact."""

        class Interceptor(aio.UnaryUnaryClientInterceptor):
            async def intercept_unary_unary(
                self, continuation, client_call_details, request
            ):
                call = await continuation(client_call_details, request)
                # Awaiting raises AioRpcError here, inside the interceptor.
                await call
                return call

        async with aio.insecure_channel(
            self._server_target, interceptors=[Interceptor()]
        ) as channel:
            multicallable = channel.unary_unary(
                "/grpc.testing.TestService/UnaryCallWithSleep",
                request_serializer=messages_pb2.SimpleRequest.SerializeToString,
                response_deserializer=messages_pb2.SimpleResponse.FromString,
            )
            # Timeout shorter than the server-side sleep forces the deadline.
            call = multicallable(
                messages_pb2.SimpleRequest(),
                timeout=_constants.UNARY_CALL_WITH_SLEEP_VALUE / 2,
            )
            with self.assertRaises(aio.AioRpcError) as exception_context:
                await call
            self.assertTrue(call.done())
            self.assertFalse(call.cancelled())
            self.assertEqual(
                await call.code(), grpc.StatusCode.DEADLINE_EXCEEDED
            )
            self.assertEqual(await call.details(), "Deadline Exceeded")
            self.assertEqual(await call.initial_metadata(), aio.Metadata())
            self.assertEqual(await call.trailing_metadata(), aio.Metadata())
    async def test_cancel_before_rpc(self):
        """Cancelling while the interceptor has not yet started the RPC."""
        interceptor_reached = asyncio.Event()
        # Never-resolved future: parks the interceptor indefinitely.
        wait_for_ever = self.loop.create_future()

        class Interceptor(aio.UnaryUnaryClientInterceptor):
            async def intercept_unary_unary(
                self, continuation, client_call_details, request
            ):
                interceptor_reached.set()
                await wait_for_ever

        async with aio.insecure_channel(
            self._server_target, interceptors=[Interceptor()]
        ) as channel:
            multicallable = channel.unary_unary(
                "/grpc.testing.TestService/UnaryCall",
                request_serializer=messages_pb2.SimpleRequest.SerializeToString,
                response_deserializer=messages_pb2.SimpleResponse.FromString,
            )
            call = multicallable(messages_pb2.SimpleRequest())
            self.assertFalse(call.cancelled())
            self.assertFalse(call.done())
            await interceptor_reached.wait()
            self.assertTrue(call.cancel())
            with self.assertRaises(asyncio.CancelledError):
                await call
            self.assertTrue(call.cancelled())
            self.assertTrue(call.done())
            self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
            self.assertEqual(
                await call.details(), _LOCAL_CANCEL_DETAILS_EXPECTATION
            )
            # No underlying RPC ever ran, so there is no metadata at all.
            self.assertEqual(await call.initial_metadata(), None)
            self.assertEqual(await call.trailing_metadata(), None)
    async def test_cancel_after_rpc(self):
        """Cancelling while the interceptor is blocked *after* the
        underlying RPC already completed."""
        interceptor_reached = asyncio.Event()
        # Never-resolved future: parks the interceptor indefinitely.
        wait_for_ever = self.loop.create_future()

        class Interceptor(aio.UnaryUnaryClientInterceptor):
            async def intercept_unary_unary(
                self, continuation, client_call_details, request
            ):
                call = await continuation(client_call_details, request)
                await call
                interceptor_reached.set()
                await wait_for_ever

        async with aio.insecure_channel(
            self._server_target, interceptors=[Interceptor()]
        ) as channel:
            multicallable = channel.unary_unary(
                "/grpc.testing.TestService/UnaryCall",
                request_serializer=messages_pb2.SimpleRequest.SerializeToString,
                response_deserializer=messages_pb2.SimpleResponse.FromString,
            )
            call = multicallable(messages_pb2.SimpleRequest())
            self.assertFalse(call.cancelled())
            self.assertFalse(call.done())
            await interceptor_reached.wait()
            self.assertTrue(call.cancel())
            with self.assertRaises(asyncio.CancelledError):
                await call
            self.assertTrue(call.cancelled())
            self.assertTrue(call.done())
            self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
            self.assertEqual(
                await call.details(), _LOCAL_CANCEL_DETAILS_EXPECTATION
            )
            # The interceptor never returned a call object, so the wrapper
            # reports no metadata despite the inner RPC having finished.
            self.assertEqual(await call.initial_metadata(), None)
            self.assertEqual(await call.trailing_metadata(), None)
    async def test_cancel_inside_interceptor_after_rpc_awaiting(self):
        """The interceptor itself cancels the call and then awaits it;
        the caller observes a locally cancelled call."""

        class Interceptor(aio.UnaryUnaryClientInterceptor):
            async def intercept_unary_unary(
                self, continuation, client_call_details, request
            ):
                call = await continuation(client_call_details, request)
                call.cancel()
                # Awaiting a cancelled call raises CancelledError here.
                await call
                return call

        async with aio.insecure_channel(
            self._server_target, interceptors=[Interceptor()]
        ) as channel:
            multicallable = channel.unary_unary(
                "/grpc.testing.TestService/UnaryCall",
                request_serializer=messages_pb2.SimpleRequest.SerializeToString,
                response_deserializer=messages_pb2.SimpleResponse.FromString,
            )
            call = multicallable(messages_pb2.SimpleRequest())
            with self.assertRaises(asyncio.CancelledError):
                await call
            self.assertTrue(call.cancelled())
            self.assertTrue(call.done())
            self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
            self.assertEqual(
                await call.details(), _LOCAL_CANCEL_DETAILS_EXPECTATION
            )
            self.assertEqual(await call.initial_metadata(), None)
            self.assertEqual(await call.trailing_metadata(), None)
    async def test_cancel_inside_interceptor_after_rpc_not_awaiting(self):
        """The interceptor cancels the call but returns it without
        awaiting; unlike the awaiting variant, metadata resolves to an
        empty Metadata object instead of None."""

        class Interceptor(aio.UnaryUnaryClientInterceptor):
            async def intercept_unary_unary(
                self, continuation, client_call_details, request
            ):
                call = await continuation(client_call_details, request)
                call.cancel()
                return call

        async with aio.insecure_channel(
            self._server_target, interceptors=[Interceptor()]
        ) as channel:
            multicallable = channel.unary_unary(
                "/grpc.testing.TestService/UnaryCall",
                request_serializer=messages_pb2.SimpleRequest.SerializeToString,
                response_deserializer=messages_pb2.SimpleResponse.FromString,
            )
            call = multicallable(messages_pb2.SimpleRequest())
            with self.assertRaises(asyncio.CancelledError):
                await call
            self.assertTrue(call.cancelled())
            self.assertTrue(call.done())
            self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
            self.assertEqual(
                await call.details(), _LOCAL_CANCEL_DETAILS_EXPECTATION
            )
            self.assertEqual(await call.initial_metadata(), aio.Metadata())
            self.assertEqual(
                await call.trailing_metadata(),
                aio.Metadata(),
                "When the raw response is None, empty metadata is returned",
            )
    async def test_initial_metadata_modification(self):
        """The interceptor appends extra metadata to the outgoing call
        details; the test server echoes it back, proving the modified
        ClientCallDetails were actually used for the RPC."""

        class Interceptor(aio.UnaryUnaryClientInterceptor):
            async def intercept_unary_unary(
                self, continuation, client_call_details, request
            ):
                # Merge the caller's metadata with the injected entries.
                new_metadata = aio.Metadata(
                    *client_call_details.metadata, *_INITIAL_METADATA_TO_INJECT
                )
                # Rebuild the call details with the merged metadata;
                # all other fields are passed through unchanged.
                new_details = aio.ClientCallDetails(
                    method=client_call_details.method,
                    timeout=client_call_details.timeout,
                    metadata=new_metadata,
                    credentials=client_call_details.credentials,
                    wait_for_ready=client_call_details.wait_for_ready,
                )
                return await continuation(new_details, request)

        async with aio.insecure_channel(
            self._server_target, interceptors=[Interceptor()]
        ) as channel:
            stub = test_pb2_grpc.TestServiceStub(channel)
            call = stub.UnaryCall(messages_pb2.SimpleRequest())
            # Expected to see the echoed initial metadata
            self.assertTrue(
                _common.seen_metadatum(
                    expected_key=_INITIAL_METADATA_KEY,
                    expected_value=_INITIAL_METADATA_TO_INJECT[
                        _INITIAL_METADATA_KEY
                    ],
                    actual=await call.initial_metadata(),
                )
            )
            # Expected to see the echoed trailing metadata
            self.assertTrue(
                _common.seen_metadatum(
                    expected_key=_TRAILING_METADATA_KEY,
                    expected_value=_INITIAL_METADATA_TO_INJECT[
                        _TRAILING_METADATA_KEY
                    ],
                    actual=await call.trailing_metadata(),
                )
            )
            self.assertEqual(await call.code(), grpc.StatusCode.OK)
async def test_add_done_callback_before_finishes(self):
called = asyncio.Event()
interceptor_can_continue = asyncio.Event()
def callback(call):
called.set()
class Interceptor(aio.UnaryUnaryClientInterceptor):
async def intercept_unary_unary(
self, continuation, client_call_details, request
):
await interceptor_can_continue.wait()
call = await continuation(client_call_details, request)
return call
async with aio.insecure_channel(
self._server_target, interceptors=[Interceptor()]
) as channel:
multicallable = channel.unary_unary(
"/grpc.testing.TestService/UnaryCall",
request_serializer=messages_pb2.SimpleRequest.SerializeToString,
response_deserializer=messages_pb2.SimpleResponse.FromString,
)
call = multicallable(messages_pb2.SimpleRequest())
call.add_done_callback(callback)
interceptor_can_continue.set()
await call
try:
await asyncio.wait_for(
called.wait(), timeout=_TIMEOUT_CHECK_IF_CALLBACK_WAS_CALLED
)
except:
self.fail("Callback was not called")
async def test_add_done_callback_after_finishes(self):
called = asyncio.Event()
def callback(call):
called.set()
class Interceptor(aio.UnaryUnaryClientInterceptor):
async def intercept_unary_unary(
self, continuation, client_call_details, request
):
call = await continuation(client_call_details, request)
return call
async with aio.insecure_channel(
self._server_target, interceptors=[Interceptor()]
) as channel:
multicallable = channel.unary_unary(
"/grpc.testing.TestService/UnaryCall",
request_serializer=messages_pb2.SimpleRequest.SerializeToString,
response_deserializer=messages_pb2.SimpleResponse.FromString,
)
call = multicallable(messages_pb2.SimpleRequest())
await call
call.add_done_callback(callback)
try:
await asyncio.wait_for(
called.wait(), timeout=_TIMEOUT_CHECK_IF_CALLBACK_WAS_CALLED
)
except:
self.fail("Callback was not called")
async def test_add_done_callback_after_finishes_before_await(self):
called = asyncio.Event()
def callback(call):
called.set()
class Interceptor(aio.UnaryUnaryClientInterceptor):
async def intercept_unary_unary(
self, continuation, client_call_details, request
):
call = await continuation(client_call_details, request)
return call
async with aio.insecure_channel(
self._server_target, interceptors=[Interceptor()]
) as channel:
multicallable = channel.unary_unary(
"/grpc.testing.TestService/UnaryCall",
request_serializer=messages_pb2.SimpleRequest.SerializeToString,
response_deserializer=messages_pb2.SimpleResponse.FromString,
)
call = multicallable(messages_pb2.SimpleRequest())
call.add_done_callback(callback)
await call
try:
await asyncio.wait_for(
called.wait(), timeout=_TIMEOUT_CHECK_IF_CALLBACK_WAS_CALLED
)
except:
self.fail("Callback was not called")
if __name__ == "__main__":
    # Debug-level logging helps diagnose interceptor/cancellation races.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 30,604
| 39.164042
| 87
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/wait_for_ready_test.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Testing the done callbacks mechanism."""
import asyncio
import gc
import logging
import platform
import time
import unittest
import grpc
from grpc.experimental import aio
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
from tests.unit.framework.common import get_socket
from tests.unit.framework.common import test_constants
from tests_aio.unit import _common
from tests_aio.unit._test_base import AioTestBase
from tests_aio.unit._test_server import start_test_server
# Number of messages exchanged on every streaming RPC in this module.
_NUM_STREAM_RESPONSES = 5
# Payload sizes in bytes for requests and responses respectively.
_REQUEST_PAYLOAD_SIZE = 7
_RESPONSE_PAYLOAD_SIZE = 42
async def _perform_unary_unary(stub, wait_for_ready):
    """Issue a single UnaryCall with the given wait_for_ready flag."""
    request = messages_pb2.SimpleRequest()
    await stub.UnaryCall(
        request,
        timeout=test_constants.LONG_TIMEOUT,
        wait_for_ready=wait_for_ready,
    )
async def _perform_unary_stream(stub, wait_for_ready):
    """Issue a StreamingOutputCall and drain every expected response."""
    request = messages_pb2.StreamingOutputCallRequest()
    request.response_parameters.extend(
        messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
        for _ in range(_NUM_STREAM_RESPONSES)
    )
    call = stub.StreamingOutputCall(
        request,
        timeout=test_constants.LONG_TIMEOUT,
        wait_for_ready=wait_for_ready,
    )
    for _ in range(_NUM_STREAM_RESPONSES):
        await call.read()
    assert await call.code() == grpc.StatusCode.OK
async def _perform_stream_unary(stub, wait_for_ready):
    """Issue a StreamingInputCall fed by an async request generator."""
    request = messages_pb2.StreamingInputCallRequest(
        payload=messages_pb2.Payload(body=b"\0" * _REQUEST_PAYLOAD_SIZE)
    )

    async def request_iterator():
        # The same request object is yielded for every stream message.
        for _ in range(_NUM_STREAM_RESPONSES):
            yield request

    await stub.StreamingInputCall(
        request_iterator(),
        timeout=test_constants.LONG_TIMEOUT,
        wait_for_ready=wait_for_ready,
    )
async def _perform_stream_stream(stub, wait_for_ready):
    """Issue a FullDuplexCall, ping-ponging the expected message count."""
    duplex_call = stub.FullDuplexCall(
        timeout=test_constants.LONG_TIMEOUT, wait_for_ready=wait_for_ready
    )
    ping = messages_pb2.StreamingOutputCallRequest()
    ping.response_parameters.append(
        messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
    )
    for _ in range(_NUM_STREAM_RESPONSES):
        await duplex_call.write(ping)
        pong = await duplex_call.read()
        assert len(pong.payload.body) == _RESPONSE_PAYLOAD_SIZE
    await duplex_call.done_writing()
    assert await duplex_call.code() == grpc.StatusCode.OK
# One coroutine per RPC cardinality; each test below exercises all four.
_RPC_ACTIONS = (
    _perform_unary_unary,
    _perform_unary_stream,
    _perform_stream_unary,
    _perform_stream_stream,
)
class TestWaitForReady(AioTestBase):
    """Tests for the wait_for_ready (no-fail-fast) RPC option."""

    async def setUp(self):
        # Reserve a free port without listening on it, so every
        # connection attempt fails until a server is explicitly started.
        address, self._port, self._socket = get_socket(listen=False)
        self._channel = aio.insecure_channel(f"{address}:{self._port}")
        self._stub = test_pb2_grpc.TestServiceStub(self._channel)
        self._socket.close()

    async def tearDown(self):
        await self._channel.close()

    async def _connection_fails_fast(self, wait_for_ready):
        """Assert every RPC type fails immediately with UNAVAILABLE."""
        for action in _RPC_ACTIONS:
            # Use __name__ for readable subTest labels (consistent with
            # test_call_wait_for_ready_enabled below).
            with self.subTest(name=action.__name__):
                with self.assertRaises(aio.AioRpcError) as exception_context:
                    await action(self._stub, wait_for_ready)
                rpc_error = exception_context.exception
                self.assertEqual(grpc.StatusCode.UNAVAILABLE, rpc_error.code())

    async def test_call_wait_for_ready_default(self):
        """RPC should fail immediately after connection failed."""
        await self._connection_fails_fast(None)

    async def test_call_wait_for_ready_disabled(self):
        """RPC should fail immediately after connection failed."""
        await self._connection_fails_fast(False)

    @unittest.skipIf(
        platform.system() == "Windows",
        "https://github.com/grpc/grpc/pull/26729",
    )
    async def test_call_wait_for_ready_enabled(self):
        """RPC will wait until the connection is ready."""
        for action in _RPC_ACTIONS:
            with self.subTest(name=action.__name__):
                # Starts the RPC
                action_task = self.loop.create_task(action(self._stub, True))
                # Wait for TRANSIENT_FAILURE, and RPC is not aborting
                await _common.block_until_certain_state(
                    self._channel, grpc.ChannelConnectivity.TRANSIENT_FAILURE
                )
                # Pre-initialize so the finally clause cannot raise a
                # NameError (masking the real failure) when
                # start_test_server itself throws before assignment.
                server = None
                try:
                    # Start the server
                    _, server = await start_test_server(port=self._port)
                    # The RPC should recover itself
                    await action_task
                finally:
                    if server is not None:
                        await server.stop(None)
if __name__ == "__main__":
    # Verbose logging helps diagnose flaky connectivity transitions.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 5,344
| 31.791411
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/_metadata_test.py
|
# Copyright 2020 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the metadata abstraction that's used in the asynchronous driver."""
import logging
import unittest
from grpc.experimental.aio import Metadata
class TestTypeMetadata(unittest.TestCase):
    """Tests for the metadata type"""

    # Baseline metadata: two entries with unique keys.
    _DEFAULT_DATA = (("key1", "value1"), ("key2", "value2"))
    # Metadata exercising repeated keys ("key1" appears twice).
    _MULTI_ENTRY_DATA = (
        ("key1", "value1"),
        ("key1", "other value 1"),
        ("key2", "value2"),
    )

    def test_init_metadata(self):
        """Constructor accepts any number of (key, value) pairs."""
        test_cases = {
            # Fixed typo in label: "emtpy" -> "empty".
            "empty": (),
            "with-single-data": self._DEFAULT_DATA,
            "with-multi-data": self._MULTI_ENTRY_DATA,
        }
        for case, args in test_cases.items():
            with self.subTest(case=case):
                metadata = Metadata(*args)
                self.assertEqual(len(metadata), len(args))

    def test_get_item(self):
        """Subscript/get return the first value stored under a key."""
        metadata = Metadata(
            ("key", "value1"), ("key", "value2"), ("key2", "other value")
        )
        self.assertEqual(metadata["key"], "value1")
        self.assertEqual(metadata["key2"], "other value")
        self.assertEqual(metadata.get("key"), "value1")
        self.assertEqual(metadata.get("key2"), "other value")
        with self.assertRaises(KeyError):
            metadata["key not found"]
        self.assertIsNone(metadata.get("key not found"))

    def test_add_value(self):
        """add() appends entries; lookup still returns the first value."""
        metadata = Metadata()
        metadata.add("key", "value")
        metadata.add("key", "second value")
        metadata.add("key2", "value2")
        self.assertEqual(metadata["key"], "value")
        self.assertEqual(metadata["key2"], "value2")

    def test_get_all_items(self):
        """get_all() returns every value for a key, in insertion order."""
        metadata = Metadata(*self._MULTI_ENTRY_DATA)
        self.assertEqual(metadata.get_all("key1"), ["value1", "other value 1"])
        self.assertEqual(metadata.get_all("key2"), ["value2"])
        self.assertEqual(metadata.get_all("non existing key"), [])

    def test_container(self):
        """`in` tests key membership."""
        metadata = Metadata(*self._MULTI_ENTRY_DATA)
        self.assertIn("key1", metadata)

    def test_equals(self):
        """Metadata built incrementally equals metadata built at once."""
        metadata = Metadata()
        for key, value in self._DEFAULT_DATA:
            metadata.add(key, value)
        metadata2 = Metadata(*self._DEFAULT_DATA)
        self.assertEqual(metadata, metadata2)
        self.assertNotEqual(metadata, "foo")

    def test_repr(self):
        """repr() shows the underlying tuple of entries."""
        metadata = Metadata(*self._DEFAULT_DATA)
        expected = "Metadata({0!r})".format(self._DEFAULT_DATA)
        self.assertEqual(repr(metadata), expected)

    def test_set(self):
        """Subscript assignment replaces only the first value for a key."""
        metadata = Metadata(*self._MULTI_ENTRY_DATA)
        override_value = "override value"
        # Repeated to verify the operation is idempotent.
        for _ in range(3):
            metadata["key1"] = override_value
        self.assertEqual(metadata["key1"], override_value)
        self.assertEqual(
            metadata.get_all("key1"), [override_value, "other value 1"]
        )
        empty_metadata = Metadata()
        for _ in range(3):
            empty_metadata["key"] = override_value
        self.assertEqual(empty_metadata["key"], override_value)
        self.assertEqual(empty_metadata.get_all("key"), [override_value])

    def test_set_all(self):
        """set_all() replaces every value stored under a key."""
        metadata = Metadata(*self._DEFAULT_DATA)
        metadata.set_all("key", ["value1", b"new value 2"])
        self.assertEqual(metadata["key"], "value1")
        self.assertEqual(metadata.get_all("key"), ["value1", b"new value 2"])

    def test_delete_values(self):
        """del removes the first value; delete_all() removes every value."""
        metadata = Metadata(*self._MULTI_ENTRY_DATA)
        del metadata["key1"]
        self.assertEqual(metadata.get("key1"), "other value 1")
        metadata.delete_all("key1")
        self.assertNotIn("key1", metadata)
        metadata.delete_all("key2")
        self.assertEqual(len(metadata), 0)
        with self.assertRaises(KeyError):
            del metadata["other key"]

    def test_metadata_from_tuple(self):
        """from_tuple() accepts None, tuples, and Metadata instances."""
        scenarios = (
            (None, Metadata()),
            (Metadata(), Metadata()),
            (self._DEFAULT_DATA, Metadata(*self._DEFAULT_DATA)),
            (self._MULTI_ENTRY_DATA, Metadata(*self._MULTI_ENTRY_DATA)),
            (Metadata(*self._DEFAULT_DATA), Metadata(*self._DEFAULT_DATA)),
        )
        for source, expected in scenarios:
            with self.subTest(raw_metadata=source, expected=expected):
                self.assertEqual(expected, Metadata.from_tuple(source))
if __name__ == "__main__":
    # Default logging configuration is enough for these pure-type tests.
    logging.basicConfig()
    unittest.main(verbosity=2)
| 5,016
| 34.083916
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/server_time_remaining_test.py
|
# Copyright 2021 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test the time_remaining() method of async ServicerContext."""
import asyncio
import datetime
import logging
import unittest
import grpc
from grpc import aio
from tests_aio.unit._common import ADHOC_METHOD
from tests_aio.unit._common import AdhocGenericHandler
from tests_aio.unit._test_base import AioTestBase
# Arbitrary opaque request payload handed to the ad-hoc handler.
_REQUEST = b"\x09\x05"
# Client-side timeout; the server handler should see most of it remaining.
_REQUEST_TIMEOUT_S = datetime.timedelta(seconds=5).total_seconds()
class TestServerTimeRemaining(AioTestBase):
    """Verifies ServicerContext.time_remaining() reflects the client deadline."""

    async def setUp(self):
        # Create async server
        self._server = aio.server(options=(("grpc.so_reuseport", 0),))
        self._adhoc_handlers = AdhocGenericHandler()
        self._server.add_generic_rpc_handlers((self._adhoc_handlers,))
        port = self._server.add_insecure_port("[::]:0")
        address = "localhost:%d" % port
        await self._server.start()
        # Create async channel
        self._channel = aio.insecure_channel(address)

    async def tearDown(self):
        await self._channel.close()
        await self._server.stop(None)

    async def test_servicer_context_time_remaining(self):
        """time_remaining() tracks the client timeout, or None without one."""
        seen_time_remaining = []

        @grpc.unary_unary_rpc_method_handler
        def log_time_remaining(
            request: bytes, context: grpc.ServicerContext
        ) -> bytes:
            # Record the server-observed remaining time for later asserts.
            seen_time_remaining.append(context.time_remaining())
            return b""

        # Check if the deadline propagates properly
        self._adhoc_handlers.set_adhoc_handler(log_time_remaining)
        await self._channel.unary_unary(ADHOC_METHOD)(
            _REQUEST, timeout=_REQUEST_TIMEOUT_S
        )
        # More than half the timeout should remain when the handler runs.
        self.assertGreater(seen_time_remaining[0], _REQUEST_TIMEOUT_S / 2)
        # Check if there is no timeout, the time_remaining will be None
        self._adhoc_handlers.set_adhoc_handler(log_time_remaining)
        await self._channel.unary_unary(ADHOC_METHOD)(_REQUEST)
        self.assertIsNone(seen_time_remaining[1])
if __name__ == "__main__":
    # Debug logging aids diagnosing deadline-propagation issues.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 2,609
| 34.753425
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/client_unary_stream_interceptor_test.py
|
# Copyright 2020 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import datetime
import logging
import unittest
import grpc
from grpc.experimental import aio
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
from tests.unit.framework.common import test_constants
from tests_aio.unit._common import CountingResponseIterator
from tests_aio.unit._common import inject_callbacks
from tests_aio.unit._constants import UNREACHABLE_TARGET
from tests_aio.unit._test_base import AioTestBase
from tests_aio.unit._test_server import start_test_server
# Short timeout (seconds) used to trigger deadline-related behavior.
_SHORT_TIMEOUT_S = 1.0
# Number of messages expected on each streaming RPC.
_NUM_STREAM_RESPONSES = 5
# Payload sizes in bytes.
_REQUEST_PAYLOAD_SIZE = 7
_RESPONSE_PAYLOAD_SIZE = 7
# _SHORT_TIMEOUT_S expressed in microseconds (server-side interval unit).
_RESPONSE_INTERVAL_US = int(_SHORT_TIMEOUT_S * 1000 * 1000)
class _UnaryStreamInterceptorEmpty(aio.UnaryStreamClientInterceptor):
    """Interceptor that forwards the call completely untouched."""

    async def intercept_unary_stream(
        self, continuation, client_call_details, request
    ):
        call = await continuation(client_call_details, request)
        return call

    def assert_in_final_state(self, test: unittest.TestCase):
        # Nothing to verify: this interceptor keeps no state.
        pass
class _UnaryStreamInterceptorWithResponseIterator(
    aio.UnaryStreamClientInterceptor
):
    """Interceptor that wraps the response stream in a counting iterator."""

    async def intercept_unary_stream(
        self, continuation, client_call_details, request
    ):
        inner_call = await continuation(client_call_details, request)
        # Expose the wrapper so tests can inspect the observed count.
        self.response_iterator = CountingResponseIterator(inner_call)
        return self.response_iterator

    def assert_in_final_state(self, test: unittest.TestCase):
        # Every streamed response must have passed through the wrapper.
        test.assertEqual(
            _NUM_STREAM_RESPONSES, self.response_iterator.response_cnt
        )
class TestUnaryStreamClientInterceptor(AioTestBase):
async def setUp(self):
self._server_target, self._server = await start_test_server()
async def tearDown(self):
await self._server.stop(None)
async def test_intercepts(self):
for interceptor_class in (
_UnaryStreamInterceptorEmpty,
_UnaryStreamInterceptorWithResponseIterator,
):
with self.subTest(name=interceptor_class):
interceptor = interceptor_class()
request = messages_pb2.StreamingOutputCallRequest()
request.response_parameters.extend(
[
messages_pb2.ResponseParameters(
size=_RESPONSE_PAYLOAD_SIZE
)
]
* _NUM_STREAM_RESPONSES
)
channel = aio.insecure_channel(
self._server_target, interceptors=[interceptor]
)
stub = test_pb2_grpc.TestServiceStub(channel)
call = stub.StreamingOutputCall(request)
await call.wait_for_connection()
response_cnt = 0
async for response in call:
response_cnt += 1
self.assertIs(
type(response), messages_pb2.StreamingOutputCallResponse
)
self.assertEqual(
_RESPONSE_PAYLOAD_SIZE, len(response.payload.body)
)
self.assertEqual(response_cnt, _NUM_STREAM_RESPONSES)
self.assertEqual(await call.code(), grpc.StatusCode.OK)
self.assertEqual(await call.initial_metadata(), aio.Metadata())
self.assertEqual(await call.trailing_metadata(), aio.Metadata())
self.assertEqual(await call.details(), "")
self.assertEqual(await call.debug_error_string(), "")
self.assertEqual(call.cancel(), False)
self.assertEqual(call.cancelled(), False)
self.assertEqual(call.done(), True)
interceptor.assert_in_final_state(self)
await channel.close()
async def test_add_done_callback_interceptor_task_not_finished(self):
for interceptor_class in (
_UnaryStreamInterceptorEmpty,
_UnaryStreamInterceptorWithResponseIterator,
):
with self.subTest(name=interceptor_class):
interceptor = interceptor_class()
request = messages_pb2.StreamingOutputCallRequest()
request.response_parameters.extend(
[
messages_pb2.ResponseParameters(
size=_RESPONSE_PAYLOAD_SIZE
)
]
* _NUM_STREAM_RESPONSES
)
channel = aio.insecure_channel(
self._server_target, interceptors=[interceptor]
)
stub = test_pb2_grpc.TestServiceStub(channel)
call = stub.StreamingOutputCall(request)
validation = inject_callbacks(call)
async for response in call:
pass
await validation
await channel.close()
async def test_add_done_callback_interceptor_task_finished(self):
for interceptor_class in (
_UnaryStreamInterceptorEmpty,
_UnaryStreamInterceptorWithResponseIterator,
):
with self.subTest(name=interceptor_class):
interceptor = interceptor_class()
request = messages_pb2.StreamingOutputCallRequest()
request.response_parameters.extend(
[
messages_pb2.ResponseParameters(
size=_RESPONSE_PAYLOAD_SIZE
)
]
* _NUM_STREAM_RESPONSES
)
channel = aio.insecure_channel(
self._server_target, interceptors=[interceptor]
)
stub = test_pb2_grpc.TestServiceStub(channel)
call = stub.StreamingOutputCall(request)
# This ensures that the callbacks will be registered
# with the intercepted call rather than saving in the
# pending state list.
await call.wait_for_connection()
validation = inject_callbacks(call)
async for response in call:
pass
await validation
await channel.close()
async def test_response_iterator_using_read(self):
interceptor = _UnaryStreamInterceptorWithResponseIterator()
channel = aio.insecure_channel(
self._server_target, interceptors=[interceptor]
)
stub = test_pb2_grpc.TestServiceStub(channel)
request = messages_pb2.StreamingOutputCallRequest()
request.response_parameters.extend(
[messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)]
* _NUM_STREAM_RESPONSES
)
call = stub.StreamingOutputCall(request)
response_cnt = 0
for response in range(_NUM_STREAM_RESPONSES):
response = await call.read()
response_cnt += 1
self.assertIs(
type(response), messages_pb2.StreamingOutputCallResponse
)
self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
self.assertEqual(response_cnt, _NUM_STREAM_RESPONSES)
self.assertEqual(
interceptor.response_iterator.response_cnt, _NUM_STREAM_RESPONSES
)
self.assertEqual(await call.code(), grpc.StatusCode.OK)
await channel.close()
async def test_multiple_interceptors_response_iterator(self):
for interceptor_class in (
_UnaryStreamInterceptorEmpty,
_UnaryStreamInterceptorWithResponseIterator,
):
with self.subTest(name=interceptor_class):
interceptors = [interceptor_class(), interceptor_class()]
channel = aio.insecure_channel(
self._server_target, interceptors=interceptors
)
stub = test_pb2_grpc.TestServiceStub(channel)
request = messages_pb2.StreamingOutputCallRequest()
request.response_parameters.extend(
[
messages_pb2.ResponseParameters(
size=_RESPONSE_PAYLOAD_SIZE
)
]
* _NUM_STREAM_RESPONSES
)
call = stub.StreamingOutputCall(request)
response_cnt = 0
async for response in call:
response_cnt += 1
self.assertIs(
type(response), messages_pb2.StreamingOutputCallResponse
)
self.assertEqual(
_RESPONSE_PAYLOAD_SIZE, len(response.payload.body)
)
self.assertEqual(response_cnt, _NUM_STREAM_RESPONSES)
self.assertEqual(await call.code(), grpc.StatusCode.OK)
await channel.close()
async def test_intercepts_response_iterator_rpc_error(self):
for interceptor_class in (
_UnaryStreamInterceptorEmpty,
_UnaryStreamInterceptorWithResponseIterator,
):
with self.subTest(name=interceptor_class):
channel = aio.insecure_channel(
UNREACHABLE_TARGET, interceptors=[interceptor_class()]
)
request = messages_pb2.StreamingOutputCallRequest()
stub = test_pb2_grpc.TestServiceStub(channel)
call = stub.StreamingOutputCall(request)
with self.assertRaises(aio.AioRpcError) as exception_context:
async for response in call:
pass
self.assertEqual(
grpc.StatusCode.UNAVAILABLE,
exception_context.exception.code(),
)
self.assertTrue(call.done())
self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
await channel.close()
async def test_cancel_before_rpc(self):
interceptor_reached = asyncio.Event()
wait_for_ever = self.loop.create_future()
class Interceptor(aio.UnaryStreamClientInterceptor):
async def intercept_unary_stream(
self, continuation, client_call_details, request
):
interceptor_reached.set()
await wait_for_ever
channel = aio.insecure_channel(
UNREACHABLE_TARGET, interceptors=[Interceptor()]
)
request = messages_pb2.StreamingOutputCallRequest()
stub = test_pb2_grpc.TestServiceStub(channel)
call = stub.StreamingOutputCall(request)
self.assertFalse(call.cancelled())
self.assertFalse(call.done())
await interceptor_reached.wait()
self.assertTrue(call.cancel())
with self.assertRaises(asyncio.CancelledError):
async for response in call:
pass
self.assertTrue(call.cancelled())
self.assertTrue(call.done())
self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
self.assertEqual(await call.initial_metadata(), None)
self.assertEqual(await call.trailing_metadata(), None)
await channel.close()
async def test_cancel_after_rpc(self):
interceptor_reached = asyncio.Event()
wait_for_ever = self.loop.create_future()
class Interceptor(aio.UnaryStreamClientInterceptor):
async def intercept_unary_stream(
self, continuation, client_call_details, request
):
call = await continuation(client_call_details, request)
interceptor_reached.set()
await wait_for_ever
channel = aio.insecure_channel(
UNREACHABLE_TARGET, interceptors=[Interceptor()]
)
request = messages_pb2.StreamingOutputCallRequest()
stub = test_pb2_grpc.TestServiceStub(channel)
call = stub.StreamingOutputCall(request)
self.assertFalse(call.cancelled())
self.assertFalse(call.done())
await interceptor_reached.wait()
self.assertTrue(call.cancel())
with self.assertRaises(asyncio.CancelledError):
async for response in call:
pass
self.assertTrue(call.cancelled())
self.assertTrue(call.done())
self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
self.assertEqual(await call.initial_metadata(), None)
self.assertEqual(await call.trailing_metadata(), None)
await channel.close()
    async def test_cancel_consuming_response_iterator(self):
        """Cancels the call from inside the response-consuming loop."""
        request = messages_pb2.StreamingOutputCallRequest()
        request.response_parameters.extend(
            [messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)]
            * _NUM_STREAM_RESPONSES
        )
        channel = aio.insecure_channel(
            self._server_target,
            interceptors=[_UnaryStreamInterceptorWithResponseIterator()],
        )
        stub = test_pb2_grpc.TestServiceStub(channel)
        call = stub.StreamingOutputCall(request)
        with self.assertRaises(asyncio.CancelledError):
            async for response in call:
                # Cancel on the first response; iteration must then raise.
                call.cancel()
        self.assertTrue(call.cancelled())
        self.assertTrue(call.done())
        self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
        await channel.close()
    async def test_cancel_by_the_interceptor(self):
        """The interceptor itself cancels the call before returning it."""
        class Interceptor(aio.UnaryStreamClientInterceptor):
            async def intercept_unary_stream(
                self, continuation, client_call_details, request
            ):
                call = await continuation(client_call_details, request)
                call.cancel()
                return call
        channel = aio.insecure_channel(
            UNREACHABLE_TARGET, interceptors=[Interceptor()]
        )
        request = messages_pb2.StreamingOutputCallRequest()
        stub = test_pb2_grpc.TestServiceStub(channel)
        call = stub.StreamingOutputCall(request)
        # The caller observes the cancellation performed by the interceptor.
        with self.assertRaises(asyncio.CancelledError):
            async for response in call:
                pass
        self.assertTrue(call.cancelled())
        self.assertTrue(call.done())
        self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
        await channel.close()
    async def test_exception_raised_by_interceptor(self):
        """An exception escaping the interceptor propagates to the caller."""
        class InterceptorException(Exception):
            pass
        class Interceptor(aio.UnaryStreamClientInterceptor):
            async def intercept_unary_stream(
                self, continuation, client_call_details, request
            ):
                # Fail before even invoking the continuation.
                raise InterceptorException
        channel = aio.insecure_channel(
            UNREACHABLE_TARGET, interceptors=[Interceptor()]
        )
        request = messages_pb2.StreamingOutputCallRequest()
        stub = test_pb2_grpc.TestServiceStub(channel)
        call = stub.StreamingOutputCall(request)
        with self.assertRaises(InterceptorException):
            async for response in call:
                pass
        await channel.close()
if __name__ == "__main__":
    # Verbose logging aids debugging of async scheduling in these tests.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 15,974
| 35.472603
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/compatibility_test.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Testing the compatibility between AsyncIO stack and the old stack."""
import asyncio
from concurrent.futures import ThreadPoolExecutor
import logging
import os
import random
import threading
from typing import Callable, Iterable, Sequence, Tuple
import unittest
import grpc
from grpc.experimental import aio
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
from tests.unit.framework.common import test_constants
from tests_aio.unit import _common
from tests_aio.unit._test_base import AioTestBase
from tests_aio.unit._test_server import TestServiceServicer
from tests_aio.unit._test_server import start_test_server
_NUM_STREAM_RESPONSES = 5
_REQUEST_PAYLOAD_SIZE = 7
_RESPONSE_PAYLOAD_SIZE = 42
_REQUEST = b"\x03\x07"
def _unique_options() -> Sequence[Tuple[str, float]]:
    """Return a single-entry channel-option tuple unique to this call.

    The value is a fresh random float, so two invocations produce distinct
    channel arguments — presumably to keep each test on its own channel
    rather than a shared/cached one (TODO confirm against channel caching).
    """
    nonce = random.random()
    return (("iv", nonce),)
@unittest.skipIf(
    os.environ.get("GRPC_ASYNCIO_ENGINE", "").lower() == "custom_io_manager",
    "Compatible mode needs POLLER completion queue.",
)
class TestCompatibility(AioTestBase):
    """Interop tests between the AsyncIO gRPC stack and the classic sync stack.

    Async-side calls run on the shared test event loop; sync-side calls run
    on a helper thread via _run_in_another_thread so blocking sync API calls
    never stall the loop. Both stacks talk to the same in-process server.
    """
    async def setUp(self):
        self._async_server = aio.server(
            options=(("grpc.so_reuseport", 0),),
            migration_thread_pool=ThreadPoolExecutor(),
        )
        test_pb2_grpc.add_TestServiceServicer_to_server(
            TestServiceServicer(), self._async_server
        )
        # Per-test handlers are installed through this generic handler.
        self._adhoc_handlers = _common.AdhocGenericHandler()
        self._async_server.add_generic_rpc_handlers((self._adhoc_handlers,))
        port = self._async_server.add_insecure_port("[::]:0")
        address = "localhost:%d" % port
        await self._async_server.start()
        # Create async stub
        self._async_channel = aio.insecure_channel(
            address, options=_unique_options()
        )
        self._async_stub = test_pb2_grpc.TestServiceStub(self._async_channel)
        # Create sync stub
        self._sync_channel = grpc.insecure_channel(
            address, options=_unique_options()
        )
        self._sync_stub = test_pb2_grpc.TestServiceStub(self._sync_channel)
    async def tearDown(self):
        self._sync_channel.close()
        await self._async_channel.close()
        await self._async_server.stop(None)
    async def _run_in_another_thread(self, func: Callable[[], None]):
        """Run *func* on a daemon thread and await its completion.

        Completion is signalled back to the event loop via
        call_soon_threadsafe, so awaiting never blocks the loop.
        """
        work_done = asyncio.Event()
        def thread_work():
            func()
            self.loop.call_soon_threadsafe(work_done.set)
        thread = threading.Thread(target=thread_work, daemon=True)
        thread.start()
        await work_done.wait()
        thread.join()
    async def test_unary_unary(self):
        # Calling async API in this thread
        await self._async_stub.UnaryCall(
            messages_pb2.SimpleRequest(), timeout=test_constants.LONG_TIMEOUT
        )
        # Calling sync API in a different thread
        def sync_work() -> None:
            response, call = self._sync_stub.UnaryCall.with_call(
                messages_pb2.SimpleRequest(),
                timeout=test_constants.LONG_TIMEOUT,
            )
            self.assertIsInstance(response, messages_pb2.SimpleResponse)
            self.assertEqual(grpc.StatusCode.OK, call.code())
        await self._run_in_another_thread(sync_work)
    async def test_unary_stream(self):
        request = messages_pb2.StreamingOutputCallRequest()
        for _ in range(_NUM_STREAM_RESPONSES):
            request.response_parameters.append(
                messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
            )
        # Calling async API in this thread
        call = self._async_stub.StreamingOutputCall(request)
        for _ in range(_NUM_STREAM_RESPONSES):
            await call.read()
        self.assertEqual(grpc.StatusCode.OK, await call.code())
        # Calling sync API in a different thread
        def sync_work() -> None:
            response_iterator = self._sync_stub.StreamingOutputCall(request)
            for response in response_iterator:
                assert _RESPONSE_PAYLOAD_SIZE == len(response.payload.body)
            self.assertEqual(grpc.StatusCode.OK, response_iterator.code())
        await self._run_in_another_thread(sync_work)
    async def test_stream_unary(self):
        payload = messages_pb2.Payload(body=b"\0" * _REQUEST_PAYLOAD_SIZE)
        request = messages_pb2.StreamingInputCallRequest(payload=payload)
        # Calling async API in this thread
        async def gen():
            for _ in range(_NUM_STREAM_RESPONSES):
                yield request
        response = await self._async_stub.StreamingInputCall(gen())
        self.assertEqual(
            _NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
            response.aggregated_payload_size,
        )
        # Calling sync API in a different thread
        def sync_work() -> None:
            response = self._sync_stub.StreamingInputCall(
                iter([request] * _NUM_STREAM_RESPONSES)
            )
            self.assertEqual(
                _NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
                response.aggregated_payload_size,
            )
        await self._run_in_another_thread(sync_work)
    async def test_stream_stream(self):
        request = messages_pb2.StreamingOutputCallRequest()
        request.response_parameters.append(
            messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
        )
        # Calling async API in this thread
        call = self._async_stub.FullDuplexCall()
        for _ in range(_NUM_STREAM_RESPONSES):
            await call.write(request)
            response = await call.read()
            assert _RESPONSE_PAYLOAD_SIZE == len(response.payload.body)
        await call.done_writing()
        assert await call.code() == grpc.StatusCode.OK
        # Calling sync API in a different thread
        def sync_work() -> None:
            response_iterator = self._sync_stub.FullDuplexCall(iter([request]))
            for response in response_iterator:
                assert _RESPONSE_PAYLOAD_SIZE == len(response.payload.body)
            self.assertEqual(grpc.StatusCode.OK, response_iterator.code())
        await self._run_in_another_thread(sync_work)
    async def test_server(self):
        class GenericHandlers(grpc.GenericRpcHandler):
            def service(self, handler_call_details):
                return grpc.unary_unary_rpc_method_handler(lambda x, _: x)
        # It's fine to instantiate server object in the event loop thread.
        # The server will spawn its own serving thread.
        server = grpc.server(
            ThreadPoolExecutor(), handlers=(GenericHandlers(),)
        )
        port = server.add_insecure_port("localhost:0")
        server.start()
        def sync_work() -> None:
            for _ in range(100):
                with grpc.insecure_channel("localhost:%d" % port) as channel:
                    response = channel.unary_unary("/test/test")(b"\x07\x08")
                    self.assertEqual(response, b"\x07\x08")
        await self._run_in_another_thread(sync_work)
    async def test_many_loop(self):
        address, server = await start_test_server()
        # Run another loop in another thread
        def sync_work():
            async def async_work():
                # Create async stub
                async_channel = aio.insecure_channel(
                    address, options=_unique_options()
                )
                async_stub = test_pb2_grpc.TestServiceStub(async_channel)
                call = async_stub.UnaryCall(messages_pb2.SimpleRequest())
                response = await call
                self.assertIsInstance(response, messages_pb2.SimpleResponse)
                self.assertEqual(grpc.StatusCode.OK, await call.code())
            loop = asyncio.new_event_loop()
            loop.run_until_complete(async_work())
        await self._run_in_another_thread(sync_work)
        await server.stop(None)
    async def test_sync_unary_unary_success(self):
        # Sync (non-async) handler served by the async server.
        @grpc.unary_unary_rpc_method_handler
        def echo_unary_unary(request: bytes, unused_context):
            return request
        self._adhoc_handlers.set_adhoc_handler(echo_unary_unary)
        response = await self._async_channel.unary_unary(_common.ADHOC_METHOD)(
            _REQUEST
        )
        self.assertEqual(_REQUEST, response)
    async def test_sync_unary_unary_metadata(self):
        metadata = (("unique", "key-42"),)
        @grpc.unary_unary_rpc_method_handler
        def metadata_unary_unary(request: bytes, context: grpc.ServicerContext):
            context.send_initial_metadata(metadata)
            return request
        self._adhoc_handlers.set_adhoc_handler(metadata_unary_unary)
        call = self._async_channel.unary_unary(_common.ADHOC_METHOD)(_REQUEST)
        self.assertTrue(
            _common.seen_metadata(
                aio.Metadata(*metadata), await call.initial_metadata()
            )
        )
    async def test_sync_unary_unary_abort(self):
        @grpc.unary_unary_rpc_method_handler
        def abort_unary_unary(request: bytes, context: grpc.ServicerContext):
            context.abort(grpc.StatusCode.INTERNAL, "Test")
        self._adhoc_handlers.set_adhoc_handler(abort_unary_unary)
        with self.assertRaises(aio.AioRpcError) as exception_context:
            await self._async_channel.unary_unary(_common.ADHOC_METHOD)(
                _REQUEST
            )
        self.assertEqual(
            grpc.StatusCode.INTERNAL, exception_context.exception.code()
        )
    async def test_sync_unary_unary_set_code(self):
        @grpc.unary_unary_rpc_method_handler
        def set_code_unary_unary(request: bytes, context: grpc.ServicerContext):
            context.set_code(grpc.StatusCode.INTERNAL)
        self._adhoc_handlers.set_adhoc_handler(set_code_unary_unary)
        with self.assertRaises(aio.AioRpcError) as exception_context:
            await self._async_channel.unary_unary(_common.ADHOC_METHOD)(
                _REQUEST
            )
        self.assertEqual(
            grpc.StatusCode.INTERNAL, exception_context.exception.code()
        )
    async def test_sync_unary_stream_success(self):
        @grpc.unary_stream_rpc_method_handler
        def echo_unary_stream(request: bytes, unused_context):
            for _ in range(_NUM_STREAM_RESPONSES):
                yield request
        self._adhoc_handlers.set_adhoc_handler(echo_unary_stream)
        call = self._async_channel.unary_stream(_common.ADHOC_METHOD)(_REQUEST)
        async for response in call:
            self.assertEqual(_REQUEST, response)
    async def test_sync_unary_stream_error(self):
        @grpc.unary_stream_rpc_method_handler
        def error_unary_stream(request: bytes, unused_context):
            for _ in range(_NUM_STREAM_RESPONSES):
                yield request
            # Failure after some responses already went out.
            raise RuntimeError("Test")
        self._adhoc_handlers.set_adhoc_handler(error_unary_stream)
        call = self._async_channel.unary_stream(_common.ADHOC_METHOD)(_REQUEST)
        with self.assertRaises(aio.AioRpcError) as exception_context:
            async for response in call:
                self.assertEqual(_REQUEST, response)
        self.assertEqual(
            grpc.StatusCode.UNKNOWN, exception_context.exception.code()
        )
    async def test_sync_stream_unary_success(self):
        @grpc.stream_unary_rpc_method_handler
        def echo_stream_unary(
            request_iterator: Iterable[bytes], unused_context
        ):
            self.assertEqual(len(list(request_iterator)), _NUM_STREAM_RESPONSES)
            return _REQUEST
        self._adhoc_handlers.set_adhoc_handler(echo_stream_unary)
        request_iterator = iter([_REQUEST] * _NUM_STREAM_RESPONSES)
        response = await self._async_channel.stream_unary(_common.ADHOC_METHOD)(
            request_iterator
        )
        self.assertEqual(_REQUEST, response)
    async def test_sync_stream_unary_error(self):
        @grpc.stream_unary_rpc_method_handler
        def echo_stream_unary(
            request_iterator: Iterable[bytes], unused_context
        ):
            self.assertEqual(len(list(request_iterator)), _NUM_STREAM_RESPONSES)
            raise RuntimeError("Test")
        self._adhoc_handlers.set_adhoc_handler(echo_stream_unary)
        request_iterator = iter([_REQUEST] * _NUM_STREAM_RESPONSES)
        with self.assertRaises(aio.AioRpcError) as exception_context:
            response = await self._async_channel.stream_unary(
                _common.ADHOC_METHOD
            )(request_iterator)
        self.assertEqual(
            grpc.StatusCode.UNKNOWN, exception_context.exception.code()
        )
    async def test_sync_stream_stream_success(self):
        @grpc.stream_stream_rpc_method_handler
        def echo_stream_stream(
            request_iterator: Iterable[bytes], unused_context
        ):
            for request in request_iterator:
                yield request
        self._adhoc_handlers.set_adhoc_handler(echo_stream_stream)
        request_iterator = iter([_REQUEST] * _NUM_STREAM_RESPONSES)
        call = self._async_channel.stream_stream(_common.ADHOC_METHOD)(
            request_iterator
        )
        async for response in call:
            self.assertEqual(_REQUEST, response)
    async def test_sync_stream_stream_error(self):
        @grpc.stream_stream_rpc_method_handler
        def echo_stream_stream(
            request_iterator: Iterable[bytes], unused_context
        ):
            for request in request_iterator:
                yield request
            raise RuntimeError("test")
        self._adhoc_handlers.set_adhoc_handler(echo_stream_stream)
        request_iterator = iter([_REQUEST] * _NUM_STREAM_RESPONSES)
        call = self._async_channel.stream_stream(_common.ADHOC_METHOD)(
            request_iterator
        )
        with self.assertRaises(aio.AioRpcError) as exception_context:
            async for response in call:
                self.assertEqual(_REQUEST, response)
        self.assertEqual(
            grpc.StatusCode.UNKNOWN, exception_context.exception.code()
        )
if __name__ == "__main__":
    # Verbose logging aids debugging of cross-stack scheduling issues.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 14,819
| 37.097686
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/_test_base.py
|
# Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import functools
import logging
from typing import Callable
import unittest
from grpc.experimental import aio
# Public names of this module.  Must be a sequence of strings: a bare string
# here would be iterated character-by-character by `from ... import *`,
# making the star-import fail.
__all__ = ("AioTestBase",)
# Non-test coroutine methods that should also be converted to run
# synchronously on the shared test event loop (see AioTestBase).
_COROUTINE_FUNCTION_ALLOWLIST = ["setUp", "tearDown"]
def _async_to_sync_decorator(f: Callable, loop: asyncio.AbstractEventLoop):
    """Wrap coroutine function *f* so calling it blocks on *loop*.

    The wrapper preserves *f*'s metadata (name, docstring) via
    functools.wraps and returns whatever the coroutine returns.
    """
    @functools.wraps(f)
    def _blocking_call(*args, **kwargs):
        coro = f(*args, **kwargs)
        return loop.run_until_complete(coro)
    return _blocking_call
def _get_default_loop(debug=True):
    """Return the current thread's event loop, creating one if necessary.

    Args:
        debug: Whether to enable asyncio debug mode on the returned loop.

    Returns:
        An asyncio event loop with debug mode set accordingly.
    """
    try:
        loop = asyncio.get_event_loop()
    except RuntimeError:
        # No usable loop in this thread (or the policy refuses to create one
        # implicitly): make a fresh loop and install it as the current one.
        # Previously this was a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit.
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
    # Set debug outside of try/finally: a `return` inside `finally` would
    # silently discard any unexpected exception.
    loop.set_debug(debug)
    return loop
# NOTE(gnossen) this test class can also be implemented with metaclass.
class AioTestBase(unittest.TestCase):
    """Base class that lets test cases be written as coroutine functions.

    Coroutine test methods (and setUp/tearDown) are transparently wrapped so
    they run to completion on a single shared event loop.
    """
    # NOTE(lidi) We need to pick a loop for entire testing phase, otherwise it
    # will trigger create new loops in new threads, leads to deadlock.
    _TEST_LOOP = _get_default_loop()
    @property
    def loop(self):
        # The single event loop shared by every test in the process.
        return self._TEST_LOOP
    def __getattribute__(self, name):
        """Overrides the loading logic to support coroutine functions."""
        attr = super().__getattribute__(name)
        # If possible, converts the coroutine into a sync function.
        if name.startswith("test_") or name in _COROUTINE_FUNCTION_ALLOWLIST:
            if asyncio.iscoroutinefunction(attr):
                return _async_to_sync_decorator(attr, self._TEST_LOOP)
        # For other attributes, let them pass.
        return attr
| 2,099
| 30.343284
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/secure_call_test.py
|
# Copyright 2020 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests the behaviour of the Call classes under a secure channel."""
import logging
import unittest
import grpc
from grpc.experimental import aio
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
from tests.unit import resources
from tests_aio.unit._test_base import AioTestBase
from tests_aio.unit._test_server import start_test_server
_SERVER_HOST_OVERRIDE = "foo.test.google.fr"
_NUM_STREAM_RESPONSES = 5
_RESPONSE_PAYLOAD_SIZE = 42
class _SecureCallMixin:
    """A Mixin to run the call tests over a secure channel."""
    async def setUp(self):
        # Server TLS identity and the client's root of trust come from the
        # shared test resources.
        server_credentials = grpc.ssl_server_credentials(
            [(resources.private_key(), resources.certificate_chain())]
        )
        channel_credentials = grpc.ssl_channel_credentials(
            resources.test_root_certificates()
        )
        self._server_address, self._server = await start_test_server(
            secure=True, server_credentials=server_credentials
        )
        # The test certificate is issued for _SERVER_HOST_OVERRIDE, so
        # override the name used for TLS host verification.
        channel_options = (
            (
                "grpc.ssl_target_name_override",
                _SERVER_HOST_OVERRIDE,
            ),
        )
        self._channel = aio.secure_channel(
            self._server_address, channel_credentials, channel_options
        )
        self._stub = test_pb2_grpc.TestServiceStub(self._channel)
    async def tearDown(self):
        await self._channel.close()
        await self._server.stop(None)
class TestUnaryUnarySecureCall(_SecureCallMixin, AioTestBase):
    """Exercises unary-unary RPCs over a TLS-protected channel."""
    async def test_call_ok_over_secure_channel(self):
        """A plain unary-unary RPC completes with OK over TLS."""
        rpc = self._stub.UnaryCall(messages_pb2.SimpleRequest())
        reply = await rpc
        self.assertIsInstance(reply, messages_pb2.SimpleResponse)
        self.assertEqual(await rpc.code(), grpc.StatusCode.OK)
    async def test_call_with_credentials(self):
        """Composite call credentials are accepted on a secure channel."""
        tokens = (
            grpc.access_token_call_credentials("abc"),
            grpc.access_token_call_credentials("def"),
        )
        composite = grpc.composite_call_credentials(*tokens)
        rpc = self._stub.UnaryCall(
            messages_pb2.SimpleRequest(), credentials=composite
        )
        reply = await rpc
        self.assertIsInstance(reply, messages_pb2.SimpleResponse)
class TestUnaryStreamSecureCall(_SecureCallMixin, AioTestBase):
    """unary_stream calls over a secure channel"""
    async def test_unary_stream_async_generator_secure(self):
        # Ask the server for a fixed number of fixed-size responses.
        request = messages_pb2.StreamingOutputCallRequest()
        request.response_parameters.extend(
            messages_pb2.ResponseParameters(
                size=_RESPONSE_PAYLOAD_SIZE,
            )
            for _ in range(_NUM_STREAM_RESPONSES)
        )
        call_credentials = grpc.composite_call_credentials(
            grpc.access_token_call_credentials("abc"),
            grpc.access_token_call_credentials("def"),
        )
        call = self._stub.StreamingOutputCall(
            request, credentials=call_credentials
        )
        # Every streamed response must carry the requested payload size.
        async for response in call:
            self.assertIsInstance(
                response, messages_pb2.StreamingOutputCallResponse
            )
            self.assertEqual(len(response.payload.body), _RESPONSE_PAYLOAD_SIZE)
        self.assertEqual(await call.code(), grpc.StatusCode.OK)
# Prepares the request that stream in a ping-pong manner.
# Each such request elicits exactly one response of _RESPONSE_PAYLOAD_SIZE
# bytes; shared by the stream-stream tests below.
_STREAM_OUTPUT_REQUEST_ONE_RESPONSE = messages_pb2.StreamingOutputCallRequest()
_STREAM_OUTPUT_REQUEST_ONE_RESPONSE.response_parameters.append(
    messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
)
class TestStreamStreamSecureCall(_SecureCallMixin, AioTestBase):
    """Full-duplex streaming calls over a secure channel."""
    # Number of ping-pong request/response rounds per test.
    _STREAM_ITERATIONS = 2
    async def test_async_generator_secure_channel(self):
        async def request_generator():
            for _ in range(self._STREAM_ITERATIONS):
                yield _STREAM_OUTPUT_REQUEST_ONE_RESPONSE
        call_credentials = grpc.composite_call_credentials(
            grpc.access_token_call_credentials("abc"),
            grpc.access_token_call_credentials("def"),
        )
        call = self._stub.FullDuplexCall(
            request_generator(), credentials=call_credentials
        )
        async for response in call:
            self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
        self.assertEqual(await call.code(), grpc.StatusCode.OK)
if __name__ == "__main__":
    # Verbose logging aids debugging of TLS handshake failures.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 5,079
| 34.034483
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/aio_rpc_error_test.py
|
# Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests AioRpcError class."""
import logging
import unittest
import grpc
from grpc.aio._call import AioRpcError
from grpc.experimental import aio
from tests_aio.unit._test_base import AioTestBase
_TEST_INITIAL_METADATA = aio.Metadata(
("initial metadata key", "initial metadata value")
)
_TEST_TRAILING_METADATA = aio.Metadata(
("trailing metadata key", "trailing metadata value")
)
_TEST_DEBUG_ERROR_STRING = "{This is a debug string}"
class TestAioRpcError(unittest.TestCase):
    """Verifies AioRpcError exposes exactly what it was constructed with."""
    def test_attributes(self):
        error = AioRpcError(
            grpc.StatusCode.CANCELLED,
            initial_metadata=_TEST_INITIAL_METADATA,
            trailing_metadata=_TEST_TRAILING_METADATA,
            details="details",
            debug_error_string=_TEST_DEBUG_ERROR_STRING,
        )
        # Each accessor must round-trip the corresponding constructor value.
        self.assertEqual(grpc.StatusCode.CANCELLED, error.code())
        self.assertEqual("details", error.details())
        self.assertEqual(_TEST_INITIAL_METADATA, error.initial_metadata())
        self.assertEqual(_TEST_TRAILING_METADATA, error.trailing_metadata())
        self.assertEqual(_TEST_DEBUG_ERROR_STRING, error.debug_error_string())
if __name__ == "__main__":
    # Default logging configuration is enough for this pure-object test.
    logging.basicConfig()
    unittest.main(verbosity=2)
| 1,929
| 31.711864
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/timeout_test.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests behavior of the timeout mechanism on client side."""
import asyncio
import datetime
import logging
import platform
import random
import unittest
import grpc
from grpc.experimental import aio
from tests_aio.unit import _common
from tests_aio.unit._test_base import AioTestBase
_SLEEP_TIME_UNIT_S = datetime.timedelta(seconds=1).total_seconds()
_TEST_SLEEPY_UNARY_UNARY = "/test/Test/SleepyUnaryUnary"
_TEST_SLEEPY_UNARY_STREAM = "/test/Test/SleepyUnaryStream"
_TEST_SLEEPY_STREAM_UNARY = "/test/Test/SleepyStreamUnary"
_TEST_SLEEPY_STREAM_STREAM = "/test/Test/SleepyStreamStream"
_REQUEST = b"\x00\x00\x00"
_RESPONSE = b"\x01\x01\x01"
async def _test_sleepy_unary_unary(unused_request, unused_context):
    """Unary-unary handler that sleeps one time unit before responding."""
    await asyncio.sleep(_SLEEP_TIME_UNIT_S)
    return _RESPONSE
async def _test_sleepy_unary_stream(unused_request, unused_context):
    """Unary-stream handler: first response immediate, second after a sleep."""
    yield _RESPONSE
    await asyncio.sleep(_SLEEP_TIME_UNIT_S)
    yield _RESPONSE
async def _test_sleepy_stream_unary(unused_request_iterator, context):
    """Stream-unary handler that sleeps between reading its two requests."""
    assert _REQUEST == await context.read()
    await asyncio.sleep(_SLEEP_TIME_UNIT_S)
    assert _REQUEST == await context.read()
    return _RESPONSE
async def _test_sleepy_stream_stream(unused_request_iterator, context):
    """Stream-stream handler that sleeps before writing its one response."""
    assert _REQUEST == await context.read()
    await asyncio.sleep(_SLEEP_TIME_UNIT_S)
    await context.write(_RESPONSE)
# Maps fully-qualified method paths to their deliberately slow handlers;
# _GenericHandler dispatches incoming RPCs through this table.
_ROUTING_TABLE = {
    _TEST_SLEEPY_UNARY_UNARY: grpc.unary_unary_rpc_method_handler(
        _test_sleepy_unary_unary
    ),
    _TEST_SLEEPY_UNARY_STREAM: grpc.unary_stream_rpc_method_handler(
        _test_sleepy_unary_stream
    ),
    _TEST_SLEEPY_STREAM_UNARY: grpc.stream_unary_rpc_method_handler(
        _test_sleepy_stream_unary
    ),
    _TEST_SLEEPY_STREAM_STREAM: grpc.stream_stream_rpc_method_handler(
        _test_sleepy_stream_stream
    ),
}
class _GenericHandler(grpc.GenericRpcHandler):
    """Dispatches incoming RPCs to the handlers in _ROUTING_TABLE."""
    def service(self, handler_call_details):
        # Unknown method paths map to None, which the server reports as
        # UNIMPLEMENTED.
        method = handler_call_details.method
        return _ROUTING_TABLE.get(method)
async def _start_test_server():
    """Start an in-process aio server on an ephemeral port.

    Returns:
        A (target, server) tuple where target is "localhost:<port>".
    """
    server = aio.server()
    port = server.add_insecure_port("[::]:0")
    server.add_generic_rpc_handlers((_GenericHandler(),))
    await server.start()
    return f"localhost:{port}", server
class TestTimeout(AioTestBase):
    """Client-side timeout behavior for all four RPC arities.

    The server handlers sleep for one time unit; success tests use a 2-unit
    timeout, deadline tests use a 0.5-unit timeout.
    """
    async def setUp(self):
        address, self._server = await _start_test_server()
        self._client = aio.insecure_channel(address)
        # The channel starts IDLE; get_state(True) also kicks off connecting.
        self.assertEqual(
            grpc.ChannelConnectivity.IDLE, self._client.get_state(True)
        )
        await _common.block_until_certain_state(
            self._client, grpc.ChannelConnectivity.READY
        )
    async def tearDown(self):
        await self._client.close()
        await self._server.stop(None)
    async def test_unary_unary_success_with_timeout(self):
        multicallable = self._client.unary_unary(_TEST_SLEEPY_UNARY_UNARY)
        call = multicallable(_REQUEST, timeout=2 * _SLEEP_TIME_UNIT_S)
        self.assertEqual(_RESPONSE, await call)
        self.assertEqual(grpc.StatusCode.OK, await call.code())
    async def test_unary_unary_deadline_exceeded(self):
        multicallable = self._client.unary_unary(_TEST_SLEEPY_UNARY_UNARY)
        call = multicallable(_REQUEST, timeout=0.5 * _SLEEP_TIME_UNIT_S)
        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call
        rpc_error = exception_context.exception
        self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
    async def test_unary_stream_success_with_timeout(self):
        multicallable = self._client.unary_stream(_TEST_SLEEPY_UNARY_STREAM)
        call = multicallable(_REQUEST, timeout=2 * _SLEEP_TIME_UNIT_S)
        self.assertEqual(_RESPONSE, await call.read())
        self.assertEqual(_RESPONSE, await call.read())
        self.assertEqual(grpc.StatusCode.OK, await call.code())
    async def test_unary_stream_deadline_exceeded(self):
        multicallable = self._client.unary_stream(_TEST_SLEEPY_UNARY_STREAM)
        call = multicallable(_REQUEST, timeout=0.5 * _SLEEP_TIME_UNIT_S)
        # First response arrives immediately; the second is delayed past the
        # deadline, so the second read() must fail.
        self.assertEqual(_RESPONSE, await call.read())
        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call.read()
        rpc_error = exception_context.exception
        self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
    async def test_stream_unary_success_with_timeout(self):
        multicallable = self._client.stream_unary(_TEST_SLEEPY_STREAM_UNARY)
        call = multicallable(timeout=2 * _SLEEP_TIME_UNIT_S)
        await call.write(_REQUEST)
        await call.write(_REQUEST)
        self.assertEqual(grpc.StatusCode.OK, await call.code())
    async def test_stream_unary_deadline_exceeded(self):
        multicallable = self._client.stream_unary(_TEST_SLEEPY_STREAM_UNARY)
        call = multicallable(timeout=0.5 * _SLEEP_TIME_UNIT_S)
        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call.write(_REQUEST)
            await call.write(_REQUEST)
            await call
        rpc_error = exception_context.exception
        self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
    async def test_stream_stream_success_with_timeout(self):
        multicallable = self._client.stream_stream(_TEST_SLEEPY_STREAM_STREAM)
        call = multicallable(timeout=2 * _SLEEP_TIME_UNIT_S)
        await call.write(_REQUEST)
        self.assertEqual(_RESPONSE, await call.read())
        self.assertEqual(grpc.StatusCode.OK, await call.code())
    async def test_stream_stream_deadline_exceeded(self):
        multicallable = self._client.stream_stream(_TEST_SLEEPY_STREAM_STREAM)
        call = multicallable(timeout=0.5 * _SLEEP_TIME_UNIT_S)
        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call.write(_REQUEST)
            await call.read()
        rpc_error = exception_context.exception
        self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
if __name__ == "__main__":
    # Verbose logging aids debugging of deadline propagation.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 6,668
| 35.442623
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/client_stream_unary_interceptor_test.py
|
# Copyright 2020 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import datetime
import logging
import unittest
import grpc
from grpc.experimental import aio
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
from tests.unit.framework.common import test_constants
from tests_aio.unit._common import CountingRequestIterator
from tests_aio.unit._common import inject_callbacks
from tests_aio.unit._constants import UNREACHABLE_TARGET
from tests_aio.unit._test_base import AioTestBase
from tests_aio.unit._test_server import start_test_server
_SHORT_TIMEOUT_S = 1.0
_NUM_STREAM_REQUESTS = 5
_REQUEST_PAYLOAD_SIZE = 7
_RESPONSE_INTERVAL_US = int(_SHORT_TIMEOUT_S * 1000 * 1000)
class _StreamUnaryInterceptorEmpty(aio.StreamUnaryClientInterceptor):
    """Pass-through interceptor: forwards the call without touching it."""
    async def intercept_stream_unary(
        self, continuation, client_call_details, request_iterator
    ):
        call = await continuation(client_call_details, request_iterator)
        return call
    def assert_in_final_state(self, test: unittest.TestCase):
        # Nothing to verify -- this interceptor keeps no state.
        pass
class _StreamUnaryInterceptorWithRequestIterator(
    aio.StreamUnaryClientInterceptor
):
    """Interceptor that wraps the request iterator to count outgoing requests."""
    async def intercept_stream_unary(
        self, continuation, client_call_details, request_iterator
    ):
        # Substitute a counting wrapper so tests can check how many requests
        # actually flowed through the interceptor.
        self.request_iterator = CountingRequestIterator(request_iterator)
        call = await continuation(client_call_details, self.request_iterator)
        return call
    def assert_in_final_state(self, test: unittest.TestCase):
        # Every request must have passed through the wrapped iterator.
        test.assertEqual(
            _NUM_STREAM_REQUESTS, self.request_iterator.request_cnt
        )
class TestStreamUnaryClientInterceptor(AioTestBase):
    async def setUp(self):
        # Fresh test server per test case; target is "host:port".
        self._server_target, self._server = await start_test_server()
    async def tearDown(self):
        # grace=None: stop immediately without waiting for in-flight RPCs.
        await self._server.stop(None)
    async def test_intercepts(self):
        """Both interceptor variants must leave a stream-unary RPC intact."""
        for interceptor_class in (
            _StreamUnaryInterceptorEmpty,
            _StreamUnaryInterceptorWithRequestIterator,
        ):
            with self.subTest(name=interceptor_class):
                interceptor = interceptor_class()
                channel = aio.insecure_channel(
                    self._server_target, interceptors=[interceptor]
                )
                stub = test_pb2_grpc.TestServiceStub(channel)
                payload = messages_pb2.Payload(
                    body=b"\0" * _REQUEST_PAYLOAD_SIZE
                )
                request = messages_pb2.StreamingInputCallRequest(
                    payload=payload
                )
                async def request_iterator():
                    for _ in range(_NUM_STREAM_REQUESTS):
                        yield request
                call = stub.StreamingInputCall(request_iterator())
                response = await call
                # The response reports the total bytes the server received.
                self.assertEqual(
                    _NUM_STREAM_REQUESTS * _REQUEST_PAYLOAD_SIZE,
                    response.aggregated_payload_size,
                )
                self.assertEqual(await call.code(), grpc.StatusCode.OK)
                self.assertEqual(await call.initial_metadata(), aio.Metadata())
                self.assertEqual(await call.trailing_metadata(), aio.Metadata())
                self.assertEqual(await call.details(), "")
                self.assertEqual(await call.debug_error_string(), "")
                # Cancelling an already-finished call is a no-op.
                self.assertEqual(call.cancel(), False)
                self.assertEqual(call.cancelled(), False)
                self.assertEqual(call.done(), True)
                interceptor.assert_in_final_state(self)
                await channel.close()
async def test_intercepts_using_write(self):
for interceptor_class in (
_StreamUnaryInterceptorEmpty,
_StreamUnaryInterceptorWithRequestIterator,
):
with self.subTest(name=interceptor_class):
interceptor = interceptor_class()
channel = aio.insecure_channel(
self._server_target, interceptors=[interceptor]
)
stub = test_pb2_grpc.TestServiceStub(channel)
payload = messages_pb2.Payload(
body=b"\0" * _REQUEST_PAYLOAD_SIZE
)
request = messages_pb2.StreamingInputCallRequest(
payload=payload
)
call = stub.StreamingInputCall()
for _ in range(_NUM_STREAM_REQUESTS):
await call.write(request)
await call.done_writing()
response = await call
self.assertEqual(
_NUM_STREAM_REQUESTS * _REQUEST_PAYLOAD_SIZE,
response.aggregated_payload_size,
)
self.assertEqual(await call.code(), grpc.StatusCode.OK)
self.assertEqual(await call.initial_metadata(), aio.Metadata())
self.assertEqual(await call.trailing_metadata(), aio.Metadata())
self.assertEqual(await call.details(), "")
self.assertEqual(await call.debug_error_string(), "")
self.assertEqual(call.cancel(), False)
self.assertEqual(call.cancelled(), False)
self.assertEqual(call.done(), True)
interceptor.assert_in_final_state(self)
await channel.close()
async def test_add_done_callback_interceptor_task_not_finished(self):
for interceptor_class in (
_StreamUnaryInterceptorEmpty,
_StreamUnaryInterceptorWithRequestIterator,
):
with self.subTest(name=interceptor_class):
interceptor = interceptor_class()
channel = aio.insecure_channel(
self._server_target, interceptors=[interceptor]
)
stub = test_pb2_grpc.TestServiceStub(channel)
payload = messages_pb2.Payload(
body=b"\0" * _REQUEST_PAYLOAD_SIZE
)
request = messages_pb2.StreamingInputCallRequest(
payload=payload
)
async def request_iterator():
for _ in range(_NUM_STREAM_REQUESTS):
yield request
call = stub.StreamingInputCall(request_iterator())
validation = inject_callbacks(call)
response = await call
await validation
await channel.close()
async def test_add_done_callback_interceptor_task_finished(self):
for interceptor_class in (
_StreamUnaryInterceptorEmpty,
_StreamUnaryInterceptorWithRequestIterator,
):
with self.subTest(name=interceptor_class):
interceptor = interceptor_class()
channel = aio.insecure_channel(
self._server_target, interceptors=[interceptor]
)
stub = test_pb2_grpc.TestServiceStub(channel)
payload = messages_pb2.Payload(
body=b"\0" * _REQUEST_PAYLOAD_SIZE
)
request = messages_pb2.StreamingInputCallRequest(
payload=payload
)
async def request_iterator():
for _ in range(_NUM_STREAM_REQUESTS):
yield request
call = stub.StreamingInputCall(request_iterator())
response = await call
validation = inject_callbacks(call)
await validation
await channel.close()
async def test_multiple_interceptors_request_iterator(self):
for interceptor_class in (
_StreamUnaryInterceptorEmpty,
_StreamUnaryInterceptorWithRequestIterator,
):
with self.subTest(name=interceptor_class):
interceptors = [interceptor_class(), interceptor_class()]
channel = aio.insecure_channel(
self._server_target, interceptors=interceptors
)
stub = test_pb2_grpc.TestServiceStub(channel)
payload = messages_pb2.Payload(
body=b"\0" * _REQUEST_PAYLOAD_SIZE
)
request = messages_pb2.StreamingInputCallRequest(
payload=payload
)
async def request_iterator():
for _ in range(_NUM_STREAM_REQUESTS):
yield request
call = stub.StreamingInputCall(request_iterator())
response = await call
self.assertEqual(
_NUM_STREAM_REQUESTS * _REQUEST_PAYLOAD_SIZE,
response.aggregated_payload_size,
)
self.assertEqual(await call.code(), grpc.StatusCode.OK)
self.assertEqual(await call.initial_metadata(), aio.Metadata())
self.assertEqual(await call.trailing_metadata(), aio.Metadata())
self.assertEqual(await call.details(), "")
self.assertEqual(await call.debug_error_string(), "")
self.assertEqual(call.cancel(), False)
self.assertEqual(call.cancelled(), False)
self.assertEqual(call.done(), True)
for interceptor in interceptors:
interceptor.assert_in_final_state(self)
await channel.close()
async def test_intercepts_request_iterator_rpc_error(self):
for interceptor_class in (
_StreamUnaryInterceptorEmpty,
_StreamUnaryInterceptorWithRequestIterator,
):
with self.subTest(name=interceptor_class):
channel = aio.insecure_channel(
UNREACHABLE_TARGET, interceptors=[interceptor_class()]
)
stub = test_pb2_grpc.TestServiceStub(channel)
payload = messages_pb2.Payload(
body=b"\0" * _REQUEST_PAYLOAD_SIZE
)
request = messages_pb2.StreamingInputCallRequest(
payload=payload
)
# When there is an error the request iterator is no longer
# consumed.
async def request_iterator():
for _ in range(_NUM_STREAM_REQUESTS):
yield request
call = stub.StreamingInputCall(request_iterator())
with self.assertRaises(aio.AioRpcError) as exception_context:
await call
self.assertEqual(
grpc.StatusCode.UNAVAILABLE,
exception_context.exception.code(),
)
self.assertTrue(call.done())
self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
await channel.close()
async def test_intercepts_request_iterator_rpc_error_using_write(self):
for interceptor_class in (
_StreamUnaryInterceptorEmpty,
_StreamUnaryInterceptorWithRequestIterator,
):
with self.subTest(name=interceptor_class):
channel = aio.insecure_channel(
UNREACHABLE_TARGET, interceptors=[interceptor_class()]
)
stub = test_pb2_grpc.TestServiceStub(channel)
payload = messages_pb2.Payload(
body=b"\0" * _REQUEST_PAYLOAD_SIZE
)
request = messages_pb2.StreamingInputCallRequest(
payload=payload
)
call = stub.StreamingInputCall()
# When there is an error during the write, exception is raised.
with self.assertRaises(asyncio.InvalidStateError):
for _ in range(_NUM_STREAM_REQUESTS):
await call.write(request)
with self.assertRaises(aio.AioRpcError) as exception_context:
await call
self.assertEqual(
grpc.StatusCode.UNAVAILABLE,
exception_context.exception.code(),
)
self.assertTrue(call.done())
self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
await channel.close()
async def test_cancel_before_rpc(self):
interceptor_reached = asyncio.Event()
wait_for_ever = self.loop.create_future()
class Interceptor(aio.StreamUnaryClientInterceptor):
async def intercept_stream_unary(
self, continuation, client_call_details, request_iterator
):
interceptor_reached.set()
await wait_for_ever
channel = aio.insecure_channel(
self._server_target, interceptors=[Interceptor()]
)
stub = test_pb2_grpc.TestServiceStub(channel)
payload = messages_pb2.Payload(body=b"\0" * _REQUEST_PAYLOAD_SIZE)
request = messages_pb2.StreamingInputCallRequest(payload=payload)
call = stub.StreamingInputCall()
self.assertFalse(call.cancelled())
self.assertFalse(call.done())
await interceptor_reached.wait()
self.assertTrue(call.cancel())
# When there is an error during the write, exception is raised.
with self.assertRaises(asyncio.InvalidStateError):
for _ in range(_NUM_STREAM_REQUESTS):
await call.write(request)
with self.assertRaises(asyncio.CancelledError):
await call
self.assertTrue(call.cancelled())
self.assertTrue(call.done())
self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
self.assertEqual(await call.initial_metadata(), None)
self.assertEqual(await call.trailing_metadata(), None)
await channel.close()
async def test_cancel_after_rpc(self):
interceptor_reached = asyncio.Event()
wait_for_ever = self.loop.create_future()
class Interceptor(aio.StreamUnaryClientInterceptor):
async def intercept_stream_unary(
self, continuation, client_call_details, request_iterator
):
call = await continuation(client_call_details, request_iterator)
interceptor_reached.set()
await wait_for_ever
channel = aio.insecure_channel(
self._server_target, interceptors=[Interceptor()]
)
stub = test_pb2_grpc.TestServiceStub(channel)
payload = messages_pb2.Payload(body=b"\0" * _REQUEST_PAYLOAD_SIZE)
request = messages_pb2.StreamingInputCallRequest(payload=payload)
call = stub.StreamingInputCall()
self.assertFalse(call.cancelled())
self.assertFalse(call.done())
await interceptor_reached.wait()
self.assertTrue(call.cancel())
# When there is an error during the write, exception is raised.
with self.assertRaises(asyncio.InvalidStateError):
for _ in range(_NUM_STREAM_REQUESTS):
await call.write(request)
with self.assertRaises(asyncio.CancelledError):
await call
self.assertTrue(call.cancelled())
self.assertTrue(call.done())
self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
self.assertEqual(await call.initial_metadata(), None)
self.assertEqual(await call.trailing_metadata(), None)
await channel.close()
async def test_cancel_while_writing(self):
# Test cancelation before making any write or after doing at least 1
for num_writes_before_cancel in (0, 1):
with self.subTest(
name="Num writes before cancel: {}".format(
num_writes_before_cancel
)
):
channel = aio.insecure_channel(
UNREACHABLE_TARGET,
interceptors=[_StreamUnaryInterceptorWithRequestIterator()],
)
stub = test_pb2_grpc.TestServiceStub(channel)
payload = messages_pb2.Payload(
body=b"\0" * _REQUEST_PAYLOAD_SIZE
)
request = messages_pb2.StreamingInputCallRequest(
payload=payload
)
call = stub.StreamingInputCall()
with self.assertRaises(asyncio.InvalidStateError):
for i in range(_NUM_STREAM_REQUESTS):
if i == num_writes_before_cancel:
self.assertTrue(call.cancel())
await call.write(request)
with self.assertRaises(asyncio.CancelledError):
await call
self.assertTrue(call.cancelled())
self.assertTrue(call.done())
self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
await channel.close()
async def test_cancel_by_the_interceptor(self):
class Interceptor(aio.StreamUnaryClientInterceptor):
async def intercept_stream_unary(
self, continuation, client_call_details, request_iterator
):
call = await continuation(client_call_details, request_iterator)
call.cancel()
return call
channel = aio.insecure_channel(
UNREACHABLE_TARGET, interceptors=[Interceptor()]
)
stub = test_pb2_grpc.TestServiceStub(channel)
payload = messages_pb2.Payload(body=b"\0" * _REQUEST_PAYLOAD_SIZE)
request = messages_pb2.StreamingInputCallRequest(payload=payload)
call = stub.StreamingInputCall()
with self.assertRaises(asyncio.InvalidStateError):
for i in range(_NUM_STREAM_REQUESTS):
await call.write(request)
with self.assertRaises(asyncio.CancelledError):
await call
self.assertTrue(call.cancelled())
self.assertTrue(call.done())
self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
await channel.close()
async def test_exception_raised_by_interceptor(self):
class InterceptorException(Exception):
pass
class Interceptor(aio.StreamUnaryClientInterceptor):
async def intercept_stream_unary(
self, continuation, client_call_details, request_iterator
):
raise InterceptorException
channel = aio.insecure_channel(
UNREACHABLE_TARGET, interceptors=[Interceptor()]
)
stub = test_pb2_grpc.TestServiceStub(channel)
payload = messages_pb2.Payload(body=b"\0" * _REQUEST_PAYLOAD_SIZE)
request = messages_pb2.StreamingInputCallRequest(payload=payload)
call = stub.StreamingInputCall()
with self.assertRaises(InterceptorException):
for i in range(_NUM_STREAM_REQUESTS):
await call.write(request)
with self.assertRaises(InterceptorException):
await call
await channel.close()
async def test_intercepts_prohibit_mixing_style(self):
channel = aio.insecure_channel(
self._server_target, interceptors=[_StreamUnaryInterceptorEmpty()]
)
stub = test_pb2_grpc.TestServiceStub(channel)
payload = messages_pb2.Payload(body=b"\0" * _REQUEST_PAYLOAD_SIZE)
request = messages_pb2.StreamingInputCallRequest(payload=payload)
async def request_iterator():
for _ in range(_NUM_STREAM_REQUESTS):
yield request
call = stub.StreamingInputCall(request_iterator())
with self.assertRaises(grpc._cython.cygrpc.UsageError):
await call.write(request)
with self.assertRaises(grpc._cython.cygrpc.UsageError):
await call.done_writing()
await channel.close()
if __name__ == "__main__":
    # Debug-level logging helps diagnose flaky aio interceptor failures.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 20,665
| 35.969589
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/context_peer_test.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Testing the server context ability to access peer info."""
import asyncio
import logging
import os
from typing import Callable, Iterable, Sequence, Tuple
import unittest
import grpc
from grpc.experimental import aio
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
from tests.unit.framework.common import test_constants
from tests_aio.unit import _common
from tests_aio.unit._test_base import AioTestBase
from tests_aio.unit._test_server import TestServiceServicer
from tests_aio.unit._test_server import start_test_server
# Request payload and fully-qualified method path used by the test handler.
_REQUEST = b"\x03\x07"
_TEST_METHOD = "/test/UnaryUnary"
class TestContextPeer(AioTestBase):
    """Verifies the server context exposes the client's peer address."""

    async def test_peer(self):
        @grpc.unary_unary_rpc_method_handler
        async def check_peer_unary_unary(
            request: bytes, context: aio.ServicerContext
        ):
            self.assertEqual(_REQUEST, request)
            # The peer address could be ipv4 or ipv6
            self.assertIn("ip", context.peer())
            return request

        # Creates a server
        server = aio.server()
        handlers = grpc.method_handlers_generic_handler(
            "test", {"UnaryUnary": check_peer_unary_unary}
        )
        server.add_generic_rpc_handlers((handlers,))
        port = server.add_insecure_port("[::]:0")
        await server.start()

        # Creates a channel
        async with aio.insecure_channel("localhost:%d" % port) as channel:
            response = await channel.unary_unary(_TEST_METHOD)(_REQUEST)
            self.assertEqual(_REQUEST, response)

        await server.stop(None)
if __name__ == "__main__":
    # Debug-level logging for easier troubleshooting when run standalone.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 2,295
| 32.764706
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/channel_argument_test.py
|
# Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests behavior around the Core channel arguments."""
import asyncio
import errno
import logging
import platform
import random
import unittest
import grpc
from grpc.experimental import aio
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
from tests.unit.framework import common
from tests_aio.unit._test_base import AioTestBase
from tests_aio.unit._test_server import start_test_server
# Fixed seed so the random SO_REUSEPORT option choice is reproducible.
_RANDOM_SEED = 42

_ENABLE_REUSE_PORT = "SO_REUSEPORT enabled"
_DISABLE_REUSE_PORT = "SO_REUSEPORT disabled"
_SOCKET_OPT_SO_REUSEPORT = "grpc.so_reuseport"
# (label, channel-args) pairs toggling SO_REUSEPORT on server creation.
_OPTIONS = (
    (_ENABLE_REUSE_PORT, ((_SOCKET_OPT_SO_REUSEPORT, 1),)),
    (_DISABLE_REUSE_PORT, ((_SOCKET_OPT_SO_REUSEPORT, 0),)),
)

# Number of servers spun up concurrently in the reuse-port test.
_NUM_SERVER_CREATED = 5

_GRPC_ARG_MAX_RECEIVE_MESSAGE_LENGTH = "grpc.max_receive_message_length"
_MAX_MESSAGE_LENGTH = 1024

# errno values that signal the port was already bound (reuse disabled).
_ADDRESS_TOKEN_ERRNO = errno.EADDRINUSE, errno.ENOSR
class _TestPointerWrapper(object):
def __int__(self):
return 123456
# Valid channel arguments covering every accepted value type
# (bytes, str, int, and pointer-like objects implementing __int__).
_TEST_CHANNEL_ARGS = (
    ("arg1", b"bytes_val"),
    ("arg2", "str_val"),
    ("arg3", 1),
    (b"arg4", "str_val"),
    ("arg6", _TestPointerWrapper()),
)

# Malformed `options` values that must be rejected by channel creation.
_INVALID_TEST_CHANNEL_ARGS = [
    {"foo": "bar"},
    (("key",),),
    "str",
]
async def test_if_reuse_port_enabled(server: aio.Server) -> bool:
    """Probe whether the started server's port can be bound a second time.

    Returns True if a second bind to the same port succeeds (SO_REUSEPORT
    effectively enabled), False if it fails with an address-in-use errno.
    Any other OSError is re-raised.
    """
    port = server.add_insecure_port("localhost:0")
    await server.start()

    try:
        with common.bound_socket(
            bind_address="localhost",
            port=port,
            listen=False,
        ) as (unused_host, bound_port):
            assert bound_port == port
    except OSError as e:
        if e.errno in _ADDRESS_TOKEN_ERRNO:
            return False
        else:
            logging.exception(e)
            raise
    else:
        return True
class TestChannelArgument(AioTestBase):
    """Tests Core channel-argument handling on the aio stack."""

    async def setUp(self):
        random.seed(_RANDOM_SEED)

    @unittest.skipIf(
        platform.system() == "Windows",
        "SO_REUSEPORT only available in Linux-like OS.",
    )
    @unittest.skipIf(
        "aarch64" in platform.machine(),
        "SO_REUSEPORT needs to be enabled in Core's port.h.",
    )
    async def test_server_so_reuse_port_is_set_properly(self):
        """SO_REUSEPORT channel arg is honored on server sockets."""

        async def test_body():
            # Randomly pick enabled/disabled and verify the observed socket
            # behavior matches the requested option.
            fact, options = random.choice(_OPTIONS)
            server = aio.server(options=options)
            try:
                result = await test_if_reuse_port_enabled(server)
                if fact == _ENABLE_REUSE_PORT and not result:
                    self.fail(
                        "Enabled reuse port in options, but not observed in"
                        " socket"
                    )
                elif fact == _DISABLE_REUSE_PORT and result:
                    self.fail(
                        "Disabled reuse port in options, but observed in socket"
                    )
            finally:
                await server.stop(None)

        # Creating a lot of servers concurrently
        await asyncio.gather(*(test_body() for _ in range(_NUM_SERVER_CREATED)))

    async def test_client(self):
        """Valid channel args are accepted on the client side."""
        # Do not segfault, or raise exception!
        channel = aio.insecure_channel("[::]:0", options=_TEST_CHANNEL_ARGS)
        await channel.close()

    async def test_server(self):
        """Valid channel args are accepted on the server side."""
        # Do not segfault, or raise exception!
        server = aio.server(options=_TEST_CHANNEL_ARGS)
        await server.stop(None)

    async def test_invalid_client_args(self):
        """Malformed channel args raise ValueError/TypeError."""
        for invalid_arg in _INVALID_TEST_CHANNEL_ARGS:
            self.assertRaises(
                (ValueError, TypeError),
                aio.insecure_channel,
                "[::]:0",
                options=invalid_arg,
            )

    async def test_max_message_length_applied(self):
        """grpc.max_receive_message_length rejects oversized responses."""
        address, server = await start_test_server()

        async with aio.insecure_channel(
            address,
            options=(
                (_GRPC_ARG_MAX_RECEIVE_MESSAGE_LENGTH, _MAX_MESSAGE_LENGTH),
            ),
        ) as channel:
            stub = test_pb2_grpc.TestServiceStub(channel)

            request = messages_pb2.StreamingOutputCallRequest()
            # First request will pass
            request.response_parameters.append(
                messages_pb2.ResponseParameters(
                    size=_MAX_MESSAGE_LENGTH // 2,
                )
            )
            # Second request should fail
            request.response_parameters.append(
                messages_pb2.ResponseParameters(
                    size=_MAX_MESSAGE_LENGTH * 2,
                )
            )

            call = stub.StreamingOutputCall(request)

            response = await call.read()
            self.assertEqual(
                _MAX_MESSAGE_LENGTH // 2, len(response.payload.body)
            )

            with self.assertRaises(aio.AioRpcError) as exception_context:
                await call.read()
            rpc_error = exception_context.exception
            self.assertEqual(
                grpc.StatusCode.RESOURCE_EXHAUSTED, rpc_error.code()
            )
            self.assertIn(str(_MAX_MESSAGE_LENGTH), rpc_error.details())

            self.assertEqual(
                grpc.StatusCode.RESOURCE_EXHAUSTED, await call.code()
            )

        await server.stop(None)
if __name__ == "__main__":
    # Debug-level logging for easier troubleshooting when run standalone.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 5,922
| 29.689119
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/auth_context_test.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Porting auth context tests from sync stack."""
import logging
import pickle
import unittest
import grpc
from grpc.experimental import aio
from grpc.experimental import session_cache
from tests.unit import resources
from tests_aio.unit._test_base import AioTestBase
# Fixed request/response payloads and the test method path.
_REQUEST = b"\x00\x00\x00"
_RESPONSE = b"\x00\x00\x00"

_UNARY_UNARY = "/test/UnaryUnary"

# Host override so the client accepts the test certificate's CN.
_SERVER_HOST_OVERRIDE = "foo.test.google.fr"
# Subject-alternative names expected on the test client certificate.
_CLIENT_IDS = (
    b"*.test.google.fr",
    b"waterzooi.test.google.be",
    b"*.test.youtube.com",
    b"192.168.1.3",
)
# Keys of the pickled auth-properties dict returned by the handler.
_ID = "id"
_ID_KEY = "id_key"
_AUTH_CTX = "auth_ctx"

_PRIVATE_KEY = resources.private_key()
_CERTIFICATE_CHAIN = resources.certificate_chain()
_TEST_ROOT_CERTIFICATES = resources.test_root_certificates()
_SERVER_CERTS = ((_PRIVATE_KEY, _CERTIFICATE_CHAIN),)
_PROPERTY_OPTIONS = (
    (
        "grpc.ssl_target_name_override",
        _SERVER_HOST_OVERRIDE,
    ),
)
async def handle_unary_unary(
    unused_request: bytes, servicer_context: aio.ServicerContext
):
    """Return the caller's auth properties as a pickled dict.

    The dict maps _ID/_ID_KEY/_AUTH_CTX to the peer identities, the
    identity key, and the full auth context of the invoking client.
    """
    auth_snapshot = {}
    auth_snapshot[_ID] = servicer_context.peer_identities()
    auth_snapshot[_ID_KEY] = servicer_context.peer_identity_key()
    auth_snapshot[_AUTH_CTX] = servicer_context.auth_context()
    return pickle.dumps(auth_snapshot)
class TestAuthContext(AioTestBase):
    """Auth-context tests ported from the synchronous stack."""

    async def test_insecure(self):
        """Insecure channels report no identity and 'insecure' transport."""
        handler = grpc.method_handlers_generic_handler(
            "test",
            {
                "UnaryUnary": grpc.unary_unary_rpc_method_handler(
                    handle_unary_unary
                )
            },
        )
        server = aio.server()
        server.add_generic_rpc_handlers((handler,))
        port = server.add_insecure_port("[::]:0")
        await server.start()

        async with aio.insecure_channel("localhost:%d" % port) as channel:
            response = await channel.unary_unary(_UNARY_UNARY)(_REQUEST)
        await server.stop(None)

        auth_data = pickle.loads(response)
        self.assertIsNone(auth_data[_ID])
        self.assertIsNone(auth_data[_ID_KEY])
        self.assertDictEqual(
            {
                "security_level": [b"TSI_SECURITY_NONE"],
                "transport_security_type": [b"insecure"],
            },
            auth_data[_AUTH_CTX],
        )

    async def test_secure_no_cert(self):
        """TLS without a client cert yields no identity but 'ssl' transport."""
        handler = grpc.method_handlers_generic_handler(
            "test",
            {
                "UnaryUnary": grpc.unary_unary_rpc_method_handler(
                    handle_unary_unary
                )
            },
        )
        server = aio.server()
        server.add_generic_rpc_handlers((handler,))
        server_cred = grpc.ssl_server_credentials(_SERVER_CERTS)
        port = server.add_secure_port("[::]:0", server_cred)
        await server.start()

        channel_creds = grpc.ssl_channel_credentials(
            root_certificates=_TEST_ROOT_CERTIFICATES
        )
        channel = aio.secure_channel(
            "localhost:{}".format(port),
            channel_creds,
            options=_PROPERTY_OPTIONS,
        )
        response = await channel.unary_unary(_UNARY_UNARY)(_REQUEST)
        await channel.close()
        await server.stop(None)

        auth_data = pickle.loads(response)
        self.assertIsNone(auth_data[_ID])
        self.assertIsNone(auth_data[_ID_KEY])
        self.assertDictEqual(
            {
                "security_level": [b"TSI_PRIVACY_AND_INTEGRITY"],
                "transport_security_type": [b"ssl"],
                "ssl_session_reused": [b"false"],
            },
            auth_data[_AUTH_CTX],
        )

    async def test_secure_client_cert(self):
        """Mutual TLS exposes the client cert's SANs as peer identities."""
        handler = grpc.method_handlers_generic_handler(
            "test",
            {
                "UnaryUnary": grpc.unary_unary_rpc_method_handler(
                    handle_unary_unary
                )
            },
        )
        server = aio.server()
        server.add_generic_rpc_handlers((handler,))
        server_cred = grpc.ssl_server_credentials(
            _SERVER_CERTS,
            root_certificates=_TEST_ROOT_CERTIFICATES,
            require_client_auth=True,
        )
        port = server.add_secure_port("[::]:0", server_cred)
        await server.start()

        channel_creds = grpc.ssl_channel_credentials(
            root_certificates=_TEST_ROOT_CERTIFICATES,
            private_key=_PRIVATE_KEY,
            certificate_chain=_CERTIFICATE_CHAIN,
        )
        channel = aio.secure_channel(
            "localhost:{}".format(port),
            channel_creds,
            options=_PROPERTY_OPTIONS,
        )

        response = await channel.unary_unary(_UNARY_UNARY)(_REQUEST)
        await channel.close()
        await server.stop(None)

        auth_data = pickle.loads(response)
        auth_ctx = auth_data[_AUTH_CTX]
        self.assertCountEqual(_CLIENT_IDS, auth_data[_ID])
        self.assertEqual("x509_subject_alternative_name", auth_data[_ID_KEY])
        self.assertSequenceEqual([b"ssl"], auth_ctx["transport_security_type"])
        self.assertSequenceEqual(
            [b"*.test.google.com"], auth_ctx["x509_common_name"]
        )

    async def _do_one_shot_client_rpc(
        self, channel_creds, channel_options, port, expect_ssl_session_reused
    ):
        """Open a fresh channel, make one RPC, assert session-reuse flag."""
        channel = aio.secure_channel(
            "localhost:{}".format(port), channel_creds, options=channel_options
        )
        response = await channel.unary_unary(_UNARY_UNARY)(_REQUEST)
        auth_data = pickle.loads(response)
        self.assertEqual(
            expect_ssl_session_reused,
            auth_data[_AUTH_CTX]["ssl_session_reused"],
        )
        await channel.close()

    async def test_session_resumption(self):
        """TLS session tickets are resumed across channels via the cache."""
        # Set up a secure server
        handler = grpc.method_handlers_generic_handler(
            "test",
            {
                "UnaryUnary": grpc.unary_unary_rpc_method_handler(
                    handle_unary_unary
                )
            },
        )
        server = aio.server()
        server.add_generic_rpc_handlers((handler,))
        server_cred = grpc.ssl_server_credentials(_SERVER_CERTS)
        port = server.add_secure_port("[::]:0", server_cred)
        await server.start()

        # Create a cache for TLS session tickets
        cache = session_cache.ssl_session_cache_lru(1)
        channel_creds = grpc.ssl_channel_credentials(
            root_certificates=_TEST_ROOT_CERTIFICATES
        )
        channel_options = _PROPERTY_OPTIONS + (
            ("grpc.ssl_session_cache", cache),
        )

        # Initial connection has no session to resume
        await self._do_one_shot_client_rpc(
            channel_creds,
            channel_options,
            port,
            expect_ssl_session_reused=[b"false"],
        )

        # Subsequent connections resume sessions
        await self._do_one_shot_client_rpc(
            channel_creds,
            channel_options,
            port,
            expect_ssl_session_reused=[b"true"],
        )
        await server.stop(None)
if __name__ == "__main__":
    # Debug-level logging for easier troubleshooting when run standalone.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main()
| 7,668
| 31.087866
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/done_callback_test.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Testing the done callbacks mechanism."""
import asyncio
import logging
import unittest
import grpc
from grpc.experimental import aio
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
from tests_aio.unit._common import inject_callbacks
from tests_aio.unit._test_base import AioTestBase
from tests_aio.unit._test_server import start_test_server
# Stream lengths and payload sizes used across the done-callback tests.
_NUM_STREAM_RESPONSES = 5
_REQUEST_PAYLOAD_SIZE = 7
_RESPONSE_PAYLOAD_SIZE = 42
# Fixed request/response bodies for the generic handlers below.
_REQUEST = b"\x01\x02\x03"
_RESPONSE = b"\x04\x05\x06"
_TEST_METHOD = "/test/Test"
# Unregistered method path, used to provoke UNIMPLEMENTED.
_FAKE_METHOD = "/test/Fake"
class TestClientSideDoneCallback(AioTestBase):
    """Done callbacks fire on client-side calls for every RPC arity."""

    async def setUp(self):
        address, self._server = await start_test_server()
        self._channel = aio.insecure_channel(address)
        self._stub = test_pb2_grpc.TestServiceStub(self._channel)

    async def tearDown(self):
        await self._channel.close()
        await self._server.stop(None)

    async def test_add_after_done(self):
        """Callbacks added after completion still fire."""
        call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
        self.assertEqual(grpc.StatusCode.OK, await call.code())

        validation = inject_callbacks(call)
        await validation

    async def test_unary_unary(self):
        """Callbacks fire on a unary-unary call."""
        call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
        validation = inject_callbacks(call)

        self.assertEqual(grpc.StatusCode.OK, await call.code())

        await validation

    async def test_unary_stream(self):
        """Callbacks fire on a unary-stream call."""
        request = messages_pb2.StreamingOutputCallRequest()
        for _ in range(_NUM_STREAM_RESPONSES):
            request.response_parameters.append(
                messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
            )

        call = self._stub.StreamingOutputCall(request)
        validation = inject_callbacks(call)

        response_cnt = 0
        async for response in call:
            response_cnt += 1
            self.assertIsInstance(
                response, messages_pb2.StreamingOutputCallResponse
            )
            self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))

        self.assertEqual(_NUM_STREAM_RESPONSES, response_cnt)
        self.assertEqual(grpc.StatusCode.OK, await call.code())

        await validation

    async def test_stream_unary(self):
        """Callbacks fire on a stream-unary call."""
        payload = messages_pb2.Payload(body=b"\0" * _REQUEST_PAYLOAD_SIZE)
        request = messages_pb2.StreamingInputCallRequest(payload=payload)

        async def gen():
            for _ in range(_NUM_STREAM_RESPONSES):
                yield request

        call = self._stub.StreamingInputCall(gen())
        validation = inject_callbacks(call)

        response = await call
        self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
        self.assertEqual(
            _NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
            response.aggregated_payload_size,
        )

        self.assertEqual(grpc.StatusCode.OK, await call.code())
        await validation

    async def test_stream_stream(self):
        """Callbacks fire on a stream-stream call."""
        call = self._stub.FullDuplexCall()
        validation = inject_callbacks(call)

        request = messages_pb2.StreamingOutputCallRequest()
        request.response_parameters.append(
            messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
        )

        for _ in range(_NUM_STREAM_RESPONSES):
            await call.write(request)
            response = await call.read()
            self.assertIsInstance(
                response, messages_pb2.StreamingOutputCallResponse
            )
            self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))

        await call.done_writing()

        self.assertEqual(grpc.StatusCode.OK, await call.code())
        await validation
class TestServerSideDoneCallback(AioTestBase):
async def setUp(self):
self._server = aio.server()
port = self._server.add_insecure_port("[::]:0")
self._channel = aio.insecure_channel("localhost:%d" % port)
async def tearDown(self):
await self._channel.close()
await self._server.stop(None)
async def _register_method_handler(self, method_handler):
"""Registers method handler and starts the server"""
generic_handler = grpc.method_handlers_generic_handler(
"test",
dict(Test=method_handler),
)
self._server.add_generic_rpc_handlers((generic_handler,))
await self._server.start()
async def test_unary_unary(self):
validation_future = self.loop.create_future()
async def test_handler(request: bytes, context: aio.ServicerContext):
self.assertEqual(_REQUEST, request)
validation_future.set_result(inject_callbacks(context))
return _RESPONSE
await self._register_method_handler(
grpc.unary_unary_rpc_method_handler(test_handler)
)
response = await self._channel.unary_unary(_TEST_METHOD)(_REQUEST)
self.assertEqual(_RESPONSE, response)
validation = await validation_future
await validation
async def test_unary_stream(self):
validation_future = self.loop.create_future()
async def test_handler(request: bytes, context: aio.ServicerContext):
self.assertEqual(_REQUEST, request)
validation_future.set_result(inject_callbacks(context))
for _ in range(_NUM_STREAM_RESPONSES):
yield _RESPONSE
await self._register_method_handler(
grpc.unary_stream_rpc_method_handler(test_handler)
)
call = self._channel.unary_stream(_TEST_METHOD)(_REQUEST)
async for response in call:
self.assertEqual(_RESPONSE, response)
validation = await validation_future
await validation
async def test_stream_unary(self):
validation_future = self.loop.create_future()
async def test_handler(request_iterator, context: aio.ServicerContext):
validation_future.set_result(inject_callbacks(context))
async for request in request_iterator:
self.assertEqual(_REQUEST, request)
return _RESPONSE
await self._register_method_handler(
grpc.stream_unary_rpc_method_handler(test_handler)
)
call = self._channel.stream_unary(_TEST_METHOD)()
for _ in range(_NUM_STREAM_RESPONSES):
await call.write(_REQUEST)
await call.done_writing()
self.assertEqual(_RESPONSE, await call)
validation = await validation_future
await validation
async def test_stream_stream(self):
validation_future = self.loop.create_future()
async def test_handler(request_iterator, context: aio.ServicerContext):
validation_future.set_result(inject_callbacks(context))
async for request in request_iterator:
self.assertEqual(_REQUEST, request)
return _RESPONSE
await self._register_method_handler(
grpc.stream_stream_rpc_method_handler(test_handler)
)
call = self._channel.stream_stream(_TEST_METHOD)()
for _ in range(_NUM_STREAM_RESPONSES):
await call.write(_REQUEST)
await call.done_writing()
async for response in call:
self.assertEqual(_RESPONSE, response)
validation = await validation_future
await validation
async def test_error_in_handler(self):
    """Errors in the handler still triggers callbacks."""
    validation_future = self.loop.create_future()

    async def test_handler(request: bytes, context: aio.ServicerContext):
        self.assertEqual(_REQUEST, request)
        validation_future.set_result(inject_callbacks(context))
        # Deliberate crash: the server maps this to StatusCode.UNKNOWN.
        raise RuntimeError("A test RuntimeError")

    await self._register_method_handler(
        grpc.unary_unary_rpc_method_handler(test_handler)
    )
    with self.assertRaises(aio.AioRpcError) as exception_context:
        await self._channel.unary_unary(_TEST_METHOD)(_REQUEST)
    rpc_error = exception_context.exception
    self.assertEqual(grpc.StatusCode.UNKNOWN, rpc_error.code())

    # The callbacks must run even though the RPC failed.
    validation = await validation_future
    await validation
async def test_error_in_callback(self):
    """Errors in the callback won't be propagated to client."""
    validation_future = self.loop.create_future()

    async def test_handler(request: bytes, context: aio.ServicerContext):
        self.assertEqual(_REQUEST, request)

        def exception_raiser(unused_context):
            raise RuntimeError("A test RuntimeError")

        # The crashing callback is registered FIRST, so the well-behaved
        # callbacks injected below never get a chance to run.
        context.add_done_callback(exception_raiser)
        validation_future.set_result(inject_callbacks(context))
        return _RESPONSE

    await self._register_method_handler(
        grpc.unary_unary_rpc_method_handler(test_handler)
    )
    response = await self._channel.unary_unary(_TEST_METHOD)(_REQUEST)
    self.assertEqual(_RESPONSE, response)

    # Following callbacks won't be invoked, if one of the callback crashed.
    validation = await validation_future
    with self.assertRaises(asyncio.TimeoutError):
        await validation

    # Invoke RPC one more time to ensure the toxic callback won't break the
    # server.
    with self.assertRaises(aio.AioRpcError) as exception_context:
        await self._channel.unary_unary(_FAKE_METHOD)(_REQUEST)
    rpc_error = exception_context.exception
    self.assertEqual(grpc.StatusCode.UNIMPLEMENTED, rpc_error.code())
if __name__ == "__main__":
    # Run this module's tests directly with verbose, debug-level output.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 10,325
| 34.979094
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/__init__.py
|
# Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 581
| 40.571429
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/_test_server.py
|
# Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import datetime
import grpc
from grpc.experimental import aio
from src.proto.grpc.testing import empty_pb2
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
from tests.unit import resources
from tests_aio.unit import _constants
_INITIAL_METADATA_KEY = "x-grpc-test-echo-initial"
_TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin"
async def _maybe_echo_metadata(servicer_context):
    """Echo request metadata back to the client when the echo keys are set."""
    received = dict(servicer_context.invocation_metadata())
    if _INITIAL_METADATA_KEY in received:
        # Echo the value back as initial (header) metadata.
        await servicer_context.send_initial_metadata(
            ((_INITIAL_METADATA_KEY, received[_INITIAL_METADATA_KEY]),)
        )
    if _TRAILING_METADATA_KEY in received:
        # Echo the value back as trailing metadata.
        servicer_context.set_trailing_metadata(
            ((_TRAILING_METADATA_KEY, received[_TRAILING_METADATA_KEY]),)
        )
async def _maybe_echo_status(
    request: messages_pb2.SimpleRequest, servicer_context
):
    """Abort the RPC with the status embedded in the request, if any."""
    if not request.HasField("response_status"):
        return
    status = request.response_status
    await servicer_context.abort(status.code, status.message)
class TestServiceServicer(test_pb2_grpc.TestServiceServicer):
    """AsyncIO TestService implementation used by the aio unit tests."""

    async def UnaryCall(self, request, context):
        # Optionally echo metadata/status before answering, as demanded
        # by the request (see the helper functions above).
        await _maybe_echo_metadata(context)
        await _maybe_echo_status(request, context)
        return messages_pb2.SimpleResponse(
            payload=messages_pb2.Payload(
                type=messages_pb2.COMPRESSABLE,
                body=b"\x00" * request.response_size,
            )
        )

    async def EmptyCall(self, request, context):
        return empty_pb2.Empty()

    async def StreamingOutputCall(
        self, request: messages_pb2.StreamingOutputCallRequest, unused_context
    ):
        # Emit one response per response_parameters entry, optionally
        # delaying each by interval_us microseconds.
        for response_parameters in request.response_parameters:
            if response_parameters.interval_us != 0:
                await asyncio.sleep(
                    datetime.timedelta(
                        microseconds=response_parameters.interval_us
                    ).total_seconds()
                )
            if response_parameters.size != 0:
                yield messages_pb2.StreamingOutputCallResponse(
                    payload=messages_pb2.Payload(
                        type=request.response_type,
                        body=b"\x00" * response_parameters.size,
                    )
                )
            else:
                # Zero size -> response with no payload at all.
                yield messages_pb2.StreamingOutputCallResponse()

    # Next methods are extra ones that are registered programmatically
    # when the server is instantiated. They are not being provided by
    # the proto file.
    async def UnaryCallWithSleep(self, unused_request, unused_context):
        await asyncio.sleep(_constants.UNARY_CALL_WITH_SLEEP_VALUE)
        return messages_pb2.SimpleResponse()

    async def StreamingInputCall(self, request_async_iterator, unused_context):
        # Sum the payload sizes across the entire request stream.
        aggregate_size = 0
        async for request in request_async_iterator:
            if request.payload is not None and request.payload.body:
                aggregate_size += len(request.payload.body)
        return messages_pb2.StreamingInputCallResponse(
            aggregated_payload_size=aggregate_size
        )

    async def FullDuplexCall(self, request_async_iterator, context):
        await _maybe_echo_metadata(context)
        async for request in request_async_iterator:
            await _maybe_echo_status(request, context)
            for response_parameters in request.response_parameters:
                if response_parameters.interval_us != 0:
                    await asyncio.sleep(
                        datetime.timedelta(
                            microseconds=response_parameters.interval_us
                        ).total_seconds()
                    )
                if response_parameters.size != 0:
                    yield messages_pb2.StreamingOutputCallResponse(
                        payload=messages_pb2.Payload(
                            type=request.payload.type,
                            body=b"\x00" * response_parameters.size,
                        )
                    )
                else:
                    yield messages_pb2.StreamingOutputCallResponse()
def _create_extra_generic_handler(servicer: TestServiceServicer):
    """Build a generic handler exposing test-only methods absent from the proto."""
    # Register programmatically the extra methods used during the tests.
    sleep_handler = grpc.unary_unary_rpc_method_handler(
        servicer.UnaryCallWithSleep,
        request_deserializer=messages_pb2.SimpleRequest.FromString,
        response_serializer=messages_pb2.SimpleResponse.SerializeToString,
    )
    return grpc.method_handlers_generic_handler(
        "grpc.testing.TestService",
        {"UnaryCallWithSleep": sleep_handler},
    )
async def start_test_server(
    port=0, secure=False, server_credentials=None, interceptors=None
):
    """Start an aio test server and return ("localhost:<port>", server).

    Args:
      port: Port to bind; 0 picks an ephemeral port.
      secure: When True, bind a secure port (self-signed test credentials
        are created when server_credentials is None).
      server_credentials: Optional grpc.ServerCredentials for the secure port.
      interceptors: Optional server interceptors.
    """
    server = aio.server(
        options=(("grpc.so_reuseport", 0),), interceptors=interceptors
    )
    servicer = TestServiceServicer()
    test_pb2_grpc.add_TestServiceServicer_to_server(servicer, server)

    # Wire in the extra methods not declared in the proto file.
    server.add_generic_rpc_handlers((_create_extra_generic_handler(servicer),))

    if secure:
        if server_credentials is None:
            server_credentials = grpc.ssl_server_credentials(
                [(resources.private_key(), resources.certificate_chain())]
            )
        port = server.add_secure_port("[::]:%d" % port, server_credentials)
    else:
        port = server.add_insecure_port("[::]:%d" % port)

    await server.start()

    # NOTE(lidizheng) returning the server to prevent it from deallocation
    return "localhost:%d" % port, server
| 6,586
| 37.976331
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/server_test.py
|
# Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import logging
import time
import unittest
import grpc
from grpc.experimental import aio
from tests.unit import resources
from tests.unit.framework.common import test_constants
from tests_aio.unit._test_base import AioTestBase
# Method paths routed by the generic handler below, one per handler flavor.
_SIMPLE_UNARY_UNARY = "/test/SimpleUnaryUnary"
_BLOCK_FOREVER = "/test/BlockForever"
_BLOCK_BRIEFLY = "/test/BlockBriefly"
_UNARY_STREAM_ASYNC_GEN = "/test/UnaryStreamAsyncGen"
_UNARY_STREAM_READER_WRITER = "/test/UnaryStreamReaderWriter"
_UNARY_STREAM_EVILLY_MIXED = "/test/UnaryStreamEvillyMixed"
_STREAM_UNARY_ASYNC_GEN = "/test/StreamUnaryAsyncGen"
_STREAM_UNARY_READER_WRITER = "/test/StreamUnaryReaderWriter"
_STREAM_UNARY_EVILLY_MIXED = "/test/StreamUnaryEvillyMixed"
_STREAM_STREAM_ASYNC_GEN = "/test/StreamStreamAsyncGen"
_STREAM_STREAM_READER_WRITER = "/test/StreamStreamReaderWriter"
_STREAM_STREAM_EVILLY_MIXED = "/test/StreamStreamEvillyMixed"
# Paths for failure-mode tests.
_UNIMPLEMENTED_METHOD = "/test/UnimplementedMethod"
_ERROR_IN_STREAM_STREAM = "/test/ErrorInStreamStream"
_ERROR_IN_STREAM_UNARY = "/test/ErrorInStreamUnary"
_ERROR_WITHOUT_RAISE_IN_UNARY_UNARY = "/test/ErrorWithoutRaiseInUnaryUnary"
_ERROR_WITHOUT_RAISE_IN_STREAM_STREAM = "/test/ErrorWithoutRaiseInStreamStream"
_INVALID_TRAILING_METADATA = "/test/InvalidTrailingMetadata"

# Fixed request/response payloads and stream sizes shared by all tests.
_REQUEST = b"\x00\x00\x00"
_RESPONSE = b"\x01\x01\x01"
_NUM_STREAM_REQUESTS = 3
_NUM_STREAM_RESPONSES = 5
_MAXIMUM_CONCURRENT_RPCS = 5
class _GenericHandler(grpc.GenericRpcHandler):
    """Routes the test method paths above to handlers of every RPC arity.

    Also resolves a future the first time any RPC reaches service(), so
    the shutdown tests can wait until the server has accepted a call.
    """

    def __init__(self):
        # Resolved on the first service() invocation; see wait_for_call().
        self._called = asyncio.get_event_loop().create_future()
        self._routing_table = {
            _SIMPLE_UNARY_UNARY: grpc.unary_unary_rpc_method_handler(
                self._unary_unary
            ),
            _BLOCK_FOREVER: grpc.unary_unary_rpc_method_handler(
                self._block_forever
            ),
            _BLOCK_BRIEFLY: grpc.unary_unary_rpc_method_handler(
                self._block_briefly
            ),
            _UNARY_STREAM_ASYNC_GEN: grpc.unary_stream_rpc_method_handler(
                self._unary_stream_async_gen
            ),
            _UNARY_STREAM_READER_WRITER: grpc.unary_stream_rpc_method_handler(
                self._unary_stream_reader_writer
            ),
            _UNARY_STREAM_EVILLY_MIXED: grpc.unary_stream_rpc_method_handler(
                self._unary_stream_evilly_mixed
            ),
            _STREAM_UNARY_ASYNC_GEN: grpc.stream_unary_rpc_method_handler(
                self._stream_unary_async_gen
            ),
            _STREAM_UNARY_READER_WRITER: grpc.stream_unary_rpc_method_handler(
                self._stream_unary_reader_writer
            ),
            _STREAM_UNARY_EVILLY_MIXED: grpc.stream_unary_rpc_method_handler(
                self._stream_unary_evilly_mixed
            ),
            _STREAM_STREAM_ASYNC_GEN: grpc.stream_stream_rpc_method_handler(
                self._stream_stream_async_gen
            ),
            _STREAM_STREAM_READER_WRITER: grpc.stream_stream_rpc_method_handler(
                self._stream_stream_reader_writer
            ),
            _STREAM_STREAM_EVILLY_MIXED: grpc.stream_stream_rpc_method_handler(
                self._stream_stream_evilly_mixed
            ),
            _ERROR_IN_STREAM_STREAM: grpc.stream_stream_rpc_method_handler(
                self._error_in_stream_stream
            ),
            _ERROR_IN_STREAM_UNARY: grpc.stream_unary_rpc_method_handler(
                self._value_error_in_stream_unary
            ),
            _ERROR_WITHOUT_RAISE_IN_UNARY_UNARY: grpc.unary_unary_rpc_method_handler(
                self._error_without_raise_in_unary_unary
            ),
            _ERROR_WITHOUT_RAISE_IN_STREAM_STREAM: grpc.stream_stream_rpc_method_handler(
                self._error_without_raise_in_stream_stream
            ),
            _INVALID_TRAILING_METADATA: grpc.unary_unary_rpc_method_handler(
                self._invalid_trailing_metadata
            ),
        }

    @staticmethod
    async def _unary_unary(unused_request, unused_context):
        return _RESPONSE

    async def _block_forever(self, unused_request, unused_context):
        # Never completes; used by forced/graceful shutdown tests.
        await asyncio.get_event_loop().create_future()

    async def _block_briefly(self, unused_request, unused_context):
        await asyncio.sleep(test_constants.SHORT_TIMEOUT / 2)
        return _RESPONSE

    async def _unary_stream_async_gen(self, unused_request, unused_context):
        for _ in range(_NUM_STREAM_RESPONSES):
            yield _RESPONSE

    async def _unary_stream_reader_writer(self, unused_request, context):
        for _ in range(_NUM_STREAM_RESPONSES):
            await context.write(_RESPONSE)

    async def _unary_stream_evilly_mixed(self, unused_request, context):
        # Mixing `yield` with context.write() in one handler is invalid
        # usage; the test asserts the client observes a UsageError.
        yield _RESPONSE
        for _ in range(_NUM_STREAM_RESPONSES - 1):
            await context.write(_RESPONSE)

    async def _stream_unary_async_gen(self, request_iterator, unused_context):
        request_count = 0
        async for request in request_iterator:
            assert _REQUEST == request
            request_count += 1
        assert _NUM_STREAM_REQUESTS == request_count
        return _RESPONSE

    async def _stream_unary_reader_writer(self, unused_request, context):
        for _ in range(_NUM_STREAM_REQUESTS):
            assert _REQUEST == await context.read()
        return _RESPONSE

    async def _stream_unary_evilly_mixed(self, request_iterator, context):
        # Reads the first request via context.read(), the rest via the
        # iterator — both mechanisms must hand out each request once.
        assert _REQUEST == await context.read()
        request_count = 0
        async for request in request_iterator:
            assert _REQUEST == request
            request_count += 1
        assert _NUM_STREAM_REQUESTS - 1 == request_count
        return _RESPONSE

    async def _stream_stream_async_gen(self, request_iterator, unused_context):
        request_count = 0
        async for request in request_iterator:
            assert _REQUEST == request
            request_count += 1
        assert _NUM_STREAM_REQUESTS == request_count
        for _ in range(_NUM_STREAM_RESPONSES):
            yield _RESPONSE

    async def _stream_stream_reader_writer(self, unused_request, context):
        for _ in range(_NUM_STREAM_REQUESTS):
            assert _REQUEST == await context.read()
        for _ in range(_NUM_STREAM_RESPONSES):
            await context.write(_RESPONSE)

    async def _stream_stream_evilly_mixed(self, request_iterator, context):
        assert _REQUEST == await context.read()
        request_count = 0
        async for request in request_iterator:
            assert _REQUEST == request
            request_count += 1
        assert _NUM_STREAM_REQUESTS - 1 == request_count
        yield _RESPONSE
        for _ in range(_NUM_STREAM_RESPONSES - 1):
            await context.write(_RESPONSE)

    async def _error_in_stream_stream(self, request_iterator, unused_context):
        async for request in request_iterator:
            assert _REQUEST == request
            # Crash on the first request; client should see UNKNOWN.
            raise RuntimeError("A testing RuntimeError!")
        yield _RESPONSE

    async def _value_error_in_stream_unary(self, request_iterator, context):
        request_count = 0
        async for request in request_iterator:
            assert _REQUEST == request
            request_count += 1
            if request_count >= 1:
                raise ValueError("A testing RuntimeError!")

    async def _error_without_raise_in_unary_unary(self, request, context):
        # Sets an error code without raising or returning a response.
        assert _REQUEST == request
        context.set_code(grpc.StatusCode.INTERNAL)

    async def _error_without_raise_in_stream_stream(
        self, request_iterator, context
    ):
        async for request in request_iterator:
            assert _REQUEST == request
        context.set_code(grpc.StatusCode.INTERNAL)

    async def _invalid_trailing_metadata(self, request, context):
        assert _REQUEST == request
        # Each malformed metadata value must be rejected with TypeError.
        for invalid_metadata in [
            42,
            {},
            {"error": "error"},
            [{"error": "error"}],
        ]:
            try:
                context.set_trailing_metadata(invalid_metadata)
            except TypeError:
                pass
            else:
                raise ValueError(
                    "No TypeError raised for invalid metadata:"
                    f" {invalid_metadata}"
                )

        await context.abort(
            grpc.StatusCode.DATA_LOSS,
            details="invalid abort",
            trailing_metadata=({"error": ("error1", "error2")}),
        )

    def service(self, handler_details):
        if not self._called.done():
            self._called.set_result(None)
        return self._routing_table.get(handler_details.method)

    async def wait_for_call(self):
        """Block until at least one RPC has reached service()."""
        await self._called
async def _start_test_server():
    """Start an aio server backed by _GenericHandler.

    Returns:
      A ("localhost:<port>", server, generic_handler) triple.
    """
    test_server = aio.server()
    bound_port = test_server.add_insecure_port("[::]:0")
    handler = _GenericHandler()
    test_server.add_generic_rpc_handlers((handler,))
    await test_server.start()
    return "localhost:%d" % bound_port, test_server, handler
class TestServer(AioTestBase):
    """End-to-end tests for the aio server: all RPC arities, shutdown paths,
    error propagation, port binding and concurrency limits."""

    async def setUp(self):
        addr, self._server, self._generic_handler = await _start_test_server()
        self._channel = aio.insecure_channel(addr)

    async def tearDown(self):
        await self._channel.close()
        await self._server.stop(None)

    async def test_unary_unary(self):
        unary_unary_call = self._channel.unary_unary(_SIMPLE_UNARY_UNARY)
        response = await unary_unary_call(_REQUEST)
        self.assertEqual(response, _RESPONSE)

    async def test_unary_stream_async_generator(self):
        unary_stream_call = self._channel.unary_stream(_UNARY_STREAM_ASYNC_GEN)
        call = unary_stream_call(_REQUEST)

        response_cnt = 0
        async for response in call:
            response_cnt += 1
            self.assertEqual(_RESPONSE, response)

        self.assertEqual(_NUM_STREAM_RESPONSES, response_cnt)
        self.assertEqual(await call.code(), grpc.StatusCode.OK)

    async def test_unary_stream_reader_writer(self):
        unary_stream_call = self._channel.unary_stream(
            _UNARY_STREAM_READER_WRITER
        )
        call = unary_stream_call(_REQUEST)

        for _ in range(_NUM_STREAM_RESPONSES):
            response = await call.read()
            self.assertEqual(_RESPONSE, response)

        self.assertEqual(await call.code(), grpc.StatusCode.OK)

    async def test_unary_stream_evilly_mixed(self):
        unary_stream_call = self._channel.unary_stream(
            _UNARY_STREAM_EVILLY_MIXED
        )
        call = unary_stream_call(_REQUEST)

        # Uses reader API
        self.assertEqual(_RESPONSE, await call.read())

        # Uses async generator API, mixed!
        with self.assertRaises(aio.UsageError):
            async for response in call:
                self.assertEqual(_RESPONSE, response)

    async def test_stream_unary_async_generator(self):
        stream_unary_call = self._channel.stream_unary(_STREAM_UNARY_ASYNC_GEN)
        call = stream_unary_call()

        for _ in range(_NUM_STREAM_REQUESTS):
            await call.write(_REQUEST)
        await call.done_writing()

        response = await call
        self.assertEqual(_RESPONSE, response)
        self.assertEqual(await call.code(), grpc.StatusCode.OK)

    async def test_stream_unary_async_generator_with_request_iter(self):
        stream_unary_call = self._channel.stream_unary(_STREAM_UNARY_ASYNC_GEN)

        # Flips to True once the generator has been fully consumed.
        finished = False

        def request_gen():
            for _ in range(_NUM_STREAM_REQUESTS):
                yield _REQUEST
            nonlocal finished
            finished = True

        call = stream_unary_call(request_gen())

        response = await call
        self.assertEqual(_RESPONSE, response)
        self.assertEqual(await call.code(), grpc.StatusCode.OK)
        self.assertEqual(finished, True)

    async def test_stream_unary_reader_writer(self):
        stream_unary_call = self._channel.stream_unary(
            _STREAM_UNARY_READER_WRITER
        )
        call = stream_unary_call()

        for _ in range(_NUM_STREAM_REQUESTS):
            await call.write(_REQUEST)
        await call.done_writing()

        response = await call
        self.assertEqual(_RESPONSE, response)
        self.assertEqual(await call.code(), grpc.StatusCode.OK)

    async def test_stream_unary_evilly_mixed(self):
        stream_unary_call = self._channel.stream_unary(
            _STREAM_UNARY_EVILLY_MIXED
        )
        call = stream_unary_call()

        for _ in range(_NUM_STREAM_REQUESTS):
            await call.write(_REQUEST)
        await call.done_writing()

        response = await call
        self.assertEqual(_RESPONSE, response)
        self.assertEqual(await call.code(), grpc.StatusCode.OK)

    async def test_stream_stream_async_generator(self):
        stream_stream_call = self._channel.stream_stream(
            _STREAM_STREAM_ASYNC_GEN
        )
        call = stream_stream_call()

        for _ in range(_NUM_STREAM_REQUESTS):
            await call.write(_REQUEST)
        await call.done_writing()

        for _ in range(_NUM_STREAM_RESPONSES):
            response = await call.read()
            self.assertEqual(_RESPONSE, response)

        self.assertEqual(await call.code(), grpc.StatusCode.OK)

    async def test_stream_stream_reader_writer(self):
        stream_stream_call = self._channel.stream_stream(
            _STREAM_STREAM_READER_WRITER
        )
        call = stream_stream_call()

        for _ in range(_NUM_STREAM_REQUESTS):
            await call.write(_REQUEST)
        await call.done_writing()

        for _ in range(_NUM_STREAM_RESPONSES):
            response = await call.read()
            self.assertEqual(_RESPONSE, response)

        self.assertEqual(await call.code(), grpc.StatusCode.OK)

    async def test_stream_stream_evilly_mixed(self):
        stream_stream_call = self._channel.stream_stream(
            _STREAM_STREAM_EVILLY_MIXED
        )
        call = stream_stream_call()

        for _ in range(_NUM_STREAM_REQUESTS):
            await call.write(_REQUEST)
        await call.done_writing()

        for _ in range(_NUM_STREAM_RESPONSES):
            response = await call.read()
            self.assertEqual(_RESPONSE, response)

        self.assertEqual(await call.code(), grpc.StatusCode.OK)

    async def test_shutdown(self):
        await self._server.stop(None)
        # Ensures no SIGSEGV triggered, and ends within timeout.

    async def test_shutdown_after_call(self):
        await self._channel.unary_unary(_SIMPLE_UNARY_UNARY)(_REQUEST)

        await self._server.stop(None)

    async def test_graceful_shutdown_success(self):
        call = self._channel.unary_unary(_BLOCK_BRIEFLY)(_REQUEST)
        await self._generic_handler.wait_for_call()

        shutdown_start_time = time.time()
        await self._server.stop(test_constants.SHORT_TIMEOUT)
        grace_period_length = time.time() - shutdown_start_time
        self.assertGreater(
            grace_period_length, test_constants.SHORT_TIMEOUT / 3
        )

        # Validates the states.
        self.assertEqual(_RESPONSE, await call)
        self.assertTrue(call.done())

    async def test_graceful_shutdown_failed(self):
        call = self._channel.unary_unary(_BLOCK_FOREVER)(_REQUEST)
        await self._generic_handler.wait_for_call()

        await self._server.stop(test_constants.SHORT_TIMEOUT)

        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call
        self.assertEqual(
            grpc.StatusCode.UNAVAILABLE, exception_context.exception.code()
        )

    async def test_concurrent_graceful_shutdown(self):
        call = self._channel.unary_unary(_BLOCK_BRIEFLY)(_REQUEST)
        await self._generic_handler.wait_for_call()

        # Expects the shortest grace period to be effective.
        shutdown_start_time = time.time()
        await asyncio.gather(
            self._server.stop(test_constants.LONG_TIMEOUT),
            self._server.stop(test_constants.SHORT_TIMEOUT),
            self._server.stop(test_constants.LONG_TIMEOUT),
        )
        grace_period_length = time.time() - shutdown_start_time
        self.assertGreater(
            grace_period_length, test_constants.SHORT_TIMEOUT / 3
        )

        self.assertEqual(_RESPONSE, await call)
        self.assertTrue(call.done())

    async def test_concurrent_graceful_shutdown_immediate(self):
        call = self._channel.unary_unary(_BLOCK_FOREVER)(_REQUEST)
        await self._generic_handler.wait_for_call()

        # Expects no grace period, due to the "server.stop(None)".
        await asyncio.gather(
            self._server.stop(test_constants.LONG_TIMEOUT),
            self._server.stop(None),
            self._server.stop(test_constants.SHORT_TIMEOUT),
            self._server.stop(test_constants.LONG_TIMEOUT),
        )

        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call
        self.assertEqual(
            grpc.StatusCode.UNAVAILABLE, exception_context.exception.code()
        )

    async def test_shutdown_before_call(self):
        await self._server.stop(None)

        # Ensures the server is cleaned up at this point.
        # Some proper exception should be raised.
        with self.assertRaises(aio.AioRpcError):
            await self._channel.unary_unary(_SIMPLE_UNARY_UNARY)(_REQUEST)

    async def test_unimplemented(self):
        call = self._channel.unary_unary(_UNIMPLEMENTED_METHOD)
        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call(_REQUEST)
        rpc_error = exception_context.exception
        self.assertEqual(grpc.StatusCode.UNIMPLEMENTED, rpc_error.code())

    async def test_shutdown_during_stream_stream(self):
        stream_stream_call = self._channel.stream_stream(
            _STREAM_STREAM_ASYNC_GEN
        )
        call = stream_stream_call()

        # Don't half close the RPC yet, keep it alive.
        await call.write(_REQUEST)
        await self._server.stop(None)

        self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
        # No segfault

    async def test_error_in_stream_stream(self):
        stream_stream_call = self._channel.stream_stream(
            _ERROR_IN_STREAM_STREAM
        )
        call = stream_stream_call()

        # Don't half close the RPC yet, keep it alive.
        await call.write(_REQUEST)

        # Don't segfault here
        self.assertEqual(grpc.StatusCode.UNKNOWN, await call.code())

    async def test_error_without_raise_in_unary_unary(self):
        call = self._channel.unary_unary(_ERROR_WITHOUT_RAISE_IN_UNARY_UNARY)(
            _REQUEST
        )

        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call

        rpc_error = exception_context.exception
        self.assertEqual(grpc.StatusCode.INTERNAL, rpc_error.code())

    async def test_error_without_raise_in_stream_stream(self):
        call = self._channel.stream_stream(
            _ERROR_WITHOUT_RAISE_IN_STREAM_STREAM
        )()

        for _ in range(_NUM_STREAM_REQUESTS):
            await call.write(_REQUEST)
        await call.done_writing()

        self.assertEqual(grpc.StatusCode.INTERNAL, await call.code())

    async def test_error_in_stream_unary(self):
        stream_unary_call = self._channel.stream_unary(_ERROR_IN_STREAM_UNARY)

        async def request_gen():
            for _ in range(_NUM_STREAM_REQUESTS):
                yield _REQUEST

        call = stream_unary_call(request_gen())

        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call
        rpc_error = exception_context.exception
        self.assertEqual(grpc.StatusCode.UNKNOWN, rpc_error.code())

    async def test_port_binding_exception(self):
        server = aio.server(options=(("grpc.so_reuseport", 0),))
        port = server.add_insecure_port("localhost:0")
        bind_address = "localhost:%d" % port

        # Binding the same address twice must fail for both port kinds.
        with self.assertRaises(RuntimeError):
            server.add_insecure_port(bind_address)

        server_credentials = grpc.ssl_server_credentials(
            [(resources.private_key(), resources.certificate_chain())]
        )
        with self.assertRaises(RuntimeError):
            server.add_secure_port(bind_address, server_credentials)

    async def test_maximum_concurrent_rpcs(self):
        # Build the server with concurrent rpc argument
        server = aio.server(maximum_concurrent_rpcs=_MAXIMUM_CONCURRENT_RPCS)
        port = server.add_insecure_port("localhost:0")
        bind_address = "localhost:%d" % port
        server.add_generic_rpc_handlers((_GenericHandler(),))
        await server.start()
        # Build the channel
        channel = aio.insecure_channel(bind_address)
        # Deplete the concurrent quota with 3 times of max RPCs
        rpcs = []
        for _ in range(3 * _MAXIMUM_CONCURRENT_RPCS):
            rpcs.append(channel.unary_unary(_BLOCK_BRIEFLY)(_REQUEST))
        task = self.loop.create_task(
            asyncio.wait(rpcs, return_when=asyncio.FIRST_EXCEPTION)
        )
        # Each batch took test_constants.SHORT_TIMEOUT /2
        start_time = time.time()
        await task
        elapsed_time = time.time() - start_time
        self.assertGreater(elapsed_time, test_constants.SHORT_TIMEOUT * 3 / 2)
        # Clean-up
        await channel.close()
        await server.stop(0)

    async def test_invalid_trailing_metadata(self):
        call = self._channel.unary_unary(_INVALID_TRAILING_METADATA)(_REQUEST)

        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call

        rpc_error = exception_context.exception
        self.assertEqual(grpc.StatusCode.UNKNOWN, rpc_error.code())
        self.assertIn("trailing", rpc_error.details())
if __name__ == "__main__":
    # Run this module's tests directly with verbose, debug-level output.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 22,514
| 35.849427
| 89
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/_common.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
from typing import AsyncIterable
import grpc
from grpc.aio._metadata import Metadata
from grpc.aio._typing import MetadataKey
from grpc.aio._typing import MetadataValue
from grpc.aio._typing import MetadatumType
from grpc.experimental import aio
from tests.unit.framework.common import test_constants
ADHOC_METHOD = "/test/AdHoc"
def seen_metadata(expected: Metadata, actual: Metadata):
    """True iff every metadatum in `expected` also appears in `actual`."""
    return set(tuple(expected)).issubset(set(tuple(actual)))
def seen_metadatum(
    expected_key: MetadataKey, expected_value: MetadataValue, actual: Metadata
) -> bool:
    """True iff `actual` maps `expected_key` to exactly `expected_value`."""
    return actual[expected_key] == expected_value
async def block_until_certain_state(
    channel: aio.Channel, expected_state: grpc.ChannelConnectivity
):
    """Wait until the channel's connectivity reaches `expected_state`."""
    current = channel.get_state()
    while current != expected_state:
        # Suspend until the state moves, then re-sample it.
        await channel.wait_for_state_change(current)
        current = channel.get_state()
def inject_callbacks(call: aio.Call):
    """Attach two done-callbacks to `call` and return a validation coroutine.

    The returned coroutine waits (bounded by SHORT_TIMEOUT) until both
    callbacks have run, proving they fired after the call reached an end
    state.
    """
    first_callback_ran = asyncio.Event()

    def first_callback(call):
        # Validate that all responses have been received
        # and the call is an end state.
        assert call.done()
        first_callback_ran.set()

    second_callback_ran = asyncio.Event()

    def second_callback(call):
        # Validate that all responses have been received
        # and the call is an end state.
        assert call.done()
        second_callback_ran.set()

    call.add_done_callback(first_callback)
    call.add_done_callback(second_callback)

    async def validation():
        await asyncio.wait_for(
            asyncio.gather(
                first_callback_ran.wait(), second_callback_ran.wait()
            ),
            test_constants.SHORT_TIMEOUT,
        )

    return validation()
class CountingRequestIterator:
    """Wraps a request iterator and tallies how many requests pass through."""

    def __init__(self, request_iterator):
        # Number of requests forwarded so far.
        self.request_cnt = 0
        self._request_iterator = request_iterator

    async def _forward_requests(self):
        async for outbound_request in self._request_iterator:
            self.request_cnt += 1
            yield outbound_request

    def __aiter__(self):
        return self._forward_requests()
class CountingResponseIterator:
    """Wraps a response iterator and tallies how many responses pass through."""

    def __init__(self, response_iterator):
        # Number of responses forwarded so far.
        self.response_cnt = 0
        self._response_iterator = response_iterator

    async def _forward_responses(self):
        async for outbound_response in self._response_iterator:
            self.response_cnt += 1
            yield outbound_response

    def __aiter__(self):
        return self._forward_responses()
class AdhocGenericHandler(grpc.GenericRpcHandler):
    """A generic handler to plugin testing server methods on the fly."""

    _handler: grpc.RpcMethodHandler

    def __init__(self):
        self._handler = None

    def set_adhoc_handler(self, handler: grpc.RpcMethodHandler):
        """Install the handler served for subsequent ADHOC_METHOD calls."""
        self._handler = handler

    def service(self, handler_call_details):
        # Only the ad-hoc method is routed; everything else is unimplemented.
        if handler_call_details.method != ADHOC_METHOD:
            return None
        return self._handler
| 3,586
| 27.696
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/compression_test.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests behavior around the compression mechanism."""
import asyncio
import logging
import platform
import random
import unittest
import grpc
from grpc.experimental import aio
from tests_aio.unit import _common
from tests_aio.unit._test_base import AioTestBase
# Channel argument selecting gzip as the default compression algorithm
# (2 is the gzip enum value — TODO confirm against grpc_types.h).
_GZIP_CHANNEL_ARGUMENT = ("grpc.default_compression_algorithm", 2)
# Bitset of enabled algorithms; presumably bit k enables algorithm k,
# so 3 (0b011) leaves gzip disabled and 5 (0b101) leaves deflate
# disabled — verify against the core channel-arg documentation.
_GZIP_DISABLED_CHANNEL_ARGUMENT = (
    "grpc.compression_enabled_algorithms_bitset",
    3,
)
_DEFLATE_DISABLED_CHANNEL_ARGUMENT = (
    "grpc.compression_enabled_algorithms_bitset",
    5,
)

# Method paths served by the module-level routing table below.
_TEST_UNARY_UNARY = "/test/TestUnaryUnary"
_TEST_SET_COMPRESSION = "/test/TestSetCompression"
_TEST_DISABLE_COMPRESSION_UNARY = "/test/TestDisableCompressionUnary"
_TEST_DISABLE_COMPRESSION_STREAM = "/test/TestDisableCompressionStream"

# Fixed request/response payloads shared by all tests in this module.
_REQUEST = b"\x01" * 100
_RESPONSE = b"\x02" * 100
async def _test_unary_unary(unused_request, unused_context):
    """Trivial unary-unary handler: always answers with the fixed payload."""
    return _RESPONSE
async def _test_set_compression(unused_request_iterator, context):
    """Checks set_compression works before, and raises after, the first write."""
    assert _REQUEST == await context.read()
    context.set_compression(grpc.Compression.Deflate)
    await context.write(_RESPONSE)
    try:
        context.set_compression(grpc.Compression.Deflate)
    except RuntimeError:
        # NOTE(lidiz) Testing if the servicer context raises exception when
        # the set_compression method is called after initial_metadata sent.
        # After the initial_metadata sent, the server-side has no control over
        # which compression algorithm it should use.
        pass
    else:
        raise ValueError(
            "Expecting exceptions if set_compression is not effective"
        )
async def _test_disable_compression_unary(request, context):
    """Sets Deflate then disables compression for the (single) response."""
    assert _REQUEST == request
    context.set_compression(grpc.Compression.Deflate)
    context.disable_next_message_compression()
    return _RESPONSE
async def _test_disable_compression_stream(unused_request_iterator, context):
    """Stream-stream handler: three responses, the middle one uncompressed.

    Per the method name, disable_next_message_compression affects only the
    very next write; the first and third responses use Deflate.
    """
    assert _REQUEST == await context.read()
    context.set_compression(grpc.Compression.Deflate)
    await context.write(_RESPONSE)
    # Only the immediately-following write is exempted from compression.
    context.disable_next_message_compression()
    await context.write(_RESPONSE)
    await context.write(_RESPONSE)
# Maps fully-qualified method paths to their RPC method handlers; looked up
# by _GenericHandler.service for each incoming call.
_ROUTING_TABLE = {
    _TEST_UNARY_UNARY: grpc.unary_unary_rpc_method_handler(_test_unary_unary),
    _TEST_SET_COMPRESSION: grpc.stream_stream_rpc_method_handler(
        _test_set_compression
    ),
    _TEST_DISABLE_COMPRESSION_UNARY: grpc.unary_unary_rpc_method_handler(
        _test_disable_compression_unary
    ),
    _TEST_DISABLE_COMPRESSION_STREAM: grpc.stream_stream_rpc_method_handler(
        _test_disable_compression_stream
    ),
}
class _GenericHandler(grpc.GenericRpcHandler):
    """Routes incoming RPCs through the module-level routing table."""

    def service(self, handler_call_details):
        # Unknown method paths yield None, i.e. the call goes unhandled.
        method = handler_call_details.method
        return _ROUTING_TABLE.get(method)
async def _start_test_server(options=None):
    """Boot an insecure AsyncIO server on an ephemeral port.

    Returns a ``(target, server)`` pair where ``target`` is the
    ``localhost:<port>`` address string clients should dial.
    """
    test_server = aio.server(options=options)
    bound_port = test_server.add_insecure_port("[::]:0")
    test_server.add_generic_rpc_handlers((_GenericHandler(),))
    await test_server.start()
    return f"localhost:{bound_port}", test_server
class TestCompression(AioTestBase):
    """End-to-end checks of channel-, call-, and server-level compression."""

    async def setUp(self):
        # The shared server bans gzip so the "baned" cases below can fail.
        server_options = (_GZIP_DISABLED_CHANNEL_ARGUMENT,)
        self._address, self._server = await _start_test_server(server_options)
        self._channel = aio.insecure_channel(self._address)

    async def tearDown(self):
        # Close the channel before stopping the server it talks to.
        await self._channel.close()
        await self._server.stop(None)

    # NOTE(review): "baned" is a typo for "banned"; renaming would change the
    # reported test id, so the name is left untouched.
    async def test_channel_level_compression_baned_compression(self):
        # GZIP is disabled, this call should fail
        async with aio.insecure_channel(
            self._address, compression=grpc.Compression.Gzip
        ) as channel:
            multicallable = channel.unary_unary(_TEST_UNARY_UNARY)
            call = multicallable(_REQUEST)
            with self.assertRaises(aio.AioRpcError) as exception_context:
                await call
            rpc_error = exception_context.exception
            self.assertEqual(grpc.StatusCode.UNIMPLEMENTED, rpc_error.code())

    async def test_channel_level_compression_allowed_compression(self):
        # Deflate is allowed, this call should succeed
        async with aio.insecure_channel(
            self._address, compression=grpc.Compression.Deflate
        ) as channel:
            multicallable = channel.unary_unary(_TEST_UNARY_UNARY)
            call = multicallable(_REQUEST)
            self.assertEqual(grpc.StatusCode.OK, await call.code())

    async def test_client_call_level_compression_baned_compression(self):
        multicallable = self._channel.unary_unary(_TEST_UNARY_UNARY)

        # GZIP is disabled, this call should fail
        call = multicallable(_REQUEST, compression=grpc.Compression.Gzip)
        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call
        rpc_error = exception_context.exception
        self.assertEqual(grpc.StatusCode.UNIMPLEMENTED, rpc_error.code())

    async def test_client_call_level_compression_allowed_compression(self):
        multicallable = self._channel.unary_unary(_TEST_UNARY_UNARY)

        # Deflate is allowed, this call should succeed
        call = multicallable(_REQUEST, compression=grpc.Compression.Deflate)
        self.assertEqual(grpc.StatusCode.OK, await call.code())

    async def test_server_call_level_compression(self):
        # Exercises the _test_set_compression handler, which switches the
        # algorithm server-side before writing its response.
        multicallable = self._channel.stream_stream(_TEST_SET_COMPRESSION)
        call = multicallable()
        await call.write(_REQUEST)
        await call.done_writing()
        self.assertEqual(_RESPONSE, await call.read())
        self.assertEqual(grpc.StatusCode.OK, await call.code())

    async def test_server_disable_compression_unary(self):
        multicallable = self._channel.unary_unary(
            _TEST_DISABLE_COMPRESSION_UNARY
        )
        call = multicallable(_REQUEST)
        self.assertEqual(_RESPONSE, await call)
        self.assertEqual(grpc.StatusCode.OK, await call.code())

    async def test_server_disable_compression_stream(self):
        multicallable = self._channel.stream_stream(
            _TEST_DISABLE_COMPRESSION_STREAM
        )
        call = multicallable()
        await call.write(_REQUEST)
        await call.done_writing()
        # The handler writes three responses (the middle one uncompressed).
        self.assertEqual(_RESPONSE, await call.read())
        self.assertEqual(_RESPONSE, await call.read())
        self.assertEqual(_RESPONSE, await call.read())
        self.assertEqual(grpc.StatusCode.OK, await call.code())

    async def test_server_default_compression_algorithm(self):
        # Server-wide default compression. NOTE(review): the server is only
        # stopped on the success path (no try/finally); a failing assertion
        # leaks it until process exit.
        server = aio.server(compression=grpc.Compression.Deflate)
        port = server.add_insecure_port("[::]:0")
        server.add_generic_rpc_handlers((_GenericHandler(),))
        await server.start()

        async with aio.insecure_channel(f"localhost:{port}") as channel:
            multicallable = channel.unary_unary(_TEST_UNARY_UNARY)
            call = multicallable(_REQUEST)
            self.assertEqual(_RESPONSE, await call)
            self.assertEqual(grpc.StatusCode.OK, await call.code())

        await server.stop(None)
if __name__ == "__main__":
    # Debug-level logging aids triage of flaky async tests in CI output.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 7,689
| 36.330097
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/connectivity_test.py
|
# Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests behavior of the connectivity state."""
import asyncio
import logging
import platform
import threading
import time
import unittest
import grpc
from grpc.experimental import aio
from tests.unit.framework.common import test_constants
from tests_aio.unit import _common
from tests_aio.unit._constants import UNREACHABLE_TARGET
from tests_aio.unit._test_base import AioTestBase
from tests_aio.unit._test_server import start_test_server
class TestConnectivityState(AioTestBase):
    """Exercises channel.get_state / wait_for_state_change semantics."""

    async def setUp(self):
        self._server_address, self._server = await start_test_server()

    async def tearDown(self):
        await self._server.stop(None)

    @unittest.skipIf(
        "aarch64" in platform.machine(),
        "The transient failure propagation is slower on aarch64",
    )
    async def test_unavailable_backend(self):
        """An unreachable target should reach TRANSIENT_FAILURE."""
        async with aio.insecure_channel(UNREACHABLE_TARGET) as channel:
            # get_state(False) must not kick the channel out of IDLE...
            self.assertEqual(
                grpc.ChannelConnectivity.IDLE, channel.get_state(False)
            )
            # ...and get_state(True) requests a connection attempt but still
            # reports IDLE immediately.
            self.assertEqual(
                grpc.ChannelConnectivity.IDLE, channel.get_state(True)
            )

            # Should not time out
            await asyncio.wait_for(
                _common.block_until_certain_state(
                    channel, grpc.ChannelConnectivity.TRANSIENT_FAILURE
                ),
                test_constants.SHORT_TIMEOUT,
            )

    async def test_normal_backend(self):
        """A reachable backend should take the channel to READY."""
        async with aio.insecure_channel(self._server_address) as channel:
            current_state = channel.get_state(True)
            self.assertEqual(grpc.ChannelConnectivity.IDLE, current_state)

            # Should not time out
            await asyncio.wait_for(
                _common.block_until_certain_state(
                    channel, grpc.ChannelConnectivity.READY
                ),
                test_constants.SHORT_TIMEOUT,
            )

    async def test_timeout(self):
        """Without requesting a connection, waiting for READY times out."""
        async with aio.insecure_channel(self._server_address) as channel:
            self.assertEqual(
                grpc.ChannelConnectivity.IDLE, channel.get_state(False)
            )

            # If timed out, the function should return None.
            with self.assertRaises(asyncio.TimeoutError):
                await asyncio.wait_for(
                    _common.block_until_certain_state(
                        channel, grpc.ChannelConnectivity.READY
                    ),
                    test_constants.SHORT_TIMEOUT,
                )

    async def test_shutdown(self):
        """Closing the channel wakes pending waiters and pins SHUTDOWN."""
        channel = aio.insecure_channel(self._server_address)

        self.assertEqual(
            grpc.ChannelConnectivity.IDLE, channel.get_state(False)
        )

        # Waiting for changes in a separate coroutine
        wait_started = asyncio.Event()

        async def a_pending_wait():
            wait_started.set()
            await channel.wait_for_state_change(grpc.ChannelConnectivity.IDLE)

        pending_task = self.loop.create_task(a_pending_wait())
        # Ensure the waiter is actually blocked before closing the channel.
        await wait_started.wait()

        await channel.close()

        self.assertEqual(
            grpc.ChannelConnectivity.SHUTDOWN, channel.get_state(True)
        )

        self.assertEqual(
            grpc.ChannelConnectivity.SHUTDOWN, channel.get_state(False)
        )

        # Make sure there isn't any exception in the task
        await pending_task

        # It can raise exceptions since it is an usage error, but it should not
        # segfault or abort.
        with self.assertRaises(aio.UsageError):
            await channel.wait_for_state_change(
                grpc.ChannelConnectivity.SHUTDOWN
            )
if __name__ == "__main__":
    # Debug-level logging aids triage of flaky async tests in CI output.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 4,343
| 32.415385
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/init_test.py
|
# Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import unittest
class TestInit(unittest.TestCase):
    """Verifies grpc.aio is reachable through both import spellings."""

    def test_grpc(self):
        # Importing the top-level package must expose the aio submodule.
        import grpc  # pylint: disable=wrong-import-position

        self.assertIsInstance(
            grpc.aio.insecure_channel("phony"), grpc.aio.Channel
        )

    def test_grpc_dot_aio(self):
        # Importing the submodule directly must behave identically.
        import grpc.aio  # pylint: disable=wrong-import-position

        self.assertIsInstance(
            grpc.aio.insecure_channel("phony"), grpc.aio.Channel
        )
if __name__ == "__main__":
    # Debug-level logging aids triage of test failures in CI output.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 1,161
| 32.2
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/_constants.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# If we use an unreachable IP, depending on the network stack, we might not
# get an RST fast enough. This used to cause tests to flake under different
# platforms.
UNREACHABLE_TARGET = "foo/bar"
# Presumably the server-side sleep (in seconds) used by UnaryCallWithSleep
# handlers — confirm at the usage sites.
UNARY_CALL_WITH_SLEEP_VALUE = 0.2
| 816
| 39.85
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/channel_ready_test.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Testing the channel_ready function."""
import asyncio
import gc
import logging
import socket
import time
import unittest
import grpc
from grpc.experimental import aio
from tests.unit.framework.common import get_socket
from tests.unit.framework.common import test_constants
from tests_aio.unit import _common
from tests_aio.unit._test_base import AioTestBase
from tests_aio.unit._test_server import start_test_server
class TestChannelReady(AioTestBase):
    """Tests Channel.channel_ready() against an initially-absent backend."""

    async def setUp(self):
        # Reserve a free port without listening on it, so the channel has a
        # valid target with nothing behind it yet.
        address, self._port, self._socket = get_socket(
            listen=False, sock_options=(socket.SO_REUSEADDR,)
        )
        self._channel = aio.insecure_channel(f"{address}:{self._port}")
        self._socket.close()

    async def tearDown(self):
        await self._channel.close()

    async def test_channel_ready_success(self):
        """channel_ready() resolves once a server appears on the port."""
        # Start `channel_ready` as another Task
        channel_ready_task = self.loop.create_task(
            self._channel.channel_ready()
        )

        # Wait for TRANSIENT_FAILURE
        await _common.block_until_certain_state(
            self._channel, grpc.ChannelConnectivity.TRANSIENT_FAILURE
        )

        # BUG FIX: previously `server` was assigned inside the try block, so
        # a failure in start_test_server raised UnboundLocalError from the
        # finally clause, masking the original exception.
        server = None
        try:
            # Start the server
            _, server = await start_test_server(port=self._port)

            # The RPC should recover itself
            await channel_ready_task
        finally:
            if server is not None:
                await server.stop(None)

    async def test_channel_ready_blocked(self):
        """channel_ready() never resolves while no server exists."""
        with self.assertRaises(asyncio.TimeoutError):
            await asyncio.wait_for(
                self._channel.channel_ready(), test_constants.SHORT_TIMEOUT
            )
if __name__ == "__main__":
    # Debug-level logging aids triage of flaky async tests in CI output.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 2,311
| 30.243243
| 75
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/wait_for_connection_test.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests behavior of the wait for connection API on client side."""
import asyncio
import datetime
import logging
from typing import Callable, Tuple
import unittest
import grpc
from grpc.experimental import aio
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
from tests_aio.unit import _common
from tests_aio.unit._constants import UNREACHABLE_TARGET
from tests_aio.unit._test_base import AioTestBase
from tests_aio.unit._test_server import start_test_server
# Raw request payload for the hand-built calls against the phony channel.
_REQUEST = b"\x01\x02\x03"
# Method path used only against the unreachable channel; it needs no real
# handler because the connection itself fails first.
_TEST_METHOD = "/test/Test"
_NUM_STREAM_RESPONSES = 5
_REQUEST_PAYLOAD_SIZE = 7
_RESPONSE_PAYLOAD_SIZE = 42
class TestWaitForConnection(AioTestBase):
    """Tests if wait_for_connection raises connectivity issue."""

    async def setUp(self):
        address, self._server = await start_test_server()
        self._channel = aio.insecure_channel(address)
        # Channel to a target nothing listens on, for the *_error cases.
        self._phony_channel = aio.insecure_channel(UNREACHABLE_TARGET)
        self._stub = test_pb2_grpc.TestServiceStub(self._channel)

    async def tearDown(self):
        await self._phony_channel.close()
        await self._channel.close()
        await self._server.stop(None)

    async def test_unary_unary_ok(self):
        """wait_for_connection is side-effect free on a healthy unary call."""
        call = self._stub.UnaryCall(messages_pb2.SimpleRequest())

        # No exception raised and no message swallowed.
        await call.wait_for_connection()

        response = await call
        self.assertIsInstance(response, messages_pb2.SimpleResponse)

    async def test_unary_stream_ok(self):
        """All streamed responses still arrive after wait_for_connection."""
        request = messages_pb2.StreamingOutputCallRequest()
        for _ in range(_NUM_STREAM_RESPONSES):
            request.response_parameters.append(
                messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
            )

        call = self._stub.StreamingOutputCall(request)

        # No exception raised and no message swallowed.
        await call.wait_for_connection()

        response_cnt = 0
        async for response in call:
            response_cnt += 1
            self.assertIs(
                type(response), messages_pb2.StreamingOutputCallResponse
            )
            self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))

        self.assertEqual(_NUM_STREAM_RESPONSES, response_cnt)
        self.assertEqual(await call.code(), grpc.StatusCode.OK)

    async def test_stream_unary_ok(self):
        """Client-streaming requests are intact after wait_for_connection."""
        call = self._stub.StreamingInputCall()

        # No exception raised and no message swallowed.
        await call.wait_for_connection()

        payload = messages_pb2.Payload(body=b"\0" * _REQUEST_PAYLOAD_SIZE)
        request = messages_pb2.StreamingInputCallRequest(payload=payload)

        for _ in range(_NUM_STREAM_RESPONSES):
            await call.write(request)
        await call.done_writing()

        response = await call
        self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
        self.assertEqual(
            _NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
            response.aggregated_payload_size,
        )

        self.assertEqual(await call.code(), grpc.StatusCode.OK)

    async def test_stream_stream_ok(self):
        """Bidi round-trips are intact after wait_for_connection."""
        call = self._stub.FullDuplexCall()

        # No exception raised and no message swallowed.
        await call.wait_for_connection()

        request = messages_pb2.StreamingOutputCallRequest()
        request.response_parameters.append(
            messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
        )

        for _ in range(_NUM_STREAM_RESPONSES):
            await call.write(request)
            response = await call.read()
            self.assertIsInstance(
                response, messages_pb2.StreamingOutputCallResponse
            )
            self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))

        await call.done_writing()

        self.assertEqual(grpc.StatusCode.OK, await call.code())

    async def test_unary_unary_error(self):
        """On an unreachable target, wait_for_connection raises UNAVAILABLE."""
        call = self._phony_channel.unary_unary(_TEST_METHOD)(_REQUEST)

        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call.wait_for_connection()
        rpc_error = exception_context.exception
        self.assertEqual(grpc.StatusCode.UNAVAILABLE, rpc_error.code())

    async def test_unary_stream_error(self):
        call = self._phony_channel.unary_stream(_TEST_METHOD)(_REQUEST)

        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call.wait_for_connection()
        rpc_error = exception_context.exception
        self.assertEqual(grpc.StatusCode.UNAVAILABLE, rpc_error.code())

    async def test_stream_unary_error(self):
        call = self._phony_channel.stream_unary(_TEST_METHOD)()

        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call.wait_for_connection()
        rpc_error = exception_context.exception
        self.assertEqual(grpc.StatusCode.UNAVAILABLE, rpc_error.code())

    async def test_stream_stream_error(self):
        call = self._phony_channel.stream_stream(_TEST_METHOD)()

        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call.wait_for_connection()
        rpc_error = exception_context.exception
        self.assertEqual(grpc.StatusCode.UNAVAILABLE, rpc_error.code())
if __name__ == "__main__":
    # Debug-level logging aids triage of flaky async tests in CI output.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 6,003
| 34.952096
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/outside_init_test.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests behavior around the metadata mechanism."""
import asyncio
import logging
import unittest
import grpc
from grpc.experimental import aio
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
from tests_aio.unit._test_server import start_test_server
# Number of ephemeral event loops created by test_multi_ephemeral_loops.
_NUM_OF_LOOPS = 50
class TestOutsideInit(unittest.TestCase):
    """Ensures gRPC AsyncIO APIs behave when used outside a running loop."""

    def test_behavior_outside_asyncio(self):
        """AsyncIO objects can be created with no event loop running."""
        # Ensures non-AsyncIO object can be initiated
        channel_creds = grpc.ssl_channel_credentials()

        # Ensures AsyncIO API not raising outside of AsyncIO.
        # NOTE(lidiz) This behavior is bound with GAPIC generator, and required
        # by test frameworks like pytest. In test frameworks, objects shared
        # across cases need to be created outside of AsyncIO coroutines.
        aio.insecure_channel("")
        aio.secure_channel("", channel_creds)
        aio.server()
        aio.init_grpc_aio()
        aio.shutdown_grpc_aio()

    def test_multi_ephemeral_loops(self):
        """gRPC AIO must survive repeated closing/recreation of event loops."""
        # Initializes AIO module outside. It's part of the test. We especially
        # want to ensure the closing of the default loop won't cause deadlocks.
        aio.init_grpc_aio()

        async def ping_pong():
            address, server = await start_test_server()
            channel = aio.insecure_channel(address)
            stub = test_pb2_grpc.TestServiceStub(channel)

            await stub.UnaryCall(messages_pb2.SimpleRequest())

            await channel.close()
            await server.stop(None)

        # The loop index was unused; `_` makes that explicit.
        # NOTE(review): asyncio.get_event_loop() is deprecated outside a
        # running loop on newer Pythons — revisit if this starts warning.
        for _ in range(_NUM_OF_LOOPS):
            old_loop = asyncio.get_event_loop()
            old_loop.close()

            loop = asyncio.new_event_loop()
            loop.set_debug(True)
            asyncio.set_event_loop(loop)

            loop.run_until_complete(ping_pong())

        aio.shutdown_grpc_aio()
if __name__ == "__main__":
    # Debug-level logging aids triage of flaky async tests in CI output.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 2,540
| 32.434211
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/unit/call_test.py
|
# Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests behavior of the Call classes."""
import asyncio
import datetime
import logging
import random
import unittest
import grpc
from grpc.experimental import aio
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
from tests_aio.unit._constants import UNREACHABLE_TARGET
from tests_aio.unit._test_base import AioTestBase
from tests_aio.unit._test_server import start_test_server
# Deadline used to provoke timeouts/cancellations quickly, in seconds.
_SHORT_TIMEOUT_S = datetime.timedelta(seconds=1).total_seconds()

_NUM_STREAM_RESPONSES = 5
_RESPONSE_PAYLOAD_SIZE = 42
_REQUEST_PAYLOAD_SIZE = 7
# Must match the detail string attached to locally-cancelled calls.
_LOCAL_CANCEL_DETAILS_EXPECTATION = "Locally cancelled by application!"
# Server-side delay between streamed responses, in microseconds.
_RESPONSE_INTERVAL_US = int(_SHORT_TIMEOUT_S * 1000 * 1000)
# Effectively "block forever": the largest 32-bit signed value.
_INFINITE_INTERVAL_US = 2**31 - 1
# Parameters for the stochastic-interleaving cancellation test.
_NONDETERMINISTIC_ITERATIONS = 50
_NONDETERMINISTIC_SERVER_SLEEP_MAX_US = 1000
class _MulticallableTestMixin:
    """Shared setUp/tearDown: boots a test server and a connected stub."""

    async def setUp(self):
        address, self._server = await start_test_server()
        self._channel = aio.insecure_channel(address)
        self._stub = test_pb2_grpc.TestServiceStub(self._channel)

    async def tearDown(self):
        # Close the channel before stopping the server it talks to.
        await self._channel.close()
        await self._server.stop(None)
class TestUnaryUnaryCall(_MulticallableTestMixin, AioTestBase):
    """Covers UnaryUnaryCall: results, status APIs, and cancellation."""

    async def test_call_to_string(self):
        """str()/repr() must be usable both before and after completion."""
        call = self._stub.UnaryCall(messages_pb2.SimpleRequest())

        self.assertTrue(str(call) is not None)
        self.assertTrue(repr(call) is not None)

        await call

        self.assertTrue(str(call) is not None)
        self.assertTrue(repr(call) is not None)

    async def test_call_ok(self):
        call = self._stub.UnaryCall(messages_pb2.SimpleRequest())

        self.assertFalse(call.done())
        response = await call
        self.assertTrue(call.done())
        self.assertIsInstance(response, messages_pb2.SimpleResponse)
        self.assertEqual(await call.code(), grpc.StatusCode.OK)

        # Response is cached at call object level, reentrance
        # returns again the same response
        response_retry = await call
        self.assertIs(response, response_retry)

    async def test_call_rpc_error(self):
        async with aio.insecure_channel(UNREACHABLE_TARGET) as channel:
            stub = test_pb2_grpc.TestServiceStub(channel)

            call = stub.UnaryCall(messages_pb2.SimpleRequest())

            with self.assertRaises(aio.AioRpcError) as exception_context:
                await call

            self.assertEqual(
                grpc.StatusCode.UNAVAILABLE, exception_context.exception.code()
            )

            self.assertTrue(call.done())
            self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())

    async def test_call_code_awaitable(self):
        call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
        self.assertEqual(await call.code(), grpc.StatusCode.OK)

    async def test_call_details_awaitable(self):
        call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
        self.assertEqual("", await call.details())

    async def test_call_initial_metadata_awaitable(self):
        call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
        self.assertEqual(aio.Metadata(), await call.initial_metadata())

    async def test_call_trailing_metadata_awaitable(self):
        call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
        self.assertEqual(aio.Metadata(), await call.trailing_metadata())

    async def test_call_initial_metadata_cancelable(self):
        coro_started = asyncio.Event()
        call = self._stub.UnaryCall(messages_pb2.SimpleRequest())

        async def coro():
            coro_started.set()
            await call.initial_metadata()

        task = self.loop.create_task(coro())
        await coro_started.wait()
        task.cancel()

        # Test that initial metadata can still be asked even though
        # a cancellation happened with the previous task
        self.assertEqual(aio.Metadata(), await call.initial_metadata())

    async def test_call_initial_metadata_multiple_waiters(self):
        call = self._stub.UnaryCall(messages_pb2.SimpleRequest())

        async def coro():
            return await call.initial_metadata()

        task1 = self.loop.create_task(coro())
        task2 = self.loop.create_task(coro())

        await call
        expected = [aio.Metadata() for _ in range(2)]
        self.assertEqual(expected, await asyncio.gather(*[task1, task2]))

    async def test_call_code_cancelable(self):
        coro_started = asyncio.Event()
        call = self._stub.UnaryCall(messages_pb2.SimpleRequest())

        async def coro():
            coro_started.set()
            await call.code()

        task = self.loop.create_task(coro())
        await coro_started.wait()
        task.cancel()

        # Test that code can still be asked even though
        # a cancellation happened with the previous task
        self.assertEqual(grpc.StatusCode.OK, await call.code())

    async def test_call_code_multiple_waiters(self):
        call = self._stub.UnaryCall(messages_pb2.SimpleRequest())

        async def coro():
            return await call.code()

        task1 = self.loop.create_task(coro())
        task2 = self.loop.create_task(coro())

        await call

        self.assertEqual(
            [grpc.StatusCode.OK, grpc.StatusCode.OK],
            await asyncio.gather(task1, task2),
        )

    async def test_cancel_unary_unary(self):
        call = self._stub.UnaryCall(messages_pb2.SimpleRequest())

        self.assertFalse(call.cancelled())

        # Only the first cancel() takes effect.
        self.assertTrue(call.cancel())
        self.assertFalse(call.cancel())

        with self.assertRaises(asyncio.CancelledError):
            await call

        # The info in the RpcError should match the info in Call object.
        self.assertTrue(call.cancelled())
        self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
        # Consistency fix: reuse the module-level constant instead of
        # duplicating the detail-string literal (matches the other
        # cancellation tests in this file).
        self.assertEqual(
            await call.details(), _LOCAL_CANCEL_DETAILS_EXPECTATION
        )

    async def test_cancel_unary_unary_in_task(self):
        coro_started = asyncio.Event()
        call = self._stub.EmptyCall(messages_pb2.SimpleRequest())

        async def another_coro():
            coro_started.set()
            await call

        task = self.loop.create_task(another_coro())
        await coro_started.wait()

        self.assertFalse(task.done())
        task.cancel()

        # Cancelling the awaiting task must cancel the underlying RPC.
        self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())

        with self.assertRaises(asyncio.CancelledError):
            await task

    async def test_passing_credentials_fails_over_insecure_channel(self):
        call_credentials = grpc.composite_call_credentials(
            grpc.access_token_call_credentials("abc"),
            grpc.access_token_call_credentials("def"),
        )
        with self.assertRaisesRegex(
            aio.UsageError, "Call credentials are only valid on secure channels"
        ):
            self._stub.UnaryCall(
                messages_pb2.SimpleRequest(), credentials=call_credentials
            )
class TestUnaryStreamCall(_MulticallableTestMixin, AioTestBase):
async def test_call_rpc_error(self):
channel = aio.insecure_channel(UNREACHABLE_TARGET)
request = messages_pb2.StreamingOutputCallRequest()
stub = test_pb2_grpc.TestServiceStub(channel)
call = stub.StreamingOutputCall(request)
with self.assertRaises(aio.AioRpcError) as exception_context:
async for response in call:
pass
self.assertEqual(
grpc.StatusCode.UNAVAILABLE, exception_context.exception.code()
)
self.assertTrue(call.done())
self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
await channel.close()
async def test_cancel_unary_stream(self):
# Prepares the request
request = messages_pb2.StreamingOutputCallRequest()
for _ in range(_NUM_STREAM_RESPONSES):
request.response_parameters.append(
messages_pb2.ResponseParameters(
size=_RESPONSE_PAYLOAD_SIZE,
interval_us=_RESPONSE_INTERVAL_US,
)
)
# Invokes the actual RPC
call = self._stub.StreamingOutputCall(request)
self.assertFalse(call.cancelled())
response = await call.read()
self.assertIs(type(response), messages_pb2.StreamingOutputCallResponse)
self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
self.assertTrue(call.cancel())
self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
self.assertEqual(
_LOCAL_CANCEL_DETAILS_EXPECTATION, await call.details()
)
self.assertFalse(call.cancel())
with self.assertRaises(asyncio.CancelledError):
await call.read()
self.assertTrue(call.cancelled())
async def test_multiple_cancel_unary_stream(self):
# Prepares the request
request = messages_pb2.StreamingOutputCallRequest()
for _ in range(_NUM_STREAM_RESPONSES):
request.response_parameters.append(
messages_pb2.ResponseParameters(
size=_RESPONSE_PAYLOAD_SIZE,
interval_us=_RESPONSE_INTERVAL_US,
)
)
# Invokes the actual RPC
call = self._stub.StreamingOutputCall(request)
self.assertFalse(call.cancelled())
response = await call.read()
self.assertIs(type(response), messages_pb2.StreamingOutputCallResponse)
self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
self.assertTrue(call.cancel())
self.assertFalse(call.cancel())
self.assertFalse(call.cancel())
self.assertFalse(call.cancel())
with self.assertRaises(asyncio.CancelledError):
await call.read()
async def test_early_cancel_unary_stream(self):
"""Test cancellation before receiving messages."""
# Prepares the request
request = messages_pb2.StreamingOutputCallRequest()
for _ in range(_NUM_STREAM_RESPONSES):
request.response_parameters.append(
messages_pb2.ResponseParameters(
size=_RESPONSE_PAYLOAD_SIZE,
interval_us=_RESPONSE_INTERVAL_US,
)
)
# Invokes the actual RPC
call = self._stub.StreamingOutputCall(request)
self.assertFalse(call.cancelled())
self.assertTrue(call.cancel())
self.assertFalse(call.cancel())
with self.assertRaises(asyncio.CancelledError):
await call.read()
self.assertTrue(call.cancelled())
self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
self.assertEqual(
_LOCAL_CANCEL_DETAILS_EXPECTATION, await call.details()
)
async def test_late_cancel_unary_stream(self):
"""Test cancellation after received all messages."""
# Prepares the request
request = messages_pb2.StreamingOutputCallRequest()
for _ in range(_NUM_STREAM_RESPONSES):
request.response_parameters.append(
messages_pb2.ResponseParameters(
size=_RESPONSE_PAYLOAD_SIZE,
)
)
# Invokes the actual RPC
call = self._stub.StreamingOutputCall(request)
for _ in range(_NUM_STREAM_RESPONSES):
response = await call.read()
self.assertIs(
type(response), messages_pb2.StreamingOutputCallResponse
)
self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
# After all messages received, it is possible that the final state
# is received or on its way. It's basically a data race, so our
# expectation here is do not crash :)
call.cancel()
self.assertIn(
await call.code(), [grpc.StatusCode.OK, grpc.StatusCode.CANCELLED]
)
async def test_too_many_reads_unary_stream(self):
"""Test calling read after received all messages fails."""
# Prepares the request
request = messages_pb2.StreamingOutputCallRequest()
for _ in range(_NUM_STREAM_RESPONSES):
request.response_parameters.append(
messages_pb2.ResponseParameters(
size=_RESPONSE_PAYLOAD_SIZE,
)
)
# Invokes the actual RPC
call = self._stub.StreamingOutputCall(request)
for _ in range(_NUM_STREAM_RESPONSES):
response = await call.read()
self.assertIs(
type(response), messages_pb2.StreamingOutputCallResponse
)
self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
self.assertIs(await call.read(), aio.EOF)
# After the RPC is finished, further reads will lead to exception.
self.assertEqual(await call.code(), grpc.StatusCode.OK)
self.assertIs(await call.read(), aio.EOF)
async def test_unary_stream_async_generator(self):
"""Sunny day test case for unary_stream."""
# Prepares the request
request = messages_pb2.StreamingOutputCallRequest()
for _ in range(_NUM_STREAM_RESPONSES):
request.response_parameters.append(
messages_pb2.ResponseParameters(
size=_RESPONSE_PAYLOAD_SIZE,
)
)
# Invokes the actual RPC
call = self._stub.StreamingOutputCall(request)
self.assertFalse(call.cancelled())
async for response in call:
self.assertIs(
type(response), messages_pb2.StreamingOutputCallResponse
)
self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
self.assertEqual(await call.code(), grpc.StatusCode.OK)
    async def test_cancel_unary_stream_with_many_interleavings(self):
        """A cheap alternative to a structured fuzzer.

        Certain classes of error only appear for very specific interleavings of
        coroutines. Rather than inserting semi-private asyncio.Events throughout
        the implementation on which to coordinate and explicitly waiting on those
        in tests, we instead search for bugs over the space of interleavings by
        stochastically varying the durations of certain events within the test.
        """
        # We range over several orders of magnitude to ensure that switching platforms
        # (i.e. to slow CI machines) does not result in this test becoming a no-op.
        sleep_ranges = (10.0**-i for i in range(1, 4))
        for sleep_range in sleep_ranges:
            for _ in range(_NONDETERMINISTIC_ITERATIONS):
                # Randomize both the server-side response delay and the
                # client-side delay before cancelling, to explore interleavings.
                interval_us = random.randrange(
                    _NONDETERMINISTIC_SERVER_SLEEP_MAX_US
                )
                sleep_secs = sleep_range * random.random()
                coro_started = asyncio.Event()

                # Configs the server method to block forever
                request = messages_pb2.StreamingOutputCallRequest()
                request.response_parameters.append(
                    messages_pb2.ResponseParameters(
                        size=1,
                        interval_us=interval_us,
                    )
                )

                # Invokes the actual RPC
                call = self._stub.StreamingOutputCall(request)
                unhandled_error = False

                async def another_coro():
                    # Records whether the read raised anything other than
                    # CancelledError; the test only asserts "no crash".
                    nonlocal unhandled_error
                    coro_started.set()
                    try:
                        await call.read()
                    except asyncio.CancelledError:
                        pass
                    except Exception as e:
                        unhandled_error = True
                        raise

                task = self.loop.create_task(another_coro())
                # Ensure the reader coroutine is actually running before we
                # start the randomized delay-then-cancel sequence.
                await coro_started.wait()
                await asyncio.sleep(sleep_secs)
                task.cancel()
                try:
                    await task
                except asyncio.CancelledError:
                    pass
                self.assertFalse(unhandled_error)
async def test_cancel_unary_stream_in_task_using_read(self):
coro_started = asyncio.Event()
# Configs the server method to block forever
request = messages_pb2.StreamingOutputCallRequest()
request.response_parameters.append(
messages_pb2.ResponseParameters(
size=_RESPONSE_PAYLOAD_SIZE,
interval_us=_INFINITE_INTERVAL_US,
)
)
# Invokes the actual RPC
call = self._stub.StreamingOutputCall(request)
async def another_coro():
coro_started.set()
await call.read()
task = self.loop.create_task(another_coro())
await coro_started.wait()
self.assertFalse(task.done())
task.cancel()
self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
with self.assertRaises(asyncio.CancelledError):
await task
async def test_cancel_unary_stream_in_task_using_async_for(self):
coro_started = asyncio.Event()
# Configs the server method to block forever
request = messages_pb2.StreamingOutputCallRequest()
request.response_parameters.append(
messages_pb2.ResponseParameters(
size=_RESPONSE_PAYLOAD_SIZE,
interval_us=_INFINITE_INTERVAL_US,
)
)
# Invokes the actual RPC
call = self._stub.StreamingOutputCall(request)
async def another_coro():
coro_started.set()
async for _ in call:
pass
task = self.loop.create_task(another_coro())
await coro_started.wait()
self.assertFalse(task.done())
task.cancel()
self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
with self.assertRaises(asyncio.CancelledError):
await task
    async def test_time_remaining(self):
        """``time_remaining()`` tracks the deadline as server delays elapse."""
        request = messages_pb2.StreamingOutputCallRequest()
        # First message comes back immediately
        request.response_parameters.append(
            messages_pb2.ResponseParameters(
                size=_RESPONSE_PAYLOAD_SIZE,
            )
        )
        # Second message comes back after a unit of wait time
        request.response_parameters.append(
            messages_pb2.ResponseParameters(
                size=_RESPONSE_PAYLOAD_SIZE,
                interval_us=_RESPONSE_INTERVAL_US,
            )
        )

        # Timeout is two units so the second (delayed) message still arrives.
        call = self._stub.StreamingOutputCall(
            request, timeout=_SHORT_TIMEOUT_S * 2
        )

        response = await call.read()
        self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))

        # Should be around the same as the timeout
        # (bounds are loose half-unit windows to tolerate scheduling jitter).
        remained_time = call.time_remaining()
        self.assertGreater(remained_time, _SHORT_TIMEOUT_S * 3 / 2)
        self.assertLess(remained_time, _SHORT_TIMEOUT_S * 5 / 2)

        response = await call.read()
        self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))

        # Should be around the timeout minus a unit of wait time
        remained_time = call.time_remaining()
        self.assertGreater(remained_time, _SHORT_TIMEOUT_S / 2)
        self.assertLess(remained_time, _SHORT_TIMEOUT_S * 3 / 2)

        self.assertEqual(grpc.StatusCode.OK, await call.code())
async def test_empty_responses(self):
# Prepares the request
request = messages_pb2.StreamingOutputCallRequest()
for _ in range(_NUM_STREAM_RESPONSES):
request.response_parameters.append(
messages_pb2.ResponseParameters()
)
# Invokes the actual RPC
call = self._stub.StreamingOutputCall(request)
for _ in range(_NUM_STREAM_RESPONSES):
response = await call.read()
self.assertIs(
type(response), messages_pb2.StreamingOutputCallResponse
)
self.assertEqual(b"", response.SerializeToString())
self.assertEqual(grpc.StatusCode.OK, await call.code())
class TestStreamUnaryCall(_MulticallableTestMixin, AioTestBase):
    """Tests for the stream-unary multicallable of the async TestService stub."""

    async def test_cancel_stream_unary(self):
        """Cancelling after writing some requests leaves the call CANCELLED."""
        call = self._stub.StreamingInputCall()

        # Prepares the request
        payload = messages_pb2.Payload(body=b"\0" * _REQUEST_PAYLOAD_SIZE)
        request = messages_pb2.StreamingInputCallRequest(payload=payload)

        # Sends out requests
        for _ in range(_NUM_STREAM_RESPONSES):
            await call.write(request)

        # Cancels the RPC
        self.assertFalse(call.done())
        self.assertFalse(call.cancelled())
        self.assertTrue(call.cancel())
        self.assertTrue(call.cancelled())

        # done_writing after cancel must not raise.
        await call.done_writing()

        with self.assertRaises(asyncio.CancelledError):
            await call

    async def test_early_cancel_stream_unary(self):
        """Cancelling before any write makes subsequent writes invalid."""
        call = self._stub.StreamingInputCall()

        # Cancels the RPC
        self.assertFalse(call.done())
        self.assertFalse(call.cancelled())
        self.assertTrue(call.cancel())
        self.assertTrue(call.cancelled())

        with self.assertRaises(asyncio.InvalidStateError):
            await call.write(messages_pb2.StreamingInputCallRequest())

        # Should be no-op
        await call.done_writing()

        with self.assertRaises(asyncio.CancelledError):
            await call

    async def test_write_after_done_writing(self):
        """Writing after done_writing raises, but the RPC still succeeds."""
        call = self._stub.StreamingInputCall()

        # Prepares the request
        payload = messages_pb2.Payload(body=b"\0" * _REQUEST_PAYLOAD_SIZE)
        request = messages_pb2.StreamingInputCallRequest(payload=payload)

        # Sends out requests
        for _ in range(_NUM_STREAM_RESPONSES):
            await call.write(request)

        # Should be no-op
        await call.done_writing()

        with self.assertRaises(asyncio.InvalidStateError):
            await call.write(messages_pb2.StreamingInputCallRequest())

        response = await call
        self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
        self.assertEqual(
            _NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
            response.aggregated_payload_size,
        )

        self.assertEqual(await call.code(), grpc.StatusCode.OK)

    async def test_error_in_async_generator(self):
        """Cancelling the call propagates CancelledError into the request iterator."""
        # Server will pause between responses
        # NOTE(review): this builds a StreamingOutputCallRequest for a
        # StreamingInputCall; the RPC is cancelled before completion so the
        # mismatch appears harmless here — confirm this is intentional.
        request = messages_pb2.StreamingOutputCallRequest()
        request.response_parameters.append(
            messages_pb2.ResponseParameters(
                size=_RESPONSE_PAYLOAD_SIZE,
                interval_us=_RESPONSE_INTERVAL_US,
            )
        )

        # We expect the request iterator to receive the exception
        request_iterator_received_the_exception = asyncio.Event()

        async def request_iterator():
            with self.assertRaises(asyncio.CancelledError):
                for _ in range(_NUM_STREAM_RESPONSES):
                    yield request
                    await asyncio.sleep(_SHORT_TIMEOUT_S)
            request_iterator_received_the_exception.set()

        call = self._stub.StreamingInputCall(request_iterator())

        # Cancel the RPC after at least one response
        async def cancel_later():
            await asyncio.sleep(_SHORT_TIMEOUT_S * 2)
            call.cancel()

        cancel_later_task = self.loop.create_task(cancel_later())

        with self.assertRaises(asyncio.CancelledError):
            await call

        await request_iterator_received_the_exception.wait()

        # No failures in the cancel later task!
        await cancel_later_task

    async def test_normal_iterable_requests(self):
        """A plain (non-async) iterable of requests is accepted."""
        # Prepares the request
        payload = messages_pb2.Payload(body=b"\0" * _REQUEST_PAYLOAD_SIZE)
        request = messages_pb2.StreamingInputCallRequest(payload=payload)
        requests = [request] * _NUM_STREAM_RESPONSES

        # Sends out requests
        call = self._stub.StreamingInputCall(requests)

        # RPC should succeed
        response = await call
        self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
        self.assertEqual(
            _NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
            response.aggregated_payload_size,
        )

        self.assertEqual(await call.code(), grpc.StatusCode.OK)

    async def test_call_rpc_error(self):
        """An unreachable target fails the call with UNAVAILABLE."""
        async with aio.insecure_channel(UNREACHABLE_TARGET) as channel:
            stub = test_pb2_grpc.TestServiceStub(channel)

            # The error should be raised automatically without any traffic.
            call = stub.StreamingInputCall()
            with self.assertRaises(aio.AioRpcError) as exception_context:
                await call

            self.assertEqual(
                grpc.StatusCode.UNAVAILABLE, exception_context.exception.code()
            )

            self.assertTrue(call.done())
            self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())

    async def test_timeout(self):
        """A short deadline with no traffic yields DEADLINE_EXCEEDED."""
        call = self._stub.StreamingInputCall(timeout=_SHORT_TIMEOUT_S)

        # The error should be raised automatically without any traffic.
        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call
        rpc_error = exception_context.exception
        self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())

        self.assertTrue(call.done())
        self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, await call.code())
# Shared request fixtures for the ping-pong style stream-stream tests below:
# one requests a single payload-bearing response, the other a single empty one.
_STREAM_OUTPUT_REQUEST_ONE_RESPONSE = messages_pb2.StreamingOutputCallRequest()
_STREAM_OUTPUT_REQUEST_ONE_RESPONSE.response_parameters.append(
    messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
)

_STREAM_OUTPUT_REQUEST_ONE_EMPTY_RESPONSE = (
    messages_pb2.StreamingOutputCallRequest()
)
_STREAM_OUTPUT_REQUEST_ONE_EMPTY_RESPONSE.response_parameters.append(
    messages_pb2.ResponseParameters()
)
class TestStreamStreamCall(_MulticallableTestMixin, AioTestBase):
    """Tests for the stream-stream multicallable of the async TestService stub."""

    async def test_cancel(self):
        """Cancelling mid ping-pong leaves the call CANCELLED."""
        # Invokes the actual RPC
        call = self._stub.FullDuplexCall()

        for _ in range(_NUM_STREAM_RESPONSES):
            await call.write(_STREAM_OUTPUT_REQUEST_ONE_RESPONSE)
            response = await call.read()
            self.assertIsInstance(
                response, messages_pb2.StreamingOutputCallResponse
            )
            self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))

        # Cancels the RPC
        self.assertFalse(call.done())
        self.assertFalse(call.cancelled())
        self.assertTrue(call.cancel())
        self.assertTrue(call.cancelled())
        self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())

    async def test_cancel_with_pending_read(self):
        """Cancelling while a response is still pending succeeds."""
        call = self._stub.FullDuplexCall()

        await call.write(_STREAM_OUTPUT_REQUEST_ONE_RESPONSE)

        # Cancels the RPC
        self.assertFalse(call.done())
        self.assertFalse(call.cancelled())
        self.assertTrue(call.cancel())
        self.assertTrue(call.cancelled())
        self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())

    async def test_cancel_with_ongoing_read(self):
        """Cancelling while another task is blocked in read() succeeds."""
        call = self._stub.FullDuplexCall()
        coro_started = asyncio.Event()

        async def read_coro():
            coro_started.set()
            await call.read()

        read_task = self.loop.create_task(read_coro())
        await coro_started.wait()
        self.assertFalse(read_task.done())

        # Cancels the RPC
        self.assertFalse(call.done())
        self.assertFalse(call.cancelled())
        self.assertTrue(call.cancel())
        self.assertTrue(call.cancelled())
        self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())

    async def test_early_cancel(self):
        """Cancelling before any traffic succeeds."""
        call = self._stub.FullDuplexCall()

        # Cancels the RPC
        self.assertFalse(call.done())
        self.assertFalse(call.cancelled())
        self.assertTrue(call.cancel())
        self.assertTrue(call.cancelled())
        self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())

    async def test_cancel_after_done_writing(self):
        """Cancelling after closing the write side still cancels the RPC."""
        call = self._stub.FullDuplexCall()
        await call.done_writing()

        # Cancels the RPC
        self.assertFalse(call.done())
        self.assertFalse(call.cancelled())
        self.assertTrue(call.cancel())
        self.assertTrue(call.cancelled())
        self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())

    async def test_late_cancel(self):
        """Cancelling a finished RPC is a no-op and keeps the OK status."""
        call = self._stub.FullDuplexCall()
        await call.done_writing()
        self.assertEqual(grpc.StatusCode.OK, await call.code())

        # Cancels the RPC
        self.assertTrue(call.done())
        self.assertFalse(call.cancelled())
        self.assertFalse(call.cancel())
        self.assertFalse(call.cancelled())

        # Status is still OK
        self.assertEqual(grpc.StatusCode.OK, await call.code())

    async def test_async_generator(self):
        """An async generator of requests drives the ping-pong to completion."""

        async def request_generator():
            yield _STREAM_OUTPUT_REQUEST_ONE_RESPONSE
            yield _STREAM_OUTPUT_REQUEST_ONE_RESPONSE

        call = self._stub.FullDuplexCall(request_generator())
        async for response in call:
            self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))

        self.assertEqual(await call.code(), grpc.StatusCode.OK)

    async def test_too_many_reads(self):
        """Reading past the last response yields EOF, before and after finish."""

        async def request_generator():
            for _ in range(_NUM_STREAM_RESPONSES):
                yield _STREAM_OUTPUT_REQUEST_ONE_RESPONSE

        call = self._stub.FullDuplexCall(request_generator())
        for _ in range(_NUM_STREAM_RESPONSES):
            response = await call.read()
            self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
        self.assertIs(await call.read(), aio.EOF)

        self.assertEqual(await call.code(), grpc.StatusCode.OK)
        # After the RPC finished, the read should also produce EOF
        self.assertIs(await call.read(), aio.EOF)

    async def test_read_write_after_done_writing(self):
        """done_writing blocks further writes but leaves reads working."""
        call = self._stub.FullDuplexCall()

        # Writes two requests, and pending two requests
        await call.write(_STREAM_OUTPUT_REQUEST_ONE_RESPONSE)
        await call.write(_STREAM_OUTPUT_REQUEST_ONE_RESPONSE)
        await call.done_writing()

        # Further write should fail
        with self.assertRaises(asyncio.InvalidStateError):
            await call.write(_STREAM_OUTPUT_REQUEST_ONE_RESPONSE)

        # But read should be unaffected
        response = await call.read()
        self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
        response = await call.read()
        self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))

        self.assertEqual(await call.code(), grpc.StatusCode.OK)

    async def test_error_in_async_generator(self):
        """Cancelling the call propagates CancelledError into the request iterator."""
        # Server will pause between responses
        request = messages_pb2.StreamingOutputCallRequest()
        request.response_parameters.append(
            messages_pb2.ResponseParameters(
                size=_RESPONSE_PAYLOAD_SIZE,
                interval_us=_RESPONSE_INTERVAL_US,
            )
        )

        # We expect the request iterator to receive the exception
        request_iterator_received_the_exception = asyncio.Event()

        async def request_iterator():
            with self.assertRaises(asyncio.CancelledError):
                for _ in range(_NUM_STREAM_RESPONSES):
                    yield request
                    await asyncio.sleep(_SHORT_TIMEOUT_S)
            request_iterator_received_the_exception.set()

        call = self._stub.FullDuplexCall(request_iterator())

        # Cancel the RPC after at least one response
        async def cancel_later():
            await asyncio.sleep(_SHORT_TIMEOUT_S * 2)
            call.cancel()

        cancel_later_task = self.loop.create_task(cancel_later())

        with self.assertRaises(asyncio.CancelledError):
            async for response in call:
                self.assertEqual(
                    _RESPONSE_PAYLOAD_SIZE, len(response.payload.body)
                )

        await request_iterator_received_the_exception.wait()

        self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
        # No failures in the cancel later task!
        await cancel_later_task

    async def test_normal_iterable_requests(self):
        """A plain (non-async) iterator of requests is accepted."""
        requests = [_STREAM_OUTPUT_REQUEST_ONE_RESPONSE] * _NUM_STREAM_RESPONSES

        call = self._stub.FullDuplexCall(iter(requests))
        async for response in call:
            self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))

        self.assertEqual(await call.code(), grpc.StatusCode.OK)

    async def test_empty_ping_pong(self):
        """Zero-byte messages round-trip through the duplex stream."""
        call = self._stub.FullDuplexCall()
        for _ in range(_NUM_STREAM_RESPONSES):
            await call.write(_STREAM_OUTPUT_REQUEST_ONE_EMPTY_RESPONSE)
            response = await call.read()
            self.assertEqual(b"", response.SerializeToString())
        await call.done_writing()
        self.assertEqual(await call.code(), grpc.StatusCode.OK)
if __name__ == "__main__":
    # Verbose logging helps diagnose flaky async interleavings in CI output.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 34,117
| 34.951528
| 86
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/channelz/__init__.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 580
| 40.5
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/channelz/channelz_servicer_test.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests of grpc_channelz.v1.channelz."""
import asyncio
import logging
import unittest
import grpc
from grpc.experimental import aio
from grpc_channelz.v1 import channelz
from grpc_channelz.v1 import channelz_pb2
from grpc_channelz.v1 import channelz_pb2_grpc
from tests.unit.framework.common import test_constants
from tests_aio.unit._test_base import AioTestBase
# Method paths served by _GenericHandler below.
_SUCCESSFUL_UNARY_UNARY = "/test/SuccessfulUnaryUnary"
_FAILED_UNARY_UNARY = "/test/FailedUnaryUnary"
_SUCCESSFUL_STREAM_STREAM = "/test/SuccessfulStreamStream"

# Raw wire payloads (handlers use bytes directly; no protobuf messages).
_REQUEST = b"\x00\x00\x00"
_RESPONSE = b"\x01\x01\x01"

# Channel/server option tuples toggling SO_REUSEPORT and channelz tracking.
_DISABLE_REUSE_PORT = (("grpc.so_reuseport", 0),)
_ENABLE_CHANNELZ = (("grpc.enable_channelz", 1),)
_DISABLE_CHANNELZ = (("grpc.enable_channelz", 0),)

# An id assumed to be larger than any channelz ref id allocated in a test run.
_LARGE_UNASSIGNED_ID = 10000
async def _successful_unary_unary(request, servicer_context):
    """Unary-unary handler that always succeeds with the fixed response bytes."""
    return _RESPONSE
async def _failed_unary_unary(request, servicer_context):
    """Unary-unary handler that always fails with INTERNAL (returns no message)."""
    servicer_context.set_code(grpc.StatusCode.INTERNAL)
    servicer_context.set_details("Channelz Test Intended Failure")
async def _successful_stream_stream(request_iterator, servicer_context):
    """Stream-stream handler: echoes one fixed response per received request."""
    async for _ in request_iterator:
        yield _RESPONSE
class _GenericHandler(grpc.GenericRpcHandler):
    """Routes the test method paths to their corresponding RPC handlers."""

    def service(self, handler_call_details):
        """Return the handler for the requested method, or None if unknown."""
        method = handler_call_details.method
        if method == _SUCCESSFUL_UNARY_UNARY:
            return grpc.unary_unary_rpc_method_handler(_successful_unary_unary)
        if method == _FAILED_UNARY_UNARY:
            return grpc.unary_unary_rpc_method_handler(_failed_unary_unary)
        if method == _SUCCESSFUL_STREAM_STREAM:
            return grpc.stream_stream_rpc_method_handler(
                _successful_stream_stream
            )
        return None
class _ChannelServerPair:
    """A channelz-enabled server plus a channelz-enabled channel pointed at it.

    ``start`` must be called before use; ``bind_channelz`` resolves the pair's
    channelz ref ids via a separate channelz stub; ``stop`` tears both down.
    """

    def __init__(self):
        # Populated by start()/bind_channelz(); not usable until then.
        self.address = ""
        self.server = None
        self.channel = None
        self.server_ref_id = None
        self.channel_ref_id = None

    async def start(self):
        """Start the server on an ephemeral port and open a channel to it."""
        # Server will enable channelz service
        self.server = aio.server(options=_DISABLE_REUSE_PORT + _ENABLE_CHANNELZ)
        port = self.server.add_insecure_port("[::]:0")
        self.address = "localhost:%d" % port
        self.server.add_generic_rpc_handlers((_GenericHandler(),))
        await self.server.start()

        # Channel will enable channelz service...
        self.channel = aio.insecure_channel(
            self.address, options=_ENABLE_CHANNELZ
        )

    async def bind_channelz(self, channelz_stub):
        """Look up and record this pair's channelz channel and server ref ids."""
        # Match our channel by target address among all tracked channels.
        resp = await channelz_stub.GetTopChannels(
            channelz_pb2.GetTopChannelsRequest(start_channel_id=0)
        )
        for channel in resp.channel:
            if channel.data.target == self.address:
                self.channel_ref_id = channel.ref.channel_id

        # Servers carry no target, so take the most recently registered one.
        resp = await channelz_stub.GetServers(
            channelz_pb2.GetServersRequest(start_server_id=0)
        )
        self.server_ref_id = resp.server[-1].ref.server_id

    async def stop(self):
        """Close the channel, then shut the server down immediately."""
        await self.channel.close()
        await self.server.stop(None)
async def _create_channel_server_pairs(n, channelz_stub=None):
    """Start *n* channel-server pairs, optionally binding each to channelz.

    When ``channelz_stub`` is given, every pair also resolves its channelz
    ref ids before the next pair is started.
    """
    pairs = [_ChannelServerPair() for _ in range(n)]
    for channel_server in pairs:
        await channel_server.start()
        if channelz_stub:
            await channel_server.bind_channelz(channelz_stub)
    return pairs
async def _destroy_channel_server_pairs(pairs):
    """Tear down every channel-server pair, one at a time."""
    for channel_server in pairs:
        await channel_server.stop()
class ChannelzServicerTest(AioTestBase):
    """End-to-end tests of the channelz servicer over live channel/server pairs.

    A dedicated channelz-disabled server/channel pair is used to query
    channelz, so the queries themselves do not perturb the stats under test.
    """

    async def setUp(self):
        # This server is for Channelz info fetching only
        # It itself should not enable Channelz
        self._server = aio.server(
            options=_DISABLE_REUSE_PORT + _DISABLE_CHANNELZ
        )
        port = self._server.add_insecure_port("[::]:0")
        channelz.add_channelz_servicer(self._server)
        await self._server.start()

        # This channel is used to fetch Channelz info only
        # Channelz should not be enabled
        self._channel = aio.insecure_channel(
            "localhost:%d" % port, options=_DISABLE_CHANNELZ
        )
        self._channelz_stub = channelz_pb2_grpc.ChannelzStub(self._channel)

    async def tearDown(self):
        # Close the query channel before stopping its server.
        await self._channel.close()
        await self._server.stop(None)

    async def _get_server_by_ref_id(self, ref_id):
        """Server id may not be consecutive"""
        resp = await self._channelz_stub.GetServers(
            channelz_pb2.GetServersRequest(start_server_id=ref_id)
        )
        self.assertEqual(ref_id, resp.server[0].ref.server_id)
        return resp.server[0]

    async def _send_successful_unary_unary(self, pair):
        """Issue one unary RPC on *pair* that is expected to succeed."""
        call = pair.channel.unary_unary(_SUCCESSFUL_UNARY_UNARY)(_REQUEST)
        self.assertEqual(grpc.StatusCode.OK, await call.code())

    async def _send_failed_unary_unary(self, pair):
        """Issue one unary RPC on *pair* that is expected to fail."""
        try:
            await pair.channel.unary_unary(_FAILED_UNARY_UNARY)(_REQUEST)
        except grpc.RpcError:
            return
        else:
            self.fail("This call supposed to fail")

    async def _send_successful_stream_stream(self, pair):
        """Run one full-length ping-pong stream on *pair* and count responses."""
        call = pair.channel.stream_stream(_SUCCESSFUL_STREAM_STREAM)(
            iter([_REQUEST] * test_constants.STREAM_LENGTH)
        )
        cnt = 0
        async for _ in call:
            cnt += 1
        self.assertEqual(cnt, test_constants.STREAM_LENGTH)

    async def test_get_top_channels_high_start_id(self):
        """Querying past every assigned channel id returns an empty, ended page."""
        pairs = await _create_channel_server_pairs(1)

        resp = await self._channelz_stub.GetTopChannels(
            channelz_pb2.GetTopChannelsRequest(
                start_channel_id=_LARGE_UNASSIGNED_ID
            )
        )
        self.assertEqual(len(resp.channel), 0)
        self.assertEqual(resp.end, True)

        await _destroy_channel_server_pairs(pairs)

    async def test_successful_request(self):
        """One successful call shows up as started+succeeded on the channel."""
        pairs = await _create_channel_server_pairs(1, self._channelz_stub)

        await self._send_successful_unary_unary(pairs[0])
        resp = await self._channelz_stub.GetChannel(
            channelz_pb2.GetChannelRequest(channel_id=pairs[0].channel_ref_id)
        )

        self.assertEqual(resp.channel.data.calls_started, 1)
        self.assertEqual(resp.channel.data.calls_succeeded, 1)
        self.assertEqual(resp.channel.data.calls_failed, 0)

        await _destroy_channel_server_pairs(pairs)

    async def test_failed_request(self):
        """One failed call shows up as started+failed on the channel."""
        pairs = await _create_channel_server_pairs(1, self._channelz_stub)

        await self._send_failed_unary_unary(pairs[0])
        resp = await self._channelz_stub.GetChannel(
            channelz_pb2.GetChannelRequest(channel_id=pairs[0].channel_ref_id)
        )
        self.assertEqual(resp.channel.data.calls_started, 1)
        self.assertEqual(resp.channel.data.calls_succeeded, 0)
        self.assertEqual(resp.channel.data.calls_failed, 1)

        await _destroy_channel_server_pairs(pairs)

    async def test_many_requests(self):
        """Mixed successes and failures are tallied separately per channel."""
        pairs = await _create_channel_server_pairs(1, self._channelz_stub)

        k_success = 7
        k_failed = 9
        for i in range(k_success):
            await self._send_successful_unary_unary(pairs[0])
        for i in range(k_failed):
            await self._send_failed_unary_unary(pairs[0])

        resp = await self._channelz_stub.GetChannel(
            channelz_pb2.GetChannelRequest(channel_id=pairs[0].channel_ref_id)
        )
        self.assertEqual(resp.channel.data.calls_started, k_success + k_failed)
        self.assertEqual(resp.channel.data.calls_succeeded, k_success)
        self.assertEqual(resp.channel.data.calls_failed, k_failed)

        await _destroy_channel_server_pairs(pairs)

    async def test_many_requests_many_channel(self):
        """Each channel's stats reflect only the traffic it carried."""
        k_channels = 4
        pairs = await _create_channel_server_pairs(
            k_channels, self._channelz_stub
        )
        k_success = 11
        k_failed = 13
        for i in range(k_success):
            await self._send_successful_unary_unary(pairs[0])
            await self._send_successful_unary_unary(pairs[2])
        for i in range(k_failed):
            await self._send_failed_unary_unary(pairs[1])
            await self._send_failed_unary_unary(pairs[2])

        # The first channel saw only successes
        resp = await self._channelz_stub.GetChannel(
            channelz_pb2.GetChannelRequest(channel_id=pairs[0].channel_ref_id)
        )
        self.assertEqual(resp.channel.data.calls_started, k_success)
        self.assertEqual(resp.channel.data.calls_succeeded, k_success)
        self.assertEqual(resp.channel.data.calls_failed, 0)

        # The second channel saw only failures
        resp = await self._channelz_stub.GetChannel(
            channelz_pb2.GetChannelRequest(channel_id=pairs[1].channel_ref_id)
        )
        self.assertEqual(resp.channel.data.calls_started, k_failed)
        self.assertEqual(resp.channel.data.calls_succeeded, 0)
        self.assertEqual(resp.channel.data.calls_failed, k_failed)

        # The third channel saw both successes and failures
        resp = await self._channelz_stub.GetChannel(
            channelz_pb2.GetChannelRequest(channel_id=pairs[2].channel_ref_id)
        )
        self.assertEqual(resp.channel.data.calls_started, k_success + k_failed)
        self.assertEqual(resp.channel.data.calls_succeeded, k_success)
        self.assertEqual(resp.channel.data.calls_failed, k_failed)

        # The fourth channel saw nothing
        resp = await self._channelz_stub.GetChannel(
            channelz_pb2.GetChannelRequest(channel_id=pairs[3].channel_ref_id)
        )
        self.assertEqual(resp.channel.data.calls_started, 0)
        self.assertEqual(resp.channel.data.calls_succeeded, 0)
        self.assertEqual(resp.channel.data.calls_failed, 0)

        await _destroy_channel_server_pairs(pairs)

    async def test_many_subchannels(self):
        """Subchannel stats mirror their parent channel's call counters."""
        k_channels = 4
        pairs = await _create_channel_server_pairs(
            k_channels, self._channelz_stub
        )
        k_success = 17
        k_failed = 19
        for i in range(k_success):
            await self._send_successful_unary_unary(pairs[0])
            await self._send_successful_unary_unary(pairs[2])
        for i in range(k_failed):
            await self._send_failed_unary_unary(pairs[1])
            await self._send_failed_unary_unary(pairs[2])

        for i in range(k_channels):
            gc_resp = await self._channelz_stub.GetChannel(
                channelz_pb2.GetChannelRequest(
                    channel_id=pairs[i].channel_ref_id
                )
            )

            # If no call performed in the channel, there shouldn't be any subchannel
            if gc_resp.channel.data.calls_started == 0:
                self.assertEqual(len(gc_resp.channel.subchannel_ref), 0)
                continue

            # Otherwise, the subchannel should exist
            self.assertGreater(len(gc_resp.channel.subchannel_ref), 0)
            gsc_resp = await self._channelz_stub.GetSubchannel(
                channelz_pb2.GetSubchannelRequest(
                    subchannel_id=gc_resp.channel.subchannel_ref[
                        0
                    ].subchannel_id
                )
            )
            self.assertEqual(
                gc_resp.channel.data.calls_started,
                gsc_resp.subchannel.data.calls_started,
            )
            self.assertEqual(
                gc_resp.channel.data.calls_succeeded,
                gsc_resp.subchannel.data.calls_succeeded,
            )
            self.assertEqual(
                gc_resp.channel.data.calls_failed,
                gsc_resp.subchannel.data.calls_failed,
            )

        await _destroy_channel_server_pairs(pairs)

    async def test_server_call(self):
        """Server-side call counters match the traffic sent by the client."""
        pairs = await _create_channel_server_pairs(1, self._channelz_stub)

        k_success = 23
        k_failed = 29
        for i in range(k_success):
            await self._send_successful_unary_unary(pairs[0])
        for i in range(k_failed):
            await self._send_failed_unary_unary(pairs[0])

        resp = await self._get_server_by_ref_id(pairs[0].server_ref_id)
        self.assertEqual(resp.data.calls_started, k_success + k_failed)
        self.assertEqual(resp.data.calls_succeeded, k_success)
        self.assertEqual(resp.data.calls_failed, k_failed)

        await _destroy_channel_server_pairs(pairs)

    async def test_many_subchannels_and_sockets(self):
        """Socket stream/message counters agree with the subchannel's calls."""
        k_channels = 4
        pairs = await _create_channel_server_pairs(
            k_channels, self._channelz_stub
        )
        k_success = 3
        k_failed = 5
        for i in range(k_success):
            await self._send_successful_unary_unary(pairs[0])
            await self._send_successful_unary_unary(pairs[2])
        for i in range(k_failed):
            await self._send_failed_unary_unary(pairs[1])
            await self._send_failed_unary_unary(pairs[2])

        for i in range(k_channels):
            gc_resp = await self._channelz_stub.GetChannel(
                channelz_pb2.GetChannelRequest(
                    channel_id=pairs[i].channel_ref_id
                )
            )

            # If no call performed in the channel, there shouldn't be any subchannel
            if gc_resp.channel.data.calls_started == 0:
                self.assertEqual(len(gc_resp.channel.subchannel_ref), 0)
                continue

            # Otherwise, the subchannel should exist
            self.assertGreater(len(gc_resp.channel.subchannel_ref), 0)
            gsc_resp = await self._channelz_stub.GetSubchannel(
                channelz_pb2.GetSubchannelRequest(
                    subchannel_id=gc_resp.channel.subchannel_ref[
                        0
                    ].subchannel_id
                )
            )
            self.assertEqual(len(gsc_resp.subchannel.socket_ref), 1)

            gs_resp = await self._channelz_stub.GetSocket(
                channelz_pb2.GetSocketRequest(
                    socket_id=gsc_resp.subchannel.socket_ref[0].socket_id
                )
            )
            self.assertEqual(
                gsc_resp.subchannel.data.calls_started,
                gs_resp.socket.data.streams_started,
            )
            self.assertEqual(0, gs_resp.socket.data.streams_failed)
            # Calls started == messages sent, only valid for unary calls
            self.assertEqual(
                gsc_resp.subchannel.data.calls_started,
                gs_resp.socket.data.messages_sent,
            )

        await _destroy_channel_server_pairs(pairs)

    async def test_streaming_rpc(self):
        """Streaming stats propagate from channel to subchannel to socket."""
        pairs = await _create_channel_server_pairs(1, self._channelz_stub)
        # In C++, the argument for _send_successful_stream_stream is message length.
        # Here the argument is still channel idx, to be consistent with the other two.
        await self._send_successful_stream_stream(pairs[0])

        gc_resp = await self._channelz_stub.GetChannel(
            channelz_pb2.GetChannelRequest(channel_id=pairs[0].channel_ref_id)
        )
        self.assertEqual(gc_resp.channel.data.calls_started, 1)
        self.assertEqual(gc_resp.channel.data.calls_succeeded, 1)
        self.assertEqual(gc_resp.channel.data.calls_failed, 0)

        # Subchannel exists
        self.assertGreater(len(gc_resp.channel.subchannel_ref), 0)

        # Poll until the subchannel's completion counters catch up with the
        # started count (the stats update is asynchronous w.r.t. the RPC).
        while True:
            gsc_resp = await self._channelz_stub.GetSubchannel(
                channelz_pb2.GetSubchannelRequest(
                    subchannel_id=gc_resp.channel.subchannel_ref[
                        0
                    ].subchannel_id
                )
            )
            if (
                gsc_resp.subchannel.data.calls_started
                == gsc_resp.subchannel.data.calls_succeeded
                + gsc_resp.subchannel.data.calls_failed
            ):
                break
        self.assertEqual(gsc_resp.subchannel.data.calls_started, 1)
        self.assertEqual(gsc_resp.subchannel.data.calls_failed, 0)
        self.assertEqual(gsc_resp.subchannel.data.calls_succeeded, 1)

        # Socket exists
        self.assertEqual(len(gsc_resp.subchannel.socket_ref), 1)
        # Same asynchronous-update polling at the socket level.
        while True:
            gs_resp = await self._channelz_stub.GetSocket(
                channelz_pb2.GetSocketRequest(
                    socket_id=gsc_resp.subchannel.socket_ref[0].socket_id
                )
            )
            if (
                gs_resp.socket.data.streams_started
                == gs_resp.socket.data.streams_succeeded
                + gs_resp.socket.data.streams_failed
            ):
                break
        self.assertEqual(gs_resp.socket.data.streams_started, 1)
        self.assertEqual(gs_resp.socket.data.streams_failed, 0)
        self.assertEqual(gs_resp.socket.data.streams_succeeded, 1)
        self.assertEqual(
            gs_resp.socket.data.messages_sent, test_constants.STREAM_LENGTH
        )
        self.assertEqual(
            gs_resp.socket.data.messages_received, test_constants.STREAM_LENGTH
        )

        await _destroy_channel_server_pairs(pairs)

    async def test_server_sockets(self):
        """GetServerSockets on a live server responds without error."""
        pairs = await _create_channel_server_pairs(1, self._channelz_stub)

        await self._send_successful_unary_unary(pairs[0])
        await self._send_failed_unary_unary(pairs[0])

        resp = await self._get_server_by_ref_id(pairs[0].server_ref_id)
        self.assertEqual(resp.data.calls_started, 2)
        self.assertEqual(resp.data.calls_succeeded, 1)
        self.assertEqual(resp.data.calls_failed, 1)

        gss_resp = await self._channelz_stub.GetServerSockets(
            channelz_pb2.GetServerSocketsRequest(
                server_id=resp.ref.server_id, start_socket_id=0
            )
        )
        # If the RPC call failed, it will raise a grpc.RpcError
        # So, if there is no exception raised, considered pass

        await _destroy_channel_server_pairs(pairs)

    async def test_server_listen_sockets(self):
        """A started server exposes exactly one queryable listen socket."""
        pairs = await _create_channel_server_pairs(1, self._channelz_stub)

        resp = await self._get_server_by_ref_id(pairs[0].server_ref_id)
        self.assertEqual(len(resp.listen_socket), 1)

        gs_resp = await self._channelz_stub.GetSocket(
            channelz_pb2.GetSocketRequest(
                socket_id=resp.listen_socket[0].socket_id
            )
        )
        # If the RPC call failed, it will raise a grpc.RpcError
        # So, if there is no exception raised, considered pass

        await _destroy_channel_server_pairs(pairs)

    async def test_invalid_query_get_server(self):
        """GetServer with an unassigned id fails with NOT_FOUND."""
        with self.assertRaises(aio.AioRpcError) as exception_context:
            await self._channelz_stub.GetServer(
                channelz_pb2.GetServerRequest(server_id=_LARGE_UNASSIGNED_ID)
            )
        self.assertEqual(
            grpc.StatusCode.NOT_FOUND, exception_context.exception.code()
        )

    async def test_invalid_query_get_channel(self):
        """GetChannel with an unassigned id fails with NOT_FOUND."""
        with self.assertRaises(aio.AioRpcError) as exception_context:
            await self._channelz_stub.GetChannel(
                channelz_pb2.GetChannelRequest(channel_id=_LARGE_UNASSIGNED_ID)
            )
        self.assertEqual(
            grpc.StatusCode.NOT_FOUND, exception_context.exception.code()
        )

    async def test_invalid_query_get_subchannel(self):
        """GetSubchannel with an unassigned id fails with NOT_FOUND."""
        with self.assertRaises(aio.AioRpcError) as exception_context:
            await self._channelz_stub.GetSubchannel(
                channelz_pb2.GetSubchannelRequest(
                    subchannel_id=_LARGE_UNASSIGNED_ID
                )
            )
        self.assertEqual(
            grpc.StatusCode.NOT_FOUND, exception_context.exception.code()
        )

    async def test_invalid_query_get_socket(self):
        """GetSocket with an unassigned id fails with NOT_FOUND."""
        with self.assertRaises(aio.AioRpcError) as exception_context:
            await self._channelz_stub.GetSocket(
                channelz_pb2.GetSocketRequest(socket_id=_LARGE_UNASSIGNED_ID)
            )
        self.assertEqual(
            grpc.StatusCode.NOT_FOUND, exception_context.exception.code()
        )

    async def test_invalid_query_get_server_sockets(self):
        """GetServerSockets with an unassigned id fails with NOT_FOUND."""
        with self.assertRaises(aio.AioRpcError) as exception_context:
            await self._channelz_stub.GetServerSockets(
                channelz_pb2.GetServerSocketsRequest(
                    server_id=_LARGE_UNASSIGNED_ID,
                    start_socket_id=0,
                )
            )
        self.assertEqual(
            grpc.StatusCode.NOT_FOUND, exception_context.exception.code()
        )
if __name__ == "__main__":
    # Debug-level logging helps diagnose flaky channelz interactions.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 21,327
| 37.707804
| 86
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/status/grpc_status_test.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests of grpc_status with gRPC AsyncIO stack."""
import logging
import traceback
import unittest
from google.protobuf import any_pb2
from google.rpc import code_pb2
from google.rpc import error_details_pb2
from google.rpc import status_pb2
import grpc
from grpc.experimental import aio
from grpc_status import rpc_status
from tests_aio.unit._test_base import AioTestBase
# Full method paths routed by _GenericHandler below.
_STATUS_OK = "/test/StatusOK"
_STATUS_NOT_OK = "/test/StatusNotOk"
_ERROR_DETAILS = "/test/ErrorDetails"
_INCONSISTENT = "/test/Inconsistent"
_INVALID_CODE = "/test/InvalidCode"
# Canned request/response payloads (no protobuf codec is used).
_REQUEST = b"\x00\x00\x00"
_RESPONSE = b"\x01\x01\x01"
# Trailing-metadata key where gRPC carries the serialized rich status.
_GRPC_DETAILS_METADATA_KEY = "grpc-status-details-bin"
_STATUS_DETAILS = "This is an error detail"
_STATUS_DETAILS_ANOTHER = "This is another error detail"
async def _ok_unary_unary(request, servicer_context):
    """Handler that always succeeds with the canned response bytes."""
    del request, servicer_context  # unused
    return _RESPONSE
async def _not_ok_unary_unary(request, servicer_context):
    """Handler that aborts with INTERNAL and a plain detail string."""
    del request  # unused
    await servicer_context.abort(grpc.StatusCode.INTERNAL, _STATUS_DETAILS)
async def _error_details_unary_unary(request, servicer_context):
    """Handler that aborts with a rich status carrying a packed DebugInfo."""
    del request  # unused
    debug_info = error_details_pb2.DebugInfo(
        stack_entries=traceback.format_stack(),
        detail="Intentionally invoked",
    )
    packed_detail = any_pb2.Any()
    packed_detail.Pack(debug_info)
    rich_status = status_pb2.Status(
        code=code_pb2.INTERNAL,
        message=_STATUS_DETAILS,
        details=[packed_detail],
    )
    await servicer_context.abort_with_status(rpc_status.to_status(rich_status))
async def _inconsistent_unary_unary(request, servicer_context):
    """Handler whose code/details disagree with its trailing rich status."""
    del request  # unused
    mismatched_status = status_pb2.Status(
        code=code_pb2.INTERNAL,
        message=_STATUS_DETAILS,
    )
    servicer_context.set_code(grpc.StatusCode.NOT_FOUND)
    servicer_context.set_details(_STATUS_DETAILS_ANOTHER)
    # User put inconsistent status information in trailing metadata
    trailing = (
        (_GRPC_DETAILS_METADATA_KEY, mismatched_status.SerializeToString()),
    )
    servicer_context.set_trailing_metadata(trailing)
async def _invalid_code_unary_unary(request, servicer_context):
    """Handler aborting with a numeric code that maps to no grpc.StatusCode."""
    del request  # unused
    bogus_status = status_pb2.Status(
        code=42,  # deliberately outside the valid code_pb2 range
        message="Invalid code",
    )
    await servicer_context.abort_with_status(rpc_status.to_status(bogus_status))
class _GenericHandler(grpc.GenericRpcHandler):
    """Routes the test method paths to their unary-unary behaviors.

    A dispatch table replaces the original if/elif chain; unknown methods
    still yield None so the server responds UNIMPLEMENTED, exactly as before.
    """

    # Full method path -> coroutine implementing that method.
    _ROUTES = {
        _STATUS_OK: _ok_unary_unary,
        _STATUS_NOT_OK: _not_ok_unary_unary,
        _ERROR_DETAILS: _error_details_unary_unary,
        _INCONSISTENT: _inconsistent_unary_unary,
        _INVALID_CODE: _invalid_code_unary_unary,
    }

    def service(self, handler_call_details):
        """Returns the handler for the requested method, or None if unknown."""
        behavior = self._ROUTES.get(handler_call_details.method)
        if behavior is None:
            return None
        return grpc.unary_unary_rpc_method_handler(behavior)
class StatusTest(AioTestBase):
    """End-to-end tests of rpc_status.aio against a local aio server."""

    async def setUp(self):
        # One generic-handler server and an insecure channel per test case.
        self._server = aio.server()
        self._server.add_generic_rpc_handlers((_GenericHandler(),))
        port = self._server.add_insecure_port("[::]:0")
        await self._server.start()
        self._channel = aio.insecure_channel("localhost:%d" % port)
    async def tearDown(self):
        await self._server.stop(None)
        await self._channel.close()
    async def test_status_ok(self):
        """A successful RPC yields no rich status."""
        call = self._channel.unary_unary(_STATUS_OK)(_REQUEST)
        # Succeed RPC doesn't have status
        status = await rpc_status.aio.from_call(call)
        self.assertIs(status, None)
    async def test_status_not_ok(self):
        """A plain abort produces an error but still no rich status."""
        call = self._channel.unary_unary(_STATUS_NOT_OK)(_REQUEST)
        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call
        rpc_error = exception_context.exception
        self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL)
        # Failed RPC doesn't automatically generate status
        status = await rpc_status.aio.from_call(call)
        self.assertIs(status, None)
    async def test_error_details(self):
        """abort_with_status carries the packed DebugInfo detail intact."""
        call = self._channel.unary_unary(_ERROR_DETAILS)(_REQUEST)
        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call
        rpc_error = exception_context.exception
        status = await rpc_status.aio.from_call(call)
        self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL)
        self.assertEqual(status.code, code_pb2.Code.Value("INTERNAL"))
        # Check if the underlying proto message is intact
        self.assertTrue(
            status.details[0].Is(error_details_pb2.DebugInfo.DESCRIPTOR)
        )
        info = error_details_pb2.DebugInfo()
        status.details[0].Unpack(info)
        self.assertIn("_error_details_unary_unary", info.stack_entries[-1])
    async def test_code_message_validation(self):
        """Mismatched code/details vs. trailing rich status raises ValueError."""
        call = self._channel.unary_unary(_INCONSISTENT)(_REQUEST)
        with self.assertRaises(aio.AioRpcError) as exception_context:
            await call
        rpc_error = exception_context.exception
        self.assertEqual(rpc_error.code(), grpc.StatusCode.NOT_FOUND)
        # Code/Message validation failed
        with self.assertRaises(ValueError):
            await rpc_status.aio.from_call(call)
    async def test_invalid_code(self):
        """An out-of-range status code surfaces as UNKNOWN to the client."""
        with self.assertRaises(aio.AioRpcError) as exception_context:
            await self._channel.unary_unary(_INVALID_CODE)(_REQUEST)
        rpc_error = exception_context.exception
        self.assertEqual(rpc_error.code(), grpc.StatusCode.UNKNOWN)
        # Invalid status code exception raised during conversion
        self.assertIn("Invalid status code", rpc_error.details())
if __name__ == "__main__":
    # Default (WARNING-level) logging keeps direct-run output readable.
    logging.basicConfig()
    unittest.main(verbosity=2)
| 6,542
| 34.559783
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/status/__init__.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 580
| 40.5
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/health_check/health_servicer_test.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests AsyncIO version of grpcio-health-checking."""
import asyncio
import logging
import random
import time
import unittest
import grpc
from grpc.experimental import aio
from grpc_health.v1 import health
from grpc_health.v1 import health_pb2
from grpc_health.v1 import health_pb2_grpc
from tests.unit.framework.common import test_constants
from tests_aio.unit._test_base import AioTestBase
# Service names pre-registered in setUp with their namesake statuses.
_SERVING_SERVICE = "grpc.test.TestServiceServing"
_UNKNOWN_SERVICE = "grpc.test.TestServiceUnknown"
_NOT_SERVING_SERVICE = "grpc.test.TestServiceNotServing"
# Service used only by the Watch tests; never registered up front.
_WATCH_SERVICE = "grpc.test.WatchService"
# Iterations for the randomized duplicate-status stress test.
_LARGE_NUMBER_OF_STATUS_CHANGES = 1000
async def _pipe_to_queue(call, queue):
    """Drains a streaming call, forwarding each response into *queue*."""
    async for message in call:
        await queue.put(message)
class HealthServicerTest(AioTestBase):
    """Tests for the asyncio HealthServicer's Check and Watch behavior."""

    async def setUp(self):
        # Pre-register three services, one per non-watch status under test.
        self._servicer = health.aio.HealthServicer()
        await self._servicer.set(
            _SERVING_SERVICE, health_pb2.HealthCheckResponse.SERVING
        )
        await self._servicer.set(
            _UNKNOWN_SERVICE, health_pb2.HealthCheckResponse.UNKNOWN
        )
        await self._servicer.set(
            _NOT_SERVING_SERVICE, health_pb2.HealthCheckResponse.NOT_SERVING
        )
        self._server = aio.server()
        port = self._server.add_insecure_port("[::]:0")
        health_pb2_grpc.add_HealthServicer_to_server(
            self._servicer, self._server
        )
        await self._server.start()
        self._channel = aio.insecure_channel("localhost:%d" % port)
        self._stub = health_pb2_grpc.HealthStub(self._channel)
    async def tearDown(self):
        await self._channel.close()
        await self._server.stop(None)
    async def test_check_empty_service(self):
        """The empty (overall) service defaults to SERVING."""
        request = health_pb2.HealthCheckRequest()
        resp = await self._stub.Check(request)
        self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status)
    async def test_check_serving_service(self):
        """A service registered SERVING reports SERVING."""
        request = health_pb2.HealthCheckRequest(service=_SERVING_SERVICE)
        resp = await self._stub.Check(request)
        self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status)
    async def test_check_unknown_service(self):
        """A service registered UNKNOWN reports UNKNOWN."""
        request = health_pb2.HealthCheckRequest(service=_UNKNOWN_SERVICE)
        resp = await self._stub.Check(request)
        self.assertEqual(health_pb2.HealthCheckResponse.UNKNOWN, resp.status)
    async def test_check_not_serving_service(self):
        """A service registered NOT_SERVING reports NOT_SERVING."""
        request = health_pb2.HealthCheckRequest(service=_NOT_SERVING_SERVICE)
        resp = await self._stub.Check(request)
        self.assertEqual(
            health_pb2.HealthCheckResponse.NOT_SERVING, resp.status
        )
    async def test_check_not_found_service(self):
        """Checking an unregistered service fails with NOT_FOUND."""
        request = health_pb2.HealthCheckRequest(service="not-found")
        with self.assertRaises(aio.AioRpcError) as context:
            await self._stub.Check(request)
        self.assertEqual(grpc.StatusCode.NOT_FOUND, context.exception.code())
    async def test_health_service_name(self):
        """The canonical health service name is stable."""
        self.assertEqual(health.SERVICE_NAME, "grpc.health.v1.Health")
    async def test_watch_empty_service(self):
        """Watching the overall service immediately yields SERVING."""
        request = health_pb2.HealthCheckRequest(service=health.OVERALL_HEALTH)
        call = self._stub.Watch(request)
        queue = asyncio.Queue()
        task = self.loop.create_task(_pipe_to_queue(call, queue))
        self.assertEqual(
            health_pb2.HealthCheckResponse.SERVING, (await queue.get()).status
        )
        call.cancel()
        with self.assertRaises(asyncio.CancelledError):
            await task
        self.assertTrue(queue.empty())
    async def test_watch_new_service(self):
        """A watcher observes SERVICE_UNKNOWN, then every status change."""
        request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
        call = self._stub.Watch(request)
        queue = asyncio.Queue()
        task = self.loop.create_task(_pipe_to_queue(call, queue))
        self.assertEqual(
            health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
            (await queue.get()).status,
        )
        await self._servicer.set(
            _WATCH_SERVICE, health_pb2.HealthCheckResponse.SERVING
        )
        self.assertEqual(
            health_pb2.HealthCheckResponse.SERVING, (await queue.get()).status
        )
        await self._servicer.set(
            _WATCH_SERVICE, health_pb2.HealthCheckResponse.NOT_SERVING
        )
        self.assertEqual(
            health_pb2.HealthCheckResponse.NOT_SERVING,
            (await queue.get()).status,
        )
        call.cancel()
        with self.assertRaises(asyncio.CancelledError):
            await task
        self.assertTrue(queue.empty())
    async def test_watch_service_isolation(self):
        """Status changes to other services do not notify this watcher."""
        request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
        call = self._stub.Watch(request)
        queue = asyncio.Queue()
        task = self.loop.create_task(_pipe_to_queue(call, queue))
        self.assertEqual(
            health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
            (await queue.get()).status,
        )
        await self._servicer.set(
            "some-other-service", health_pb2.HealthCheckResponse.SERVING
        )
        # The change of health status in other service should be isolated.
        # Hence, no additional notification should be observed.
        with self.assertRaises(asyncio.TimeoutError):
            await asyncio.wait_for(queue.get(), test_constants.SHORT_TIMEOUT)
        call.cancel()
        with self.assertRaises(asyncio.CancelledError):
            await task
        self.assertTrue(queue.empty())
    async def test_two_watchers(self):
        """Two watchers of the same service both observe every change."""
        request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
        queue1 = asyncio.Queue()
        queue2 = asyncio.Queue()
        call1 = self._stub.Watch(request)
        call2 = self._stub.Watch(request)
        task1 = self.loop.create_task(_pipe_to_queue(call1, queue1))
        task2 = self.loop.create_task(_pipe_to_queue(call2, queue2))
        self.assertEqual(
            health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
            (await queue1.get()).status,
        )
        self.assertEqual(
            health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
            (await queue2.get()).status,
        )
        await self._servicer.set(
            _WATCH_SERVICE, health_pb2.HealthCheckResponse.SERVING
        )
        self.assertEqual(
            health_pb2.HealthCheckResponse.SERVING, (await queue1.get()).status
        )
        self.assertEqual(
            health_pb2.HealthCheckResponse.SERVING, (await queue2.get()).status
        )
        call1.cancel()
        call2.cancel()
        with self.assertRaises(asyncio.CancelledError):
            await task1
        with self.assertRaises(asyncio.CancelledError):
            await task2
        self.assertTrue(queue1.empty())
        self.assertTrue(queue2.empty())
    async def test_cancelled_watch_removed_from_watch_list(self):
        """A cancelled watcher is eventually dropped from the servicer."""
        request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
        call = self._stub.Watch(request)
        queue = asyncio.Queue()
        task = self.loop.create_task(_pipe_to_queue(call, queue))
        self.assertEqual(
            health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
            (await queue.get()).status,
        )
        call.cancel()
        await self._servicer.set(
            _WATCH_SERVICE, health_pb2.HealthCheckResponse.SERVING
        )
        with self.assertRaises(asyncio.CancelledError):
            await task
        # Wait for the serving coroutine to process client cancellation.
        timeout = time.monotonic() + test_constants.TIME_ALLOWANCE
        while time.monotonic() < timeout and self._servicer._server_watchers:
            await asyncio.sleep(1)
        self.assertFalse(
            self._servicer._server_watchers,
            "There should not be any watcher left",
        )
        self.assertTrue(queue.empty())
    async def test_graceful_shutdown(self):
        """After graceful shutdown, status is NOT_SERVING and set() is a no-op."""
        request = health_pb2.HealthCheckRequest(service=health.OVERALL_HEALTH)
        call = self._stub.Watch(request)
        queue = asyncio.Queue()
        task = self.loop.create_task(_pipe_to_queue(call, queue))
        self.assertEqual(
            health_pb2.HealthCheckResponse.SERVING, (await queue.get()).status
        )
        await self._servicer.enter_graceful_shutdown()
        self.assertEqual(
            health_pb2.HealthCheckResponse.NOT_SERVING,
            (await queue.get()).status,
        )
        # This should be a no-op.
        await self._servicer.set(
            health.OVERALL_HEALTH, health_pb2.HealthCheckResponse.SERVING
        )
        resp = await self._stub.Check(request)
        self.assertEqual(
            health_pb2.HealthCheckResponse.NOT_SERVING, resp.status
        )
        call.cancel()
        with self.assertRaises(asyncio.CancelledError):
            await task
        self.assertTrue(queue.empty())
    async def test_no_duplicate_status(self):
        """Repeatedly setting the same status never re-notifies the watcher."""
        request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
        call = self._stub.Watch(request)
        queue = asyncio.Queue()
        task = self.loop.create_task(_pipe_to_queue(call, queue))
        self.assertEqual(
            health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
            (await queue.get()).status,
        )
        last_status = health_pb2.HealthCheckResponse.SERVICE_UNKNOWN
        for _ in range(_LARGE_NUMBER_OF_STATUS_CHANGES):
            if random.randint(0, 1) == 0:
                status = health_pb2.HealthCheckResponse.SERVING
            else:
                status = health_pb2.HealthCheckResponse.NOT_SERVING
            await self._servicer.set(_WATCH_SERVICE, status)
            if status != last_status:
                # Only actual transitions should produce a notification.
                self.assertEqual(status, (await queue.get()).status)
            last_status = status
        call.cancel()
        with self.assertRaises(asyncio.CancelledError):
            await task
        self.assertTrue(queue.empty())
if __name__ == "__main__":
    # Debug-level logging helps diagnose flaky watch/cancellation timing.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 10,691
| 32.942857
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/health_check/__init__.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 580
| 40.5
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/reflection/reflection_servicer_test.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests of grpc_reflection.v1alpha.reflection."""
import logging
import unittest
from google.protobuf import descriptor_pb2
import grpc
from grpc.experimental import aio
from grpc_reflection.v1alpha import reflection
from grpc_reflection.v1alpha import reflection_pb2
from grpc_reflection.v1alpha import reflection_pb2_grpc
from src.proto.grpc.testing import empty_pb2
from src.proto.grpc.testing.proto2 import empty2_extensions_pb2
from src.proto.grpc.testing.proto2 import empty2_pb2
from tests_aio.unit._test_base import AioTestBase
# Known file/symbol from the test protos used to exercise reflection lookups.
_EMPTY_PROTO_FILE_NAME = "src/proto/grpc/testing/empty.proto"
_EMPTY_PROTO_SYMBOL_NAME = "grpc.testing.Empty"
# Placeholder service names registered solely for the ListServices test.
_SERVICE_NAMES = (
    "Angstrom",
    "Bohr",
    "Curie",
    "Dyson",
    "Einstein",
    "Feynman",
    "Galilei",
)
_EMPTY_EXTENSIONS_SYMBOL_NAME = "grpc.testing.proto2.EmptyWithExtensions"
# Extension field numbers expected on EmptyWithExtensions.
_EMPTY_EXTENSIONS_NUMBERS = (
    124,
    125,
    126,
    127,
    128,
)
def _file_descriptor_to_proto(descriptor):
    """Serializes a FileDescriptor into FileDescriptorProto wire bytes."""
    descriptor_proto = descriptor_pb2.FileDescriptorProto()
    descriptor.CopyToProto(descriptor_proto)
    return descriptor_proto.SerializeToString()
class ReflectionServicerTest(AioTestBase):
    """Tests of the v1alpha server reflection service over an aio channel."""

    async def setUp(self):
        self._server = aio.server()
        reflection.enable_server_reflection(_SERVICE_NAMES, self._server)
        port = self._server.add_insecure_port("[::]:0")
        await self._server.start()
        self._channel = aio.insecure_channel("localhost:%d" % port)
        self._stub = reflection_pb2_grpc.ServerReflectionStub(self._channel)
    async def tearDown(self):
        await self._server.stop(None)
        await self._channel.close()
    async def test_file_by_name(self):
        """Looks up one known and one nonexistent file by filename."""
        requests = (
            reflection_pb2.ServerReflectionRequest(
                file_by_filename=_EMPTY_PROTO_FILE_NAME
            ),
            reflection_pb2.ServerReflectionRequest(
                file_by_filename="i-donut-exist"
            ),
        )
        responses = []
        async for response in self._stub.ServerReflectionInfo(iter(requests)):
            responses.append(response)
        expected_responses = (
            reflection_pb2.ServerReflectionResponse(
                valid_host="",
                file_descriptor_response=reflection_pb2.FileDescriptorResponse(
                    file_descriptor_proto=(
                        _file_descriptor_to_proto(empty_pb2.DESCRIPTOR),
                    )
                ),
            ),
            reflection_pb2.ServerReflectionResponse(
                valid_host="",
                error_response=reflection_pb2.ErrorResponse(
                    error_code=grpc.StatusCode.NOT_FOUND.value[0],
                    error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
                ),
            ),
        )
        self.assertSequenceEqual(expected_responses, responses)
    async def test_file_by_symbol(self):
        """Looks up one known and one nonexistent fully-qualified symbol."""
        requests = (
            reflection_pb2.ServerReflectionRequest(
                file_containing_symbol=_EMPTY_PROTO_SYMBOL_NAME
            ),
            reflection_pb2.ServerReflectionRequest(
                file_containing_symbol="i.donut.exist.co.uk.org.net.me.name.foo"
            ),
        )
        responses = []
        async for response in self._stub.ServerReflectionInfo(iter(requests)):
            responses.append(response)
        expected_responses = (
            reflection_pb2.ServerReflectionResponse(
                valid_host="",
                file_descriptor_response=reflection_pb2.FileDescriptorResponse(
                    file_descriptor_proto=(
                        _file_descriptor_to_proto(empty_pb2.DESCRIPTOR),
                    )
                ),
            ),
            reflection_pb2.ServerReflectionResponse(
                valid_host="",
                error_response=reflection_pb2.ErrorResponse(
                    error_code=grpc.StatusCode.NOT_FOUND.value[0],
                    error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
                ),
            ),
        )
        self.assertSequenceEqual(expected_responses, responses)
    async def test_file_containing_extension(self):
        """Looks up the file declaring a known extension, plus a miss."""
        requests = (
            reflection_pb2.ServerReflectionRequest(
                file_containing_extension=reflection_pb2.ExtensionRequest(
                    containing_type=_EMPTY_EXTENSIONS_SYMBOL_NAME,
                    extension_number=125,
                ),
            ),
            reflection_pb2.ServerReflectionRequest(
                file_containing_extension=reflection_pb2.ExtensionRequest(
                    containing_type="i.donut.exist.co.uk.org.net.me.name.foo",
                    extension_number=55,
                ),
            ),
        )
        responses = []
        async for response in self._stub.ServerReflectionInfo(iter(requests)):
            responses.append(response)
        expected_responses = (
            reflection_pb2.ServerReflectionResponse(
                valid_host="",
                file_descriptor_response=reflection_pb2.FileDescriptorResponse(
                    # The declaring file plus its transitive dependency.
                    file_descriptor_proto=(
                        _file_descriptor_to_proto(
                            empty2_extensions_pb2.DESCRIPTOR
                        ),
                        _file_descriptor_to_proto(empty2_pb2.DESCRIPTOR),
                    )
                ),
            ),
            reflection_pb2.ServerReflectionResponse(
                valid_host="",
                error_response=reflection_pb2.ErrorResponse(
                    error_code=grpc.StatusCode.NOT_FOUND.value[0],
                    error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
                ),
            ),
        )
        self.assertSequenceEqual(expected_responses, responses)
    async def test_extension_numbers_of_type(self):
        """Lists all extension numbers of a known type, plus a miss."""
        requests = (
            reflection_pb2.ServerReflectionRequest(
                all_extension_numbers_of_type=_EMPTY_EXTENSIONS_SYMBOL_NAME
            ),
            reflection_pb2.ServerReflectionRequest(
                all_extension_numbers_of_type="i.donut.exist.co.uk.net.name.foo"
            ),
        )
        responses = []
        async for response in self._stub.ServerReflectionInfo(iter(requests)):
            responses.append(response)
        expected_responses = (
            reflection_pb2.ServerReflectionResponse(
                valid_host="",
                all_extension_numbers_response=reflection_pb2.ExtensionNumberResponse(
                    base_type_name=_EMPTY_EXTENSIONS_SYMBOL_NAME,
                    extension_number=_EMPTY_EXTENSIONS_NUMBERS,
                ),
            ),
            reflection_pb2.ServerReflectionResponse(
                valid_host="",
                error_response=reflection_pb2.ErrorResponse(
                    error_code=grpc.StatusCode.NOT_FOUND.value[0],
                    error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
                ),
            ),
        )
        self.assertSequenceEqual(expected_responses, responses)
    async def test_list_services(self):
        """ListServices returns every name passed to enable_server_reflection."""
        requests = (
            reflection_pb2.ServerReflectionRequest(
                list_services="",
            ),
        )
        responses = []
        async for response in self._stub.ServerReflectionInfo(iter(requests)):
            responses.append(response)
        expected_responses = (
            reflection_pb2.ServerReflectionResponse(
                valid_host="",
                list_services_response=reflection_pb2.ListServiceResponse(
                    service=tuple(
                        reflection_pb2.ServiceResponse(name=name)
                        for name in _SERVICE_NAMES
                    )
                ),
            ),
        )
        self.assertSequenceEqual(expected_responses, responses)
    def test_reflection_service_name(self):
        """The canonical reflection service name is stable."""
        self.assertEqual(
            reflection.SERVICE_NAME, "grpc.reflection.v1alpha.ServerReflection"
        )
if __name__ == "__main__":
    # Debug-level logging helps diagnose reflection stream issues.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 8,683
| 35.953191
| 86
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/reflection/__init__.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/_sanity/_sanity_test.py
|
# Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from tests._sanity import _sanity_test
class AioSanityTest(_sanity_test.SanityTest):
    """Reuses the sync sanity test to validate the tests_aio package layout."""

    # Package whose test manifest must match the modules actually present.
    TEST_PKG_MODULE_NAME = "tests_aio"
    TEST_PKG_PATH = "tests_aio"
if __name__ == "__main__":
    # Allow running this sanity check directly.
    unittest.main(verbosity=2)
| 813
| 29.148148
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/_sanity/__init__.py
|
# Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 581
| 40.571429
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/benchmark/benchmark_servicer.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python AsyncIO Benchmark Servicers."""
import asyncio
import logging
import unittest
from grpc.experimental import aio
from src.proto.grpc.testing import benchmark_service_pb2_grpc
from src.proto.grpc.testing import messages_pb2
class BenchmarkServicer(benchmark_service_pb2_grpc.BenchmarkServiceServicer):
    """AsyncIO benchmark servicer that replies with zero-filled payloads."""

    async def UnaryCall(self, request, unused_context):
        # One zero-filled response sized per the request.
        body = b"\0" * request.response_size
        return messages_pb2.SimpleResponse(
            payload=messages_pb2.Payload(body=body)
        )
    async def StreamingFromServer(self, request, unused_context):
        payload = messages_pb2.Payload(body=b"\0" * request.response_size)
        # Sends response at full capacity!
        while True:
            yield messages_pb2.SimpleResponse(payload=payload)
    async def StreamingCall(self, request_iterator, unused_context):
        # Echo one sized response per inbound request message.
        async for request in request_iterator:
            body = b"\0" * request.response_size
            yield messages_pb2.SimpleResponse(
                payload=messages_pb2.Payload(body=body)
            )
class GenericBenchmarkServicer(
    benchmark_service_pb2_grpc.BenchmarkServiceServicer
):
    """Generic (no-codec) Server implementation for the Benchmark service."""
    def __init__(self, resp_size):
        # NOTE(review): this builds a str, not bytes; the generic (no-codec)
        # path normally carries bytes-like payloads -- confirm the handlers
        # registered for this servicer accept str responses.
        self._response = "\0" * resp_size
    async def UnaryCall(self, unused_request, unused_context):
        # Always returns the single pre-built zero-filled response.
        return self._response
    async def StreamingCall(self, request_iterator, unused_context):
        # One canned response per inbound request message.
        async for _ in request_iterator:
            yield self._response
| 2,104
| 35.929825
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/benchmark/worker_servicer.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import collections
import logging
import multiprocessing
import os
import sys
import time
from typing import Tuple
import grpc
from grpc.experimental import aio
from src.proto.grpc.testing import benchmark_service_pb2_grpc
from src.proto.grpc.testing import control_pb2
from src.proto.grpc.testing import stats_pb2
from src.proto.grpc.testing import worker_service_pb2_grpc
from tests.qps import histogram
from tests.unit import resources
from tests.unit.framework.common import get_socket
from tests_aio.benchmark import benchmark_client
from tests_aio.benchmark import benchmark_servicer
# Core count reported back to the benchmark driver in ServerStatus.
_NUM_CORES = multiprocessing.cpu_count()
# Entry script used to spawn child qps workers as subprocesses.
_WORKER_ENTRY_FILE = os.path.join(
    os.path.split(os.path.abspath(__file__))[0], "worker.py"
)
_LOGGER = logging.getLogger(__name__)
class _SubWorker(
    collections.namedtuple("_SubWorker", ["process", "port", "channel", "stub"])
):
    """A data class that holds information about a child qps worker."""

    def _describe(self):
        # Human-readable identity: child pid plus the driver port it serves.
        return f"<_SubWorker pid={self.process.pid} port={self.port}>"

    def __repr__(self):
        return self._describe()

    def __str__(self):
        return repr(self)
def _get_server_status(
    start_time: float, end_time: float, port: int
) -> control_pb2.ServerStatus:
    """Creates ServerStatus proto message."""
    # NOTE(review): the end_time argument is immediately overwritten below,
    # so callers' values are ignored -- confirm whether that is intended.
    end_time = time.monotonic()
    elapsed_time = end_time - start_time
    # TODO(lidiz) Collect accurate time system to compute QPS/core-second.
    stats = stats_pb2.ServerStats(
        time_elapsed=elapsed_time,
        time_user=elapsed_time,
        time_system=elapsed_time,
    )
    return control_pb2.ServerStatus(stats=stats, port=port, cores=_NUM_CORES)
def _create_server(config: control_pb2.ServerConfig) -> Tuple[aio.Server, int]:
    """Creates a server object according to the ServerConfig.

    Returns the (unstarted) aio server and the port it is bound to.
    Raises NotImplementedError for server types other than ASYNC_SERVER
    and ASYNC_GENERIC_SERVER.
    """
    # Forward driver-provided channel args verbatim, coercing int values.
    channel_args = tuple(
        (arg.name, arg.str_value)
        if arg.HasField("str_value")
        else (arg.name, int(arg.int_value))
        for arg in config.channel_args
    )
    # SO_REUSEPORT allows several sub-worker servers to share one port.
    server = aio.server(options=channel_args + (("grpc.so_reuseport", 1),))
    if config.server_type == control_pb2.ASYNC_SERVER:
        servicer = benchmark_servicer.BenchmarkServicer()
        benchmark_service_pb2_grpc.add_BenchmarkServiceServicer_to_server(
            servicer, server
        )
    elif config.server_type == control_pb2.ASYNC_GENERIC_SERVER:
        # Generic (no protobuf codec) server: handlers registered by hand.
        resp_size = config.payload_config.bytebuf_params.resp_size
        servicer = benchmark_servicer.GenericBenchmarkServicer(resp_size)
        method_implementations = {
            "StreamingCall": grpc.stream_stream_rpc_method_handler(
                servicer.StreamingCall
            ),
            "UnaryCall": grpc.unary_unary_rpc_method_handler(
                servicer.UnaryCall
            ),
        }
        handler = grpc.method_handlers_generic_handler(
            "grpc.testing.BenchmarkService", method_implementations
        )
        server.add_generic_rpc_handlers((handler,))
    else:
        raise NotImplementedError(
            "Unsupported server type {}".format(config.server_type)
        )
    if config.HasField("security_params"):  # Use SSL
        server_creds = grpc.ssl_server_credentials(
            ((resources.private_key(), resources.certificate_chain()),)
        )
        port = server.add_secure_port(
            "[::]:{}".format(config.port), server_creds
        )
    else:
        port = server.add_insecure_port("[::]:{}".format(config.port))
    return server, port
def _get_client_status(
    start_time: float, end_time: float, qps_data: histogram.Histogram
) -> control_pb2.ClientStatus:
    """Creates a ClientStatus proto message for one measurement window.

    Args:
        start_time: Monotonic timestamp at which measurement started.
        end_time: Monotonic timestamp at which measurement ended.
        qps_data: Histogram accumulating per-query latencies.

    Returns:
        A control_pb2.ClientStatus carrying latency and elapsed-time stats.
    """
    latencies = qps_data.get_data()
    # Fix: honor the caller-supplied end_time instead of shadowing it with a
    # fresh time.monotonic() sample; callers read the clock right before
    # calling, so the reported window now matches their reset bookkeeping.
    elapsed_time = end_time - start_time
    # TODO(lidiz) Collect accurate time system to compute QPS/core-second.
    stats = stats_pb2.ClientStats(
        latencies=latencies,
        time_elapsed=elapsed_time,
        time_user=elapsed_time,
        time_system=elapsed_time,
    )
    return control_pb2.ClientStatus(stats=stats)
def _create_client(
    server: str, config: control_pb2.ClientConfig, qps_data: histogram.Histogram
) -> benchmark_client.BenchmarkClient:
    """Instantiates the benchmark client variant selected by *config*."""
    if config.load_params.WhichOneof("load") != "closed_loop":
        raise NotImplementedError(
            f"Unsupported load parameter {config.load_params}"
        )
    if config.client_type != control_pb2.ASYNC_CLIENT:
        raise NotImplementedError(
            f"Unsupported client type {config.client_type}"
        )
    # Dispatch table from rpc_type onto the concrete client class.
    rpc_type_to_class = {
        control_pb2.UNARY: benchmark_client.UnaryAsyncBenchmarkClient,
        control_pb2.STREAMING: benchmark_client.StreamingAsyncBenchmarkClient,
        control_pb2.STREAMING_FROM_SERVER: (
            benchmark_client.ServerStreamingAsyncBenchmarkClient
        ),
    }
    client_class = rpc_type_to_class.get(config.rpc_type)
    if client_class is None:
        raise NotImplementedError(
            f"Unsupported rpc_type [{config.rpc_type}]"
        )
    return client_class(server, config, qps_data)
def _pick_an_unused_port() -> int:
    """Reserves a free TCP port, releases it, and returns its number."""
    # get_socket() binds an ephemeral port; closing the socket frees it for
    # the subprocess that will actually listen on it.
    _, free_port, probe_socket = get_socket()
    probe_socket.close()
    return free_port
async def _create_sub_worker() -> _SubWorker:
    """Spawns a child qps worker subprocess and connects a stub to it."""
    worker_port = _pick_an_unused_port()
    _LOGGER.info("Creating sub worker at port [%d]...", worker_port)
    worker_process = await asyncio.create_subprocess_exec(
        sys.executable, _WORKER_ENTRY_FILE, "--driver_port", str(worker_port)
    )
    _LOGGER.info(
        "Created sub worker process for port [%d] at pid [%d]",
        worker_port,
        worker_process.pid,
    )
    worker_channel = aio.insecure_channel(f"localhost:{worker_port}")
    _LOGGER.info("Waiting for sub worker at port [%d]", worker_port)
    # Block until the child's server is actually accepting RPCs.
    await worker_channel.channel_ready()
    worker_stub = worker_service_pb2_grpc.WorkerServiceStub(worker_channel)
    return _SubWorker(
        process=worker_process,
        port=worker_port,
        channel=worker_channel,
        stub=worker_stub,
    )
class WorkerServicer(worker_service_pb2_grpc.WorkerServiceServicer):
    """Python Worker Server implementation.

    Implements the qps WorkerService: on RunServer/RunClient the driver
    streams a config message followed by Mark messages, and this servicer
    streams back status reports. When more than one process is requested,
    the work is fanned out to subprocess workers whose reports are relayed
    (and, for clients, merged) here.
    """

    def __init__(self):
        # Loop used to schedule benchmark client tasks.
        self._loop = asyncio.get_event_loop()
        # Set by QuitWorker; wait_for_quit() unblocks once it is set.
        self._quit_event = asyncio.Event()

    async def _run_single_server(self, config, request_iterator, context):
        """Runs one in-process benchmark server, streaming status updates."""
        server, port = _create_server(config)
        await server.start()
        _LOGGER.info("Server started at port [%d]", port)
        start_time = time.monotonic()
        # First status message tells the driver the server is ready.
        await context.write(_get_server_status(start_time, start_time, port))
        async for request in request_iterator:
            end_time = time.monotonic()
            status = _get_server_status(start_time, end_time, port)
            if request.mark.reset:
                # A reset mark begins a fresh measurement window.
                start_time = end_time
            await context.write(status)
        await server.stop(None)

    async def RunServer(self, request_iterator, context):
        """Bidi RPC: first message carries ServerConfig, then Mark requests."""
        config_request = await context.read()
        config = config_request.setup
        _LOGGER.info("Received ServerConfig: %s", config)
        if config.server_processes <= 0:
            _LOGGER.info("Using server_processes == [%d]", _NUM_CORES)
            config.server_processes = _NUM_CORES
        if config.port == 0:
            config.port = _pick_an_unused_port()
        _LOGGER.info("Port picked [%d]", config.port)
        if config.server_processes == 1:
            # If server_processes == 1, start the server in this process.
            await self._run_single_server(config, request_iterator, context)
        else:
            # If server_processes > 1, offload to other processes.
            sub_workers = await asyncio.gather(
                *[_create_sub_worker() for _ in range(config.server_processes)]
            )
            calls = [worker.stub.RunServer() for worker in sub_workers]
            # Each child runs exactly one server process.
            config_request.setup.server_processes = 1
            for call in calls:
                await call.write(config_request)
                # An empty status indicates the peer is ready
                await call.read()
            start_time = time.monotonic()
            await context.write(
                _get_server_status(
                    start_time,
                    start_time,
                    config.port,
                )
            )
            _LOGGER.info("Servers are ready to serve.")
            async for request in request_iterator:
                end_time = time.monotonic()
                # Relay each Mark to every child; this worker's own clock is
                # used for the status reported back to the driver.
                for call in calls:
                    await call.write(request)
                    # Reports from sub workers doesn't matter
                    await call.read()
                status = _get_server_status(
                    start_time,
                    end_time,
                    config.port,
                )
                if request.mark.reset:
                    start_time = end_time
                await context.write(status)
            for call in calls:
                await call.done_writing()
            for worker in sub_workers:
                await worker.stub.QuitWorker(control_pb2.Void())
                await worker.channel.close()
                _LOGGER.info("Waiting for [%s] to quit...", worker)
                await worker.process.wait()

    async def _run_single_client(self, config, request_iterator, context):
        """Runs the benchmark clients in this process, streaming stats."""
        running_tasks = []
        qps_data = histogram.Histogram(
            config.histogram_params.resolution,
            config.histogram_params.max_possible,
        )
        start_time = time.monotonic()
        # Create a client for each channel as asyncio.Task
        for i in range(config.client_channels):
            # Round-robin the channels across the configured targets.
            server = config.server_targets[i % len(config.server_targets)]
            client = _create_client(server, config, qps_data)
            _LOGGER.info("Client created against server [%s]", server)
            running_tasks.append(self._loop.create_task(client.run()))
        end_time = time.monotonic()
        await context.write(_get_client_status(start_time, end_time, qps_data))
        # Respond to stat requests
        async for request in request_iterator:
            end_time = time.monotonic()
            status = _get_client_status(start_time, end_time, qps_data)
            if request.mark.reset:
                qps_data.reset()
                start_time = time.monotonic()
            await context.write(status)
        # Cleanup the clients
        for task in running_tasks:
            task.cancel()

    async def RunClient(self, request_iterator, context):
        """Bidi RPC: first message carries ClientConfig, then Mark requests."""
        config_request = await context.read()
        config = config_request.setup
        _LOGGER.info("Received ClientConfig: %s", config)
        if config.client_processes <= 0:
            _LOGGER.info(
                "client_processes can't be [%d]", config.client_processes
            )
            _LOGGER.info("Using client_processes == [%d]", _NUM_CORES)
            config.client_processes = _NUM_CORES
        if config.client_processes == 1:
            # If client_processes == 1, run the benchmark in this process.
            await self._run_single_client(config, request_iterator, context)
        else:
            # If client_processes > 1, offload the work to other processes.
            sub_workers = await asyncio.gather(
                *[_create_sub_worker() for _ in range(config.client_processes)]
            )
            calls = [worker.stub.RunClient() for worker in sub_workers]
            # Each child runs exactly one client process.
            config_request.setup.client_processes = 1
            for call in calls:
                await call.write(config_request)
                # An empty status indicates the peer is ready
                await call.read()
            start_time = time.monotonic()
            # Aggregates latency histograms merged from all children.
            result = histogram.Histogram(
                config.histogram_params.resolution,
                config.histogram_params.max_possible,
            )
            end_time = time.monotonic()
            await context.write(
                _get_client_status(start_time, end_time, result)
            )
            async for request in request_iterator:
                end_time = time.monotonic()
                for call in calls:
                    _LOGGER.debug("Fetching status...")
                    await call.write(request)
                    sub_status = await call.read()
                    result.merge(sub_status.stats.latencies)
                    _LOGGER.debug(
                        "Update from sub worker count=[%d]",
                        sub_status.stats.latencies.count,
                    )
                status = _get_client_status(start_time, end_time, result)
                if request.mark.reset:
                    result.reset()
                    start_time = time.monotonic()
                _LOGGER.debug(
                    "Reporting count=[%d]", status.stats.latencies.count
                )
                await context.write(status)
            for call in calls:
                await call.done_writing()
            for worker in sub_workers:
                await worker.stub.QuitWorker(control_pb2.Void())
                await worker.channel.close()
                _LOGGER.info("Waiting for sub worker [%s] to quit...", worker)
                await worker.process.wait()
                _LOGGER.info("Sub worker [%s] quit", worker)

    @staticmethod
    async def CoreCount(unused_request, unused_context):
        """Reports the number of cores available to this worker."""
        return control_pb2.CoreResponse(cores=_NUM_CORES)

    async def QuitWorker(self, unused_request, unused_context):
        """Signals the serving loop (via wait_for_quit) to shut down."""
        _LOGGER.info("QuitWorker command received.")
        self._quit_event.set()
        return control_pb2.Void()

    async def wait_for_quit(self):
        """Blocks until a QuitWorker RPC has been received."""
        await self._quit_event.wait()
| 14,466
| 34.720988
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/benchmark/server.py
|
# Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import logging
import unittest
from grpc.experimental import aio
from src.proto.grpc.testing import benchmark_service_pb2_grpc
from tests_aio.benchmark import benchmark_servicer
async def _start_async_server():
    """Starts an insecure AsyncIO benchmark server on localhost:50051."""
    server = aio.server()
    bound_port = server.add_insecure_port("localhost:%s" % 50051)
    benchmark_service_pb2_grpc.add_BenchmarkServiceServicer_to_server(
        benchmark_servicer.BenchmarkServicer(), server
    )
    await server.start()
    logging.info("Benchmark server started at :%d" % bound_port)
    # Serve until the process is terminated.
    await server.wait_for_termination()
def main():
    """Schedules the benchmark server task and blocks on the event loop."""
    event_loop = asyncio.get_event_loop()
    event_loop.create_task(_start_async_server())
    # run_forever() keeps the loop (and the server task) alive indefinitely.
    event_loop.run_forever()
# Script entry point: enable verbose logging, then serve forever.
if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    main()
| 1,382
| 27.8125
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/benchmark/benchmark_client.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python AsyncIO Benchmark Clients."""
import abc
import asyncio
import logging
import random
import time
import grpc
from grpc.experimental import aio
from src.proto.grpc.testing import benchmark_service_pb2_grpc
from src.proto.grpc.testing import control_pb2
from src.proto.grpc.testing import messages_pb2
from tests.qps import histogram
from tests.unit import resources
class GenericStub(object):
    """Hand-rolled BenchmarkService stub for the generic (raw-bytes) mode."""

    def __init__(self, channel: aio.Channel):
        # Methods are created without serializers so payloads stay as bytes.
        service_prefix = "/grpc.testing.BenchmarkService"
        self.UnaryCall = channel.unary_unary(service_prefix + "/UnaryCall")
        self.StreamingFromServer = channel.unary_stream(
            service_prefix + "/StreamingFromServer"
        )
        self.StreamingCall = channel.stream_stream(
            service_prefix + "/StreamingCall"
        )
class BenchmarkClient(abc.ABC):
    """Benchmark client interface that exposes a non-blocking send_request().

    The constructor builds the channel, the stub (proto-typed or generic
    depending on the payload config), and a reusable request object;
    subclasses implement the actual RPC driving loops in run().
    """

    def __init__(
        self,
        address: str,
        config: control_pb2.ClientConfig,
        hist: histogram.Histogram,
    ):
        # Disables underlying reuse of subchannels
        unique_option = (("iv", random.random()),)
        # Parses the channel argument from config
        channel_args = tuple(
            (arg.name, arg.str_value)
            if arg.HasField("str_value")
            else (arg.name, int(arg.int_value))
            for arg in config.channel_args
        )
        # Creates the channel
        if config.HasField("security_params"):
            channel_credentials = grpc.ssl_channel_credentials(
                resources.test_root_certificates(),
            )
            # Override the target name so the test certificate validates.
            server_host_override_option = (
                (
                    "grpc.ssl_target_name_override",
                    config.security_params.server_host_override,
                ),
            )
            self._channel = aio.secure_channel(
                address,
                channel_credentials,
                unique_option + channel_args + server_host_override_option,
            )
        else:
            self._channel = aio.insecure_channel(
                address, options=unique_option + channel_args
            )
        # Creates the stub
        if config.payload_config.WhichOneof("payload") == "simple_params":
            # Proto-typed mode: a SimpleRequest of the configured sizes.
            self._generic = False
            self._stub = benchmark_service_pb2_grpc.BenchmarkServiceStub(
                self._channel
            )
            payload = messages_pb2.Payload(
                body=b"\0" * config.payload_config.simple_params.req_size
            )
            self._request = messages_pb2.SimpleRequest(
                payload=payload,
                response_size=config.payload_config.simple_params.resp_size,
            )
        else:
            # Generic mode: raw bytes of the configured request size.
            self._generic = True
            self._stub = GenericStub(self._channel)
            self._request = (
                b"\0" * config.payload_config.bytebuf_params.req_size
            )
        self._hist = hist
        self._response_callbacks = []
        # Number of concurrent in-flight RPCs each subclass should maintain.
        self._concurrency = config.outstanding_rpcs_per_channel

    async def run(self) -> None:
        """Waits for the channel to be ready before benchmarking starts."""
        await self._channel.channel_ready()

    async def stop(self) -> None:
        """Closes the underlying channel."""
        await self._channel.close()

    def _record_query_time(self, query_time: float) -> None:
        # Histogram stores nanoseconds; query_time is in seconds.
        self._hist.add(query_time * 1e9)
class UnaryAsyncBenchmarkClient(BenchmarkClient):
    """Benchmark client that issues back-to-back unary RPCs."""

    def __init__(
        self,
        address: str,
        config: control_pb2.ClientConfig,
        hist: histogram.Histogram,
    ):
        super().__init__(address, config, hist)
        self._running = None  # True while the benchmark loop should spin.
        self._stopped = asyncio.Event()  # Set once all senders finished.

    async def _send_request(self):
        """Times a single UnaryCall round trip."""
        began_at = time.monotonic()
        await self._stub.UnaryCall(self._request)
        self._record_query_time(time.monotonic() - began_at)

    async def _send_indefinitely(self) -> None:
        """Issues unary calls until stop() clears the running flag."""
        while self._running:
            await self._send_request()

    async def run(self) -> None:
        await super().run()
        self._running = True
        sender_coros = [
            self._send_indefinitely() for _ in range(self._concurrency)
        ]
        await asyncio.gather(*sender_coros)
        self._stopped.set()

    async def stop(self) -> None:
        self._running = False
        await self._stopped.wait()
        await super().stop()
class StreamingAsyncBenchmarkClient(BenchmarkClient):
    """Benchmark client driving full-duplex StreamingCall RPCs."""

    def __init__(
        self,
        address: str,
        config: control_pb2.ClientConfig,
        hist: histogram.Histogram,
    ):
        super().__init__(address, config, hist)
        self._running = None  # True while the benchmark loop should spin.
        self._stopped = asyncio.Event()  # Set once all streams finished.

    async def _one_streaming_call(self):
        """Drives one long-lived stream, timing each write/read round trip."""
        call = self._stub.StreamingCall()
        while self._running:
            # Fix: use the monotonic clock instead of time.time() so measured
            # latency is immune to wall-clock adjustments, matching
            # UnaryAsyncBenchmarkClient.
            start_time = time.monotonic()
            await call.write(self._request)
            await call.read()
            self._record_query_time(time.monotonic() - start_time)
        await call.done_writing()

    async def run(self):
        await super().run()
        self._running = True
        senders = (self._one_streaming_call() for _ in range(self._concurrency))
        await asyncio.gather(*senders)
        self._stopped.set()

    async def stop(self):
        self._running = False
        await self._stopped.wait()
        await super().stop()
class ServerStreamingAsyncBenchmarkClient(BenchmarkClient):
    """Benchmark client consuming a server-streaming RPC."""

    def __init__(
        self,
        address: str,
        config: control_pb2.ClientConfig,
        hist: histogram.Histogram,
    ):
        super().__init__(address, config, hist)
        self._running = None  # True while the benchmark loop should spin.
        self._stopped = asyncio.Event()  # Set once all readers finished.

    async def _one_server_streaming_call(self):
        """Reads responses from one server stream, timing each read."""
        call = self._stub.StreamingFromServer(self._request)
        while self._running:
            # Fix: use the monotonic clock instead of time.time() so measured
            # latency is immune to wall-clock adjustments, matching
            # UnaryAsyncBenchmarkClient.
            start_time = time.monotonic()
            await call.read()
            self._record_query_time(time.monotonic() - start_time)

    async def run(self):
        await super().run()
        self._running = True
        senders = (
            self._one_server_streaming_call() for _ in range(self._concurrency)
        )
        await asyncio.gather(*senders)
        self._stopped.set()

    async def stop(self):
        self._running = False
        await self._stopped.wait()
        await super().stop()
| 6,867
| 30.64977
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/benchmark/worker.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import asyncio
import logging
from grpc.experimental import aio
from src.proto.grpc.testing import worker_service_pb2_grpc
from tests_aio.benchmark import worker_servicer
async def run_worker_server(port: int) -> None:
    """Serves the WorkerService on *port* until a QuitWorker RPC arrives."""
    worker_server = aio.server()
    servicer = worker_servicer.WorkerServicer()
    worker_service_pb2_grpc.add_WorkerServiceServicer_to_server(
        servicer, worker_server
    )
    worker_server.add_insecure_port("[::]:{}".format(port))
    await worker_server.start()
    # Block until the servicer observes QuitWorker, then shut down cleanly.
    await servicer.wait_for_quit()
    await worker_server.stop(None)
# Script entry point: parse CLI flags and run a worker server.
if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    parser = argparse.ArgumentParser(
        description="gRPC Python performance testing worker"
    )
    parser.add_argument(
        "--driver_port",
        type=int,
        dest="port",
        help="The port the worker should listen on",
    )
    parser.add_argument(
        "--uvloop", action="store_true", help="Use uvloop or not"
    )
    args = parser.parse_args()
    if args.uvloop:
        # Optional third-party loop; imported lazily so the flagless path
        # has no uvloop dependency.
        import uvloop
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
        loop = uvloop.new_event_loop()
        asyncio.set_event_loop(loop)
    asyncio.get_event_loop().run_until_complete(run_worker_server(args.port))
| 1,853
| 27.523077
| 77
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/interop/local_interop_test.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Conducts interop tests locally."""
import logging
import unittest
import grpc
from grpc.experimental import aio
from src.proto.grpc.testing import test_pb2_grpc
from tests.interop import resources
from tests_aio.interop import methods
from tests_aio.unit._test_base import AioTestBase
from tests_aio.unit._test_server import start_test_server
_SERVER_HOST_OVERRIDE = "foo.test.google.fr"
class InteropTestCaseMixin:
    """Unit test methods.

    This class must be mixed in with unittest.TestCase and a class that defines
    setUp and tearDown methods that manage a stub attribute.

    Each test method simply delegates to methods.test_interoperability with
    the corresponding interop TestCase enum value.
    """

    # Provided by the concrete test class's setUp().
    _stub: test_pb2_grpc.TestServiceStub

    async def test_empty_unary(self):
        """Runs the EMPTY_UNARY interop case."""
        await methods.test_interoperability(
            methods.TestCase.EMPTY_UNARY, self._stub, None
        )

    async def test_large_unary(self):
        """Runs the LARGE_UNARY interop case."""
        await methods.test_interoperability(
            methods.TestCase.LARGE_UNARY, self._stub, None
        )

    async def test_server_streaming(self):
        """Runs the SERVER_STREAMING interop case."""
        await methods.test_interoperability(
            methods.TestCase.SERVER_STREAMING, self._stub, None
        )

    async def test_client_streaming(self):
        """Runs the CLIENT_STREAMING interop case."""
        await methods.test_interoperability(
            methods.TestCase.CLIENT_STREAMING, self._stub, None
        )

    async def test_ping_pong(self):
        """Runs the PING_PONG interop case."""
        await methods.test_interoperability(
            methods.TestCase.PING_PONG, self._stub, None
        )

    async def test_cancel_after_begin(self):
        """Runs the CANCEL_AFTER_BEGIN interop case."""
        await methods.test_interoperability(
            methods.TestCase.CANCEL_AFTER_BEGIN, self._stub, None
        )

    async def test_cancel_after_first_response(self):
        """Runs the CANCEL_AFTER_FIRST_RESPONSE interop case."""
        await methods.test_interoperability(
            methods.TestCase.CANCEL_AFTER_FIRST_RESPONSE, self._stub, None
        )

    async def test_timeout_on_sleeping_server(self):
        """Runs the TIMEOUT_ON_SLEEPING_SERVER interop case."""
        await methods.test_interoperability(
            methods.TestCase.TIMEOUT_ON_SLEEPING_SERVER, self._stub, None
        )

    async def test_empty_stream(self):
        """Runs the EMPTY_STREAM interop case."""
        await methods.test_interoperability(
            methods.TestCase.EMPTY_STREAM, self._stub, None
        )

    async def test_status_code_and_message(self):
        """Runs the STATUS_CODE_AND_MESSAGE interop case."""
        await methods.test_interoperability(
            methods.TestCase.STATUS_CODE_AND_MESSAGE, self._stub, None
        )

    async def test_unimplemented_method(self):
        """Runs the UNIMPLEMENTED_METHOD interop case."""
        await methods.test_interoperability(
            methods.TestCase.UNIMPLEMENTED_METHOD, self._stub, None
        )

    async def test_unimplemented_service(self):
        """Runs the UNIMPLEMENTED_SERVICE interop case."""
        await methods.test_interoperability(
            methods.TestCase.UNIMPLEMENTED_SERVICE, self._stub, None
        )

    async def test_custom_metadata(self):
        """Runs the CUSTOM_METADATA interop case."""
        await methods.test_interoperability(
            methods.TestCase.CUSTOM_METADATA, self._stub, None
        )

    async def test_special_status_message(self):
        """Runs the SPECIAL_STATUS_MESSAGE interop case."""
        await methods.test_interoperability(
            methods.TestCase.SPECIAL_STATUS_MESSAGE, self._stub, None
        )
class InsecureLocalInteropTest(InteropTestCaseMixin, AioTestBase):
    """Runs the interop suite against a plaintext local test server."""

    async def setUp(self):
        target, self._server = await start_test_server()
        self._channel = aio.insecure_channel(target)
        self._stub = test_pb2_grpc.TestServiceStub(self._channel)

    async def tearDown(self):
        # Close the channel before stopping the server it points at.
        await self._channel.close()
        await self._server.stop(None)
class SecureLocalInteropTest(InteropTestCaseMixin, AioTestBase):
    """Runs the interop suite against a TLS-enabled local test server."""

    async def setUp(self):
        # Server side: test key pair.
        server_credentials = grpc.ssl_server_credentials(
            [(resources.private_key(), resources.certificate_chain())]
        )
        address, self._server = await start_test_server(
            secure=True, server_credentials=server_credentials
        )
        # Client side: trust the test CA and override the target name so the
        # certificate validates against the fake hostname.
        channel_credentials = grpc.ssl_channel_credentials(
            resources.test_root_certificates()
        )
        override_options = (
            (
                "grpc.ssl_target_name_override",
                _SERVER_HOST_OVERRIDE,
            ),
        )
        self._channel = aio.secure_channel(
            address, channel_credentials, override_options
        )
        self._stub = test_pb2_grpc.TestServiceStub(self._channel)

    async def tearDown(self):
        # Close the channel before stopping the server it points at.
        await self._channel.close()
        await self._server.stop(None)
# Script entry point: run the interop tests with verbose output.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    unittest.main(verbosity=2)
| 4,953
| 31.379085
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/interop/server.py
|
# Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The gRPC interoperability test server using AsyncIO stack."""
import argparse
import asyncio
import logging
import grpc
from tests.interop import server as interop_server_lib
from tests_aio.unit import _test_server
logging.basicConfig(level=logging.DEBUG)
_LOGGER = logging.getLogger(__name__)
_LOGGER.setLevel(logging.DEBUG)
async def serve():
    """Starts the AsyncIO interop test server and waits for termination."""
    args = interop_server_lib.parse_interop_server_arguments()
    wants_security = args.use_tls or args.use_alts
    if wants_security:
        credentials = interop_server_lib.get_server_credentials(args.use_tls)
        address, server = await _test_server.start_test_server(
            port=args.port, secure=True, server_credentials=credentials
        )
    else:
        address, server = await _test_server.start_test_server(
            port=args.port, secure=False
        )
    _LOGGER.info("Server serving at %s", address)
    await server.wait_for_termination()
    _LOGGER.info("Server stopped; exiting.")
if __name__ == "__main__":
asyncio.get_event_loop().run_until_complete(serve())
| 1,620
| 30.784314
| 77
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/interop/client.py
|
# Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import asyncio
import logging
import os
import grpc
from grpc.experimental import aio
from tests.interop import client as interop_client_lib
from tests_aio.interop import methods
_LOGGER = logging.getLogger(__name__)
_LOGGER.setLevel(logging.DEBUG)
def _create_channel(args):
    """Builds a secure or insecure AsyncIO channel from parsed CLI args."""
    target = "%s:%s" % (args.server_host, args.server_port)
    needs_security = (
        args.use_tls
        or args.use_alts
        or args.custom_credentials_type is not None
    )
    if not needs_security:
        return aio.insecure_channel(target)
    credentials, options = interop_client_lib.get_secure_channel_parameters(
        args
    )
    return aio.secure_channel(target, credentials, options)
def _test_case_from_arg(test_case_arg):
    """Maps a CLI test-case string onto its methods.TestCase enum member."""
    for candidate in methods.TestCase:
        if candidate.value == test_case_arg:
            return candidate
    raise ValueError('No test case "%s"!' % test_case_arg)
async def test_interoperability():
    """Parses CLI args, builds a stub, and runs the selected interop case."""
    args = interop_client_lib.parse_interop_client_args()
    stub = interop_client_lib.create_stub(_create_channel(args), args)
    await methods.test_interoperability(
        _test_case_from_arg(args.test_case), stub, args
    )
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
asyncio.get_event_loop().set_debug(True)
asyncio.get_event_loop().run_until_complete(test_interoperability())
| 2,027
| 29.268657
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/interop/methods.py
|
# Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementations of interoperability test methods."""
import argparse
import asyncio
import collections
import datetime
import enum
import inspect
import json
import os
import threading
import time
from typing import Any, Optional, Union
from google import auth as google_auth
from google.auth import environment_vars as google_auth_environment_vars
from google.auth.transport import grpc as google_auth_transport_grpc
from google.auth.transport import requests as google_auth_transport_requests
import grpc
from grpc.experimental import aio
from src.proto.grpc.testing import empty_pb2
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
_INITIAL_METADATA_KEY = "x-grpc-test-echo-initial"
_TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin"
async def _expect_status_code(
    call: aio.Call, expected_code: grpc.StatusCode
) -> None:
    """Raises ValueError unless *call* terminated with *expected_code*."""
    code = await call.code()
    if code != expected_code:
        # Fix: reuse the already-awaited code rather than issuing a second,
        # redundant `await call.code()` just to build the message.
        raise ValueError("expected code %s, got %s" % (expected_code, code))
async def _expect_status_details(call: aio.Call, expected_details: str) -> None:
    """Raises ValueError unless *call* finished with *expected_details*."""
    details = await call.details()
    if details != expected_details:
        # Fix: reuse the already-awaited details rather than issuing a
        # second, redundant `await call.details()` just to build the message.
        raise ValueError(
            "expected message %s, got %s" % (expected_details, details)
        )
async def _validate_status_code_and_details(
    call: aio.Call, expected_code: grpc.StatusCode, expected_details: str
) -> None:
    """Checks both the status code and the details of a finished call."""
    await _expect_status_code(call, expected_code)
    await _expect_status_details(call, expected_details)
def _validate_payload_type_and_length(
    response: Union[
        messages_pb2.SimpleResponse, messages_pb2.StreamingOutputCallResponse
    ],
    expected_type: Any,
    expected_length: int,
) -> None:
    """Raises ValueError unless the response payload matches expectations.

    Args:
        response: A response message carrying a `payload` field.
        expected_type: The expected payload type enum value.
        expected_length: The expected byte length of the payload body.
    """
    if response.payload.type is not expected_type:
        # Fix: report the actual mismatching payload type value. The previous
        # message formatted type(response.payload.type), which always prints
        # the enum value's Python class instead of the value itself.
        raise ValueError(
            "expected payload type %s, got %s"
            % (expected_type, response.payload.type)
        )
    if len(response.payload.body) != expected_length:
        raise ValueError(
            "expected payload body size %d, got %d"
            % (expected_length, len(response.payload.body))
        )
async def _large_unary_common_behavior(
    stub: test_pb2_grpc.TestServiceStub,
    fill_username: bool,
    fill_oauth_scope: bool,
    call_credentials: Optional[grpc.CallCredentials],
) -> messages_pb2.SimpleResponse:
    """Issues one large UnaryCall and validates the response payload size."""
    response_size = 314159
    request_payload = messages_pb2.Payload(body=b"\x00" * 271828)
    request = messages_pb2.SimpleRequest(
        response_type=messages_pb2.COMPRESSABLE,
        response_size=response_size,
        payload=request_payload,
        fill_username=fill_username,
        fill_oauth_scope=fill_oauth_scope,
    )
    response = await stub.UnaryCall(request, credentials=call_credentials)
    _validate_payload_type_and_length(
        response, messages_pb2.COMPRESSABLE, response_size
    )
    return response
async def _empty_unary(stub: test_pb2_grpc.TestServiceStub) -> None:
    """Verifies that EmptyCall round-trips an Empty message."""
    response = await stub.EmptyCall(empty_pb2.Empty())
    if isinstance(response, empty_pb2.Empty):
        return
    raise TypeError(
        'response is of type "%s", not empty_pb2.Empty!' % type(response)
    )
async def _large_unary(stub: test_pb2_grpc.TestServiceStub) -> None:
    """Interop LARGE_UNARY case: large unary call, no auth fields filled."""
    await _large_unary_common_behavior(stub, False, False, None)
async def _client_streaming(stub: test_pb2_grpc.TestServiceStub) -> None:
    """Streams four payloads and checks the aggregated size echoed back."""
    body_sizes = (
        27182,
        8,
        1828,
        45904,
    )

    async def _requests():
        for body_size in body_sizes:
            payload = messages_pb2.Payload(body=b"\x00" * body_size)
            yield messages_pb2.StreamingInputCallRequest(payload=payload)

    response = await stub.StreamingInputCall(_requests())
    if response.aggregated_payload_size != sum(body_sizes):
        raise ValueError(
            "incorrect size %d!" % response.aggregated_payload_size
        )
async def _server_streaming(stub: test_pb2_grpc.TestServiceStub) -> None:
    """Requests four responses of fixed sizes and validates each in order."""
    sizes = (
        31415,
        9,
        2653,
        58979,
    )
    request = messages_pb2.StreamingOutputCallRequest(
        response_type=messages_pb2.COMPRESSABLE,
        response_parameters=tuple(
            messages_pb2.ResponseParameters(size=size) for size in sizes
        ),
    )
    call = stub.StreamingOutputCall(request)
    for expected_size in sizes:
        response = await call.read()
        _validate_payload_type_and_length(
            response, messages_pb2.COMPRESSABLE, expected_size
        )
async def _ping_pong(stub: test_pb2_grpc.TestServiceStub) -> None:
    """Performs four write/read round trips on one full-duplex stream."""
    response_sizes = (
        31415,
        9,
        2653,
        58979,
    )
    payload_sizes = (
        27182,
        8,
        1828,
        45904,
    )
    call = stub.FullDuplexCall()
    for response_size, payload_size in zip(response_sizes, payload_sizes):
        outgoing = messages_pb2.StreamingOutputCallRequest(
            response_type=messages_pb2.COMPRESSABLE,
            response_parameters=(
                messages_pb2.ResponseParameters(size=response_size),
            ),
            payload=messages_pb2.Payload(body=b"\x00" * payload_size),
        )
        await call.write(outgoing)
        incoming = await call.read()
        _validate_payload_type_and_length(
            incoming, messages_pb2.COMPRESSABLE, response_size
        )
    await call.done_writing()
    await _validate_status_code_and_details(call, grpc.StatusCode.OK, "")
async def _cancel_after_begin(stub: test_pb2_grpc.TestServiceStub):
    """Cancels a client-streaming call before sending any request."""
    call = stub.StreamingInputCall()
    call.cancel()
    if not call.cancelled():
        raise ValueError("expected cancelled method to return True")
    if await call.code() is not grpc.StatusCode.CANCELLED:
        raise ValueError("expected status code CANCELLED")
async def _cancel_after_first_response(stub: test_pb2_grpc.TestServiceStub):
    """Cancels a full-duplex call right after the first response arrives."""
    first_response_size = 31415
    first_payload_size = 27182
    call = stub.FullDuplexCall()
    request = messages_pb2.StreamingOutputCallRequest(
        response_type=messages_pb2.COMPRESSABLE,
        response_parameters=(
            messages_pb2.ResponseParameters(size=first_response_size),
        ),
        payload=messages_pb2.Payload(body=b"\x00" * first_payload_size),
    )
    await call.write(request)
    await call.read()
    call.cancel()
    try:
        await call.read()
    except asyncio.CancelledError:
        # The cancellation surfaced as expected; confirm the final status.
        assert await call.code() is grpc.StatusCode.CANCELLED
    else:
        raise ValueError("expected call to be cancelled")
async def _timeout_on_sleeping_server(stub: test_pb2_grpc.TestServiceStub):
    """Expects DEADLINE_EXCEEDED when the server sleeps past the deadline."""
    payload_size = 27182
    time_limit = datetime.timedelta(seconds=1)
    call = stub.FullDuplexCall(timeout=time_limit.total_seconds())
    # Ask the server to wait twice the deadline before responding.
    sleep_interval_us = int(time_limit.total_seconds() * 2 * 10**6)
    request = messages_pb2.StreamingOutputCallRequest(
        response_type=messages_pb2.COMPRESSABLE,
        payload=messages_pb2.Payload(body=b"\x00" * payload_size),
        response_parameters=(
            messages_pb2.ResponseParameters(interval_us=sleep_interval_us),
        ),
    )
    await call.write(request)
    await call.done_writing()
    try:
        await call.read()
    except aio.AioRpcError as rpc_error:
        if rpc_error.code() is not grpc.StatusCode.DEADLINE_EXCEEDED:
            raise
    else:
        raise ValueError("expected call to exceed deadline")
async def _empty_stream(stub: test_pb2_grpc.TestServiceStub):
    """Half-closes immediately and expects the server to end the stream."""
    call = stub.FullDuplexCall()
    await call.done_writing()
    first_read = await call.read()
    # Fix: raise instead of `assert` so the check survives `python -O` and
    # matches the explicit-raise style of the other interop methods.
    if first_read != aio.EOF:
        raise ValueError(
            "expected EOF on an empty stream, got %s" % (first_read,)
        )
async def _status_code_and_message(stub: test_pb2_grpc.TestServiceStub):
    """Checks that a server-echoed status code and message round-trip.

    Exercises both the unary and the bidirectional-streaming code paths with
    an UNKNOWN status and a custom detail message.
    """
    details = "test status message"
    status = grpc.StatusCode.UNKNOWN  # code = 2
    # Protobuf copies submessages on assignment, so one EchoStatus can be
    # shared by both request constructions below.
    echo_status = messages_pb2.EchoStatus(
        code=status.value[0], message=details
    )

    # Unary-unary path.
    unary_request = messages_pb2.SimpleRequest(
        response_type=messages_pb2.COMPRESSABLE,
        response_size=1,
        payload=messages_pb2.Payload(body=b"\x00"),
        response_status=echo_status,
    )
    unary_call = stub.UnaryCall(unary_request)
    await _validate_status_code_and_details(unary_call, status, details)

    # Bidirectional-streaming path.
    stream_call = stub.FullDuplexCall()
    stream_request = messages_pb2.StreamingOutputCallRequest(
        response_type=messages_pb2.COMPRESSABLE,
        response_parameters=(messages_pb2.ResponseParameters(size=1),),
        payload=messages_pb2.Payload(body=b"\x00"),
        response_status=echo_status,
    )
    await stream_call.write(stream_request)  # sends the initial request.
    await stream_call.done_writing()
    try:
        await stream_call.read()
    except aio.AioRpcError as rpc_error:
        assert rpc_error.code() == status
    await _validate_status_code_and_details(stream_call, status, details)
async def _unimplemented_method(stub: test_pb2_grpc.TestServiceStub):
    """Calls an unimplemented method and expects UNIMPLEMENTED."""
    rpc = stub.UnimplementedCall(empty_pb2.Empty())
    await _expect_status_code(rpc, grpc.StatusCode.UNIMPLEMENTED)
async def _unimplemented_service(stub: test_pb2_grpc.UnimplementedServiceStub):
    """Calls a method on an unimplemented service and expects UNIMPLEMENTED."""
    rpc = stub.UnimplementedCall(empty_pb2.Empty())
    await _expect_status_code(rpc, grpc.StatusCode.UNIMPLEMENTED)
async def _custom_metadata(stub: test_pb2_grpc.TestServiceStub):
    """Sends custom initial/trailing metadata and checks it is echoed back."""
    initial_metadata_value = "test_initial_metadata_value"
    trailing_metadata_value = b"\x0a\x0b\x0a\x0b\x0a\x0b"
    metadata = aio.Metadata(
        (_INITIAL_METADATA_KEY, initial_metadata_value),
        (_TRAILING_METADATA_KEY, trailing_metadata_value),
    )

    async def _check_echoed_metadata(call):
        # Both halves of the metadata must come back unmodified.
        received_initial = await call.initial_metadata()
        if received_initial[_INITIAL_METADATA_KEY] != initial_metadata_value:
            raise ValueError(
                "expected initial metadata %s, got %s"
                % (
                    initial_metadata_value,
                    received_initial[_INITIAL_METADATA_KEY],
                )
            )
        received_trailing = await call.trailing_metadata()
        if (
            received_trailing[_TRAILING_METADATA_KEY]
            != trailing_metadata_value
        ):
            raise ValueError(
                "expected trailing metadata %s, got %s"
                % (
                    trailing_metadata_value,
                    received_trailing[_TRAILING_METADATA_KEY],
                )
            )

    # Unary-unary path.
    unary_call = stub.UnaryCall(
        messages_pb2.SimpleRequest(
            response_type=messages_pb2.COMPRESSABLE,
            response_size=1,
            payload=messages_pb2.Payload(body=b"\x00"),
        ),
        metadata=metadata,
    )
    await _check_echoed_metadata(unary_call)

    # Bidirectional-streaming path.
    stream_call = stub.FullDuplexCall(metadata=metadata)
    await stream_call.write(
        messages_pb2.StreamingOutputCallRequest(
            response_type=messages_pb2.COMPRESSABLE,
            response_parameters=(messages_pb2.ResponseParameters(size=1),),
        )
    )
    await stream_call.read()
    await stream_call.done_writing()
    await _check_echoed_metadata(stream_call)
async def _compute_engine_creds(
    stub: test_pb2_grpc.TestServiceStub, args: argparse.Namespace
):
    """Checks that Compute Engine credentials yield the expected username."""
    response = await _large_unary_common_behavior(stub, True, True, None)
    if response.username != args.default_service_account:
        raise ValueError(
            "expected username %s, got %s"
            % (args.default_service_account, response.username)
        )
async def _oauth2_auth_token(
    stub: test_pb2_grpc.TestServiceStub, args: argparse.Namespace
):
    """Checks OAuth2 token auth: echoed username and OAuth scope must match.

    Args:
        stub: The TestService stub (expected to be created on a channel
            carrying OAuth2 access-token credentials).
        args: Parsed command-line arguments; only ``oauth_scope`` is read.

    Raises:
        ValueError: If the echoed username does not match the service
            account's client email, or if the echoed scope is not part of
            the requested scope.
    """
    json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS]
    # Close the key file deterministically instead of leaking the handle.
    with open(json_key_filename, "r") as json_key_file:
        wanted_email = json.load(json_key_file)["client_email"]
    response = await _large_unary_common_behavior(stub, True, True, None)
    if wanted_email != response.username:
        raise ValueError(
            "expected username %s, got %s" % (wanted_email, response.username)
        )
    # Substring test, same as the original `.find(...) == -1` check.
    if response.oauth_scope not in args.oauth_scope:
        raise ValueError(
            'expected to find oauth scope "{}" in received "{}"'.format(
                response.oauth_scope, args.oauth_scope
            )
        )
async def _jwt_token_creds(stub: test_pb2_grpc.TestServiceStub):
    """Checks JWT credentials: the echoed username must be the client email.

    Args:
        stub: The TestService stub (expected to be created on a channel
            carrying JWT credentials).

    Raises:
        ValueError: If the echoed username does not match the service
            account's client email.
    """
    json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS]
    # Close the key file deterministically instead of leaking the handle.
    with open(json_key_filename, "r") as json_key_file:
        wanted_email = json.load(json_key_file)["client_email"]
    response = await _large_unary_common_behavior(stub, True, False, None)
    if wanted_email != response.username:
        raise ValueError(
            "expected username %s, got %s" % (wanted_email, response.username)
        )
async def _per_rpc_creds(
    stub: test_pb2_grpc.TestServiceStub, args: argparse.Namespace
):
    """Checks per-RPC call credentials built from application default creds.

    Args:
        stub: The TestService stub.
        args: Parsed command-line arguments; only ``oauth_scope`` is read.

    Raises:
        ValueError: If the echoed username does not match the service
            account's client email.
    """
    json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS]
    # Close the key file deterministically instead of leaking the handle.
    with open(json_key_filename, "r") as json_key_file:
        wanted_email = json.load(json_key_file)["client_email"]
    google_credentials, unused_project_id = google_auth.default(
        scopes=[args.oauth_scope]
    )
    call_credentials = grpc.metadata_call_credentials(
        google_auth_transport_grpc.AuthMetadataPlugin(
            credentials=google_credentials,
            request=google_auth_transport_requests.Request(),
        )
    )
    response = await _large_unary_common_behavior(
        stub, True, False, call_credentials
    )
    if wanted_email != response.username:
        raise ValueError(
            "expected username %s, got %s" % (wanted_email, response.username)
        )
async def _special_status_message(stub: test_pb2_grpc.TestServiceStub):
    """Round-trips a status detail string containing whitespace and Unicode.

    The detail string mixes control characters with BMP and non-BMP code
    points to exercise status-message encoding on the unary path.
    """
    details = (
        b"\t\ntest with whitespace\r\nand Unicode BMP \xe2\x98\xba and non-BMP"
        b" \xf0\x9f\x98\x88\t\n".decode("utf-8")
    )
    status = grpc.StatusCode.UNKNOWN  # code = 2

    request = messages_pb2.SimpleRequest(
        response_type=messages_pb2.COMPRESSABLE,
        response_size=1,
        payload=messages_pb2.Payload(body=b"\x00"),
        response_status=messages_pb2.EchoStatus(
            code=status.value[0], message=details
        ),
    )
    await _validate_status_code_and_details(
        stub.UnaryCall(request), status, details
    )
@enum.unique
class TestCase(enum.Enum):
    """Enumerates the supported interoperability test cases.

    Each member's string value is the test-case name accepted on the
    command line; the coroutine implementing each case is looked up in a
    module-level mapping keyed by these members.
    """

    EMPTY_UNARY = "empty_unary"
    LARGE_UNARY = "large_unary"
    SERVER_STREAMING = "server_streaming"
    CLIENT_STREAMING = "client_streaming"
    PING_PONG = "ping_pong"
    CANCEL_AFTER_BEGIN = "cancel_after_begin"
    CANCEL_AFTER_FIRST_RESPONSE = "cancel_after_first_response"
    TIMEOUT_ON_SLEEPING_SERVER = "timeout_on_sleeping_server"
    EMPTY_STREAM = "empty_stream"
    STATUS_CODE_AND_MESSAGE = "status_code_and_message"
    UNIMPLEMENTED_METHOD = "unimplemented_method"
    UNIMPLEMENTED_SERVICE = "unimplemented_service"
    CUSTOM_METADATA = "custom_metadata"
    COMPUTE_ENGINE_CREDS = "compute_engine_creds"
    OAUTH2_AUTH_TOKEN = "oauth2_auth_token"
    JWT_TOKEN_CREDS = "jwt_token_creds"
    PER_RPC_CREDS = "per_rpc_creds"
    SPECIAL_STATUS_MESSAGE = "special_status_message"
# Maps each TestCase member to the coroutine implementing it.  The
# implementations take either (stub,) or (stub, args);
# test_interoperability inspects each coroutine's signature to decide how
# to invoke it.
_TEST_CASE_IMPLEMENTATION_MAPPING = {
    TestCase.EMPTY_UNARY: _empty_unary,
    TestCase.LARGE_UNARY: _large_unary,
    TestCase.SERVER_STREAMING: _server_streaming,
    TestCase.CLIENT_STREAMING: _client_streaming,
    TestCase.PING_PONG: _ping_pong,
    TestCase.CANCEL_AFTER_BEGIN: _cancel_after_begin,
    TestCase.CANCEL_AFTER_FIRST_RESPONSE: _cancel_after_first_response,
    TestCase.TIMEOUT_ON_SLEEPING_SERVER: _timeout_on_sleeping_server,
    TestCase.EMPTY_STREAM: _empty_stream,
    TestCase.STATUS_CODE_AND_MESSAGE: _status_code_and_message,
    TestCase.UNIMPLEMENTED_METHOD: _unimplemented_method,
    TestCase.UNIMPLEMENTED_SERVICE: _unimplemented_service,
    TestCase.CUSTOM_METADATA: _custom_metadata,
    TestCase.COMPUTE_ENGINE_CREDS: _compute_engine_creds,
    TestCase.OAUTH2_AUTH_TOKEN: _oauth2_auth_token,
    TestCase.JWT_TOKEN_CREDS: _jwt_token_creds,
    TestCase.PER_RPC_CREDS: _per_rpc_creds,
    TestCase.SPECIAL_STATUS_MESSAGE: _special_status_message,
}
async def test_interoperability(
    case: TestCase,
    stub: test_pb2_grpc.TestServiceStub,
    args: Optional[argparse.Namespace] = None,
) -> None:
    """Dispatches a single interop test case to its implementation.

    Args:
        case: The test case to run.
        stub: The TestService stub to run it against.
        args: Parsed command-line arguments, required only by cases whose
            implementation takes two parameters.

    Raises:
        NotImplementedError: If no implementation exists for ``case``.
        ValueError: If the implementation needs ``args`` but none were
            supplied, or has an unexpected signature.
    """
    method = _TEST_CASE_IMPLEMENTATION_MAPPING.get(case)
    if method is None:
        raise NotImplementedError(f'Test case "{case}" not implemented!')
    # Implementations take either (stub,) or (stub, args).
    num_params = len(inspect.signature(method).parameters)
    if num_params == 1:
        await method(stub)
        return
    if num_params == 2:
        if args is None:
            raise ValueError(f"Failed to run case [{case}]: args is None")
        await method(stub, args)
        return
    raise ValueError(f"Invalid number of parameters [{num_params}]")
| 17,709
| 32.992322
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_aio/interop/__init__.py
|
# Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 580
| 40.5
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_admin/setup.py
|
# Copyright 2021 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup module for admin interface in gRPC Python."""
import os
import sys
import setuptools
_PACKAGE_PATH = os.path.realpath(os.path.dirname(__file__))
_README_PATH = os.path.join(_PACKAGE_PATH, "README.rst")
# Ensure we're in the proper directory whether or not we're being used by pip.
os.chdir(os.path.dirname(os.path.abspath(__file__)))
# Break import-style to ensure we can actually find our local modules.
import grpc_version
CLASSIFIERS = [
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
]
PACKAGE_DIRECTORIES = {
"": ".",
}
INSTALL_REQUIRES = (
"grpcio-channelz>={version}".format(version=grpc_version.VERSION),
"grpcio-csds>={version}".format(version=grpc_version.VERSION),
)
SETUP_REQUIRES = INSTALL_REQUIRES
setuptools.setup(
name="grpcio-admin",
version=grpc_version.VERSION,
license="Apache License 2.0",
description="a collection of admin services",
long_description=open(_README_PATH, "r").read(),
author="The gRPC Authors",
author_email="grpc-io@googlegroups.com",
classifiers=CLASSIFIERS,
url="https://grpc.io",
package_dir=PACKAGE_DIRECTORIES,
packages=setuptools.find_packages("."),
python_requires=">=3.6",
install_requires=INSTALL_REQUIRES,
setup_requires=SETUP_REQUIRES,
)
| 1,997
| 30.714286
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_admin/grpc_version.py
|
# Copyright 2021 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_admin/grpc_version.py.template`!!!
VERSION = '1.57.0.dev0'  # Auto-generated; do not edit by hand (see template above).
| 703
| 38.111111
| 96
|
py
|
grpc
|
grpc-master/src/python/grpcio_admin/grpc_admin/__init__.py
|
# Copyright 2021 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""gRPC Python's Admin interface."""
from grpc_channelz.v1 import channelz
import grpc_csds
def add_admin_servicers(server):
    """Register admin servicers to a server.

    gRPC ships predefined admin services (currently Channelz and CSDS) that
    expose internal state for debugging.  Each one lives in its own package,
    so wiring them up individually means handling separate dependencies,
    initialization, and imports.

    This helper registers all of them in one call; admin services added in
    future gRPC releases become available automatically on upgrade.

    Args:
        server: A gRPC server to which all admin services will be added.
    """
    for register in (
        channelz.add_channelz_servicer,
        grpc_csds.add_csds_servicer,
    ):
        register(server)


__all__ = ["add_admin_servicers"]
| 1,620
| 36.697674
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_status/setup.py
|
# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup module for the GRPC Python package's status mapping."""
import os
import setuptools
_PACKAGE_PATH = os.path.realpath(os.path.dirname(__file__))
_README_PATH = os.path.join(_PACKAGE_PATH, "README.rst")
# Ensure we're in the proper directory whether or not we're being used by pip.
os.chdir(os.path.dirname(os.path.abspath(__file__)))
# Break import-style to ensure we can actually find our local modules.
import grpc_version
class _NoOpCommand(setuptools.Command):
"""No-op command."""
description = ""
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
pass
CLASSIFIERS = [
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"License :: OSI Approved :: Apache Software License",
]
PACKAGE_DIRECTORIES = {
"": ".",
}
INSTALL_REQUIRES = (
"protobuf>=4.21.6",
"grpcio>={version}".format(version=grpc_version.VERSION),
"googleapis-common-protos>=1.5.5",
)
try:
import status_commands as _status_commands
# we are in the build environment, otherwise the above import fails
COMMAND_CLASS = {
# Run preprocess from the repository *before* doing any packaging!
"preprocess": _status_commands.Preprocess,
"build_package_protos": _NoOpCommand,
}
except ImportError:
COMMAND_CLASS = {
# wire up commands to no-op not to break the external dependencies
"preprocess": _NoOpCommand,
"build_package_protos": _NoOpCommand,
}
setuptools.setup(
name="grpcio-status",
version=grpc_version.VERSION,
description="Status proto mapping for gRPC",
long_description=open(_README_PATH, "r").read(),
author="The gRPC Authors",
author_email="grpc-io@googlegroups.com",
url="https://grpc.io",
license="Apache License 2.0",
classifiers=CLASSIFIERS,
package_dir=PACKAGE_DIRECTORIES,
packages=setuptools.find_packages("."),
python_requires=">=3.6",
install_requires=INSTALL_REQUIRES,
cmdclass=COMMAND_CLASS,
)
| 3,074
| 28.854369
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_status/grpc_version.py
|
# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_status/grpc_version.py.template`!!!
VERSION = '1.57.0.dev0'  # Auto-generated; do not edit by hand (see template above).
| 704
| 38.166667
| 97
|
py
|
grpc
|
grpc-master/src/python/grpcio_status/status_commands.py
|
# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides distutils command classes for the GRPC Python setup process."""
import os
import shutil
import setuptools
# Absolute path of the package directory containing this file.
ROOT_DIR = os.path.abspath(os.path.dirname(os.path.abspath(__file__)))
# Canonical status.proto lives in the googleapis submodule at the repo root.
STATUS_PROTO = os.path.join(
    ROOT_DIR, "../../../third_party/googleapis/google/rpc/status.proto"
)
# Package-relative destination directory for the vendored proto.
PACKAGE_STATUS_PROTO_PATH = "grpc_status/google/rpc"
LICENSE = os.path.join(ROOT_DIR, "../../../LICENSE")
class Preprocess(setuptools.Command):
    """Command to copy status.proto from googleapis and LICENSE from the
    root directory into this package before building."""

    description = ""
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        """Copies status.proto and LICENSE if their sources exist (i.e.
        when running from a repository checkout)."""
        if os.path.isfile(STATUS_PROTO):
            # Resolve the destination against ROOT_DIR so the directory is
            # created where the file is copied; the original created the
            # directory relative to the CWD, which only matched ROOT_DIR
            # after setup.py's os.chdir().  exist_ok avoids a TOCTOU race.
            target_dir = os.path.join(ROOT_DIR, PACKAGE_STATUS_PROTO_PATH)
            os.makedirs(target_dir, exist_ok=True)
            shutil.copyfile(
                STATUS_PROTO, os.path.join(target_dir, "status.proto")
            )
        if os.path.isfile(LICENSE):
            shutil.copyfile(LICENSE, os.path.join(ROOT_DIR, "LICENSE"))
| 1,693
| 30.962264
| 75
|
py
|
grpc
|
grpc-master/src/python/grpcio_status/grpc_status/_async.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Reference implementation for status mapping in gRPC Python."""
from google.rpc import status_pb2
from grpc.experimental import aio
from ._common import GRPC_DETAILS_METADATA_KEY
from ._common import code_to_grpc_status_code
async def from_call(call: aio.Call):
    """Returns a google.rpc.status.Status message from a given grpc.aio.Call.

    This is an EXPERIMENTAL API.

    Args:
        call: An grpc.aio.Call instance.

    Returns:
        A google.rpc.status.Status message representing the status of the RPC,
        or None if no rich status was attached.
    """
    code = await call.code()
    details = await call.details()
    trailing_metadata = await call.trailing_metadata()
    if trailing_metadata is None:
        return None
    for key, value in trailing_metadata:
        if key != GRPC_DETAILS_METADATA_KEY:
            continue
        # Found the serialized rich status; cross-check it against the
        # plain gRPC status before returning it.
        rich_status = status_pb2.Status.FromString(value)
        if code.value[0] != rich_status.code:
            raise ValueError(
                "Code in Status proto (%s) doesn't match status code (%s)"
                % (code_to_grpc_status_code(rich_status.code), code)
            )
        if details != rich_status.message:
            raise ValueError(
                "Message in Status proto (%s) doesn't match status details"
                " (%s)" % (rich_status.message, details)
            )
        return rich_status
    return None


__all__ = [
    "from_call",
]
| 2,004
| 32.983051
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_status/grpc_status/__init__.py
|
# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 580
| 40.5
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_status/grpc_status/rpc_status.py
|
# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Reference implementation for status mapping in gRPC Python."""
import collections
import sys
from google.rpc import status_pb2
import grpc
from ._common import GRPC_DETAILS_METADATA_KEY
from ._common import code_to_grpc_status_code
class _Status(
    collections.namedtuple("_Status", ("code", "details", "trailing_metadata")),
    grpc.Status,
):
    # Concrete grpc.Status carrier: an immutable (code, details,
    # trailing_metadata) triple produced by to_status().
    pass
def from_call(call):
    """Returns a google.rpc.status.Status message corresponding to a given grpc.Call.

    This is an EXPERIMENTAL API.

    Args:
        call: A grpc.Call instance.

    Returns:
        A google.rpc.status.Status message representing the status of the RPC.

    Raises:
        ValueError: If the gRPC call's code or details are inconsistent with the
            status code and message inside of the google.rpc.status.Status.
    """
    trailing_metadata = call.trailing_metadata()
    if trailing_metadata is None:
        return None
    for key, value in trailing_metadata:
        if key != GRPC_DETAILS_METADATA_KEY:
            continue
        # Found the serialized rich status; cross-check it against the
        # plain gRPC status before returning it.
        rich_status = status_pb2.Status.FromString(value)
        if call.code().value[0] != rich_status.code:
            raise ValueError(
                "Code in Status proto (%s) doesn't match status code (%s)"
                % (code_to_grpc_status_code(rich_status.code), call.code())
            )
        if call.details() != rich_status.message:
            raise ValueError(
                "Message in Status proto (%s) doesn't match status details"
                " (%s)" % (rich_status.message, call.details())
            )
        return rich_status
    return None
def to_status(status):
    """Convert a google.rpc.status.Status message to grpc.Status.

    This is an EXPERIMENTAL API.

    Args:
        status: a google.rpc.status.Status message representing the non-OK status
            to terminate the RPC with and communicate it to the client.

    Returns:
        A grpc.Status instance representing the input google.rpc.status.Status message.
    """
    # The full rich status travels in trailing metadata as serialized bytes.
    serialized = status.SerializeToString()
    return _Status(
        code=code_to_grpc_status_code(status.code),
        details=status.message,
        trailing_metadata=((GRPC_DETAILS_METADATA_KEY, serialized),),
    )
__all__ = [
    "from_call",
    "to_status",
]

# Compare version tuples directly: checking major and minor independently
# (the previous `version_info[0] >= 3 and version_info[1] >= 6`) would
# wrongly exclude releases such as 4.0.
if sys.version_info >= (3, 6):
    from . import _async as aio  # pylint: disable=unused-import

    __all__.append("aio")
| 2,992
| 29.85567
| 85
|
py
|
grpc
|
grpc-master/src/python/grpcio_status/grpc_status/_common.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Reference implementation for status mapping in gRPC Python."""
import grpc
# Inverse lookup of grpc.StatusCode: integer wire code -> enum member.
_CODE_TO_GRPC_CODE_MAPPING = {x.value[0]: x for x in grpc.StatusCode}
# Trailing-metadata key carrying the serialized google.rpc.Status proto.
GRPC_DETAILS_METADATA_KEY = "grpc-status-details-bin"
def code_to_grpc_status_code(code):
    """Maps an integer status code to the matching grpc.StatusCode member.

    Raises:
        ValueError: If the integer is not a known gRPC status code.
    """
    status_code = _CODE_TO_GRPC_CODE_MAPPING.get(code)
    if status_code is None:
        raise ValueError("Invalid status code %s" % code)
    return status_code
| 959
| 33.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_health_checking/setup.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup module for the GRPC Python package's optional health checking."""
import os
import setuptools
_PACKAGE_PATH = os.path.realpath(os.path.dirname(__file__))
_README_PATH = os.path.join(_PACKAGE_PATH, "README.rst")
# Ensure we're in the proper directory whether or not we're being used by pip.
os.chdir(os.path.dirname(os.path.abspath(__file__)))
# Break import-style to ensure we can actually find our local modules.
import grpc_version
class _NoOpCommand(setuptools.Command):
"""No-op command."""
description = ""
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
pass
CLASSIFIERS = [
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"License :: OSI Approved :: Apache Software License",
]
PACKAGE_DIRECTORIES = {
"": ".",
}
INSTALL_REQUIRES = (
"protobuf>=4.21.6",
"grpcio>={version}".format(version=grpc_version.VERSION),
)
try:
import health_commands as _health_commands
# we are in the build environment, otherwise the above import fails
SETUP_REQUIRES = (
"grpcio-tools=={version}".format(version=grpc_version.VERSION),
)
COMMAND_CLASS = {
# Run preprocess from the repository *before* doing any packaging!
"preprocess": _health_commands.Preprocess,
"build_package_protos": _health_commands.BuildPackageProtos,
}
except ImportError:
SETUP_REQUIRES = ()
COMMAND_CLASS = {
# wire up commands to no-op not to break the external dependencies
"preprocess": _NoOpCommand,
"build_package_protos": _NoOpCommand,
}
setuptools.setup(
name="grpcio-health-checking",
version=grpc_version.VERSION,
description="Standard Health Checking Service for gRPC",
long_description=open(_README_PATH, "r").read(),
author="The gRPC Authors",
author_email="grpc-io@googlegroups.com",
url="https://grpc.io",
license="Apache License 2.0",
classifiers=CLASSIFIERS,
package_dir=PACKAGE_DIRECTORIES,
packages=setuptools.find_packages("."),
python_requires=">=3.6",
install_requires=INSTALL_REQUIRES,
setup_requires=SETUP_REQUIRES,
cmdclass=COMMAND_CLASS,
)
| 3,246
| 29.345794
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_health_checking/grpc_version.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_health_checking/grpc_version.py.template`!!!
VERSION = '1.57.0.dev0'  # Auto-generated; do not edit by hand (see template above).
| 710
| 38.5
| 106
|
py
|
grpc
|
grpc-master/src/python/grpcio_health_checking/health_commands.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides distutils command classes for the GRPC Python setup process."""
import os
import shutil
import setuptools
# Absolute path of the package directory containing this file.
ROOT_DIR = os.path.abspath(os.path.dirname(os.path.abspath(__file__)))
# Canonical health.proto in the repository's shared proto tree.
HEALTH_PROTO = os.path.join(ROOT_DIR, "../../proto/grpc/health/v1/health.proto")
LICENSE = os.path.join(ROOT_DIR, "../../../LICENSE")
class Preprocess(setuptools.Command):
    """Command to copy proto modules from grpc/src/proto and LICENSE from
    the root directory"""

    description = ""
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        # Copy the canonical health.proto into the package tree and the
        # repository LICENSE alongside it; each copy is skipped when its
        # source is absent (e.g. outside a repository checkout).
        proto_destination = os.path.join(
            ROOT_DIR, "grpc_health/v1/health.proto"
        )
        if os.path.isfile(HEALTH_PROTO):
            shutil.copyfile(HEALTH_PROTO, proto_destination)
        if os.path.isfile(LICENSE):
            shutil.copyfile(LICENSE, os.path.join(ROOT_DIR, "LICENSE"))
class BuildPackageProtos(setuptools.Command):
    """Command to generate project *_pb2.py modules from proto files."""

    description = "build grpc protobuf modules"
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        """Generates *_pb2 modules for every .proto below the package root."""
        # due to limitations of the proto generator, we require that only *one*
        # directory is provided as an 'include' directory. We assume it's the '' key
        # to `self.distribution.package_dir` (and get a key error if it's not
        # there).
        from grpc_tools import command

        command.build_package_protos(self.distribution.package_dir[""])
| 2,173
| 30.507246
| 84
|
py
|
grpc
|
grpc-master/src/python/grpcio_health_checking/grpc_health/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_health_checking/grpc_health/v1/_async.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Reference implementation for health checking in gRPC Python."""
import asyncio
import collections
from typing import MutableMapping
import grpc
from grpc_health.v1 import health_pb2 as _health_pb2
from grpc_health.v1 import health_pb2_grpc as _health_pb2_grpc
class HealthServicer(_health_pb2_grpc.HealthServicer):
    """An AsyncIO implementation of health checking servicer."""

    # Maps service name -> last known serving status; "" is the conventional
    # key for overall server health.
    _server_status: MutableMapping[
        str, "_health_pb2.HealthCheckResponse.ServingStatus"
    ]
    # Maps service name -> condition that Watch() streams wait on; notified
    # by _set() whenever that service's status changes.
    _server_watchers: MutableMapping[str, asyncio.Condition]
    # Once True, set() becomes a no-op (see enter_graceful_shutdown).
    _gracefully_shutting_down: bool

    def __init__(self) -> None:
        self._server_status = {"": _health_pb2.HealthCheckResponse.SERVING}
        self._server_watchers = collections.defaultdict(asyncio.Condition)
        self._gracefully_shutting_down = False

    async def Check(
        self, request: _health_pb2.HealthCheckRequest, context
    ) -> _health_pb2.HealthCheckResponse:
        """Returns the current status of the requested service.

        Aborts the RPC with NOT_FOUND when the service has never had a
        status set.
        """
        status = self._server_status.get(request.service)
        if status is None:
            await context.abort(grpc.StatusCode.NOT_FOUND)
        else:
            return _health_pb2.HealthCheckResponse(status=status)

    async def Watch(
        self, request: _health_pb2.HealthCheckRequest, context
    ) -> None:
        """Streams one response per observed status change.

        Immediately sends the current status (SERVICE_UNKNOWN if unset),
        then waits on the service's condition for subsequent changes.
        """
        condition = self._server_watchers[request.service]
        last_status = None
        try:
            async with condition:
                while True:
                    status = self._server_status.get(
                        request.service,
                        _health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
                    )

                    # NOTE(lidiz) If the observed status is the same, it means
                    # there are missing intermediate statuses. It's considered
                    # acceptable since peer only interested in eventual status.
                    if status != last_status:
                        # Responds with current health state
                        await context.write(
                            _health_pb2.HealthCheckResponse(status=status)
                        )

                    # Records the last sent status
                    last_status = status

                    # Polling on health state changes
                    await condition.wait()
        finally:
            # Drop this service's condition when the stream ends
            # (cancellation or error).
            if request.service in self._server_watchers:
                del self._server_watchers[request.service]

    async def _set(
        self,
        service: str,
        status: _health_pb2.HealthCheckResponse.ServingStatus,
    ) -> None:
        # Record the new status and, if any Watch() stream is registered for
        # this service, wake all of them under the shared condition.
        if service in self._server_watchers:
            condition = self._server_watchers.get(service)
            async with condition:
                self._server_status[service] = status
                condition.notify_all()
        else:
            self._server_status[service] = status

    async def set(
        self,
        service: str,
        status: _health_pb2.HealthCheckResponse.ServingStatus,
    ) -> None:
        """Sets the status of a service.

        Args:
          service: string, the name of the service.
          status: HealthCheckResponse.status enum value indicating the status of
            the service
        """
        if self._gracefully_shutting_down:
            # Statuses are frozen once graceful shutdown has begun.
            return
        else:
            await self._set(service, status)

    async def enter_graceful_shutdown(self) -> None:
        """Permanently sets the status of all services to NOT_SERVING.

        This should be invoked when the server is entering a graceful shutdown
        period. After this method is invoked, future attempts to set the status
        of a service will be ignored.
        """
        if self._gracefully_shutting_down:
            return
        else:
            # Flag is flipped first; NOT_SERVING updates go through _set()
            # directly, which ignores the flag (unlike the public set()).
            self._gracefully_shutting_down = True
            for service in self._server_status:
                await self._set(
                    service, _health_pb2.HealthCheckResponse.NOT_SERVING
                )
| 4,534
| 35.28
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_health_checking/grpc_health/v1/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_health_checking/grpc_health/v1/health.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Reference implementation for health checking in gRPC Python."""
import collections
import sys
import threading
import grpc
from grpc_health.v1 import health_pb2 as _health_pb2
from grpc_health.v1 import health_pb2_grpc as _health_pb2_grpc
# Exposes AsyncHealthServicer as public API on interpreters that support it.
# A tuple comparison is used instead of the previous
# ``sys.version_info[0] >= 3 and sys.version_info[1] >= 6`` form, which would
# incorrectly evaluate to False on a hypothetical 4.0-4.5 interpreter.
if sys.version_info >= (3, 6):
    from . import _async as aio  # pylint: disable=unused-import

# The service name of the health checking servicer.
SERVICE_NAME = _health_pb2.DESCRIPTOR.services_by_name["Health"].full_name
# The entry of overall health for the entire server.
OVERALL_HEALTH = ""
class _Watcher:
def __init__(self):
self._condition = threading.Condition()
self._responses = collections.deque()
self._open = True
def __iter__(self):
return self
def _next(self):
with self._condition:
while not self._responses and self._open:
self._condition.wait()
if self._responses:
return self._responses.popleft()
else:
raise StopIteration()
def next(self):
return self._next()
def __next__(self):
return self._next()
def add(self, response):
with self._condition:
self._responses.append(response)
self._condition.notify()
def close(self):
with self._condition:
self._open = False
self._condition.notify()
def _watcher_to_send_response_callback_adapter(watcher):
def send_response_callback(response):
if response is None:
watcher.close()
else:
watcher.add(response)
return send_response_callback
class HealthServicer(_health_pb2_grpc.HealthServicer):
    """Servicer handling RPCs for service statuses."""

    def __init__(
        self, experimental_non_blocking=True, experimental_thread_pool=None
    ):
        # Guards _server_status and _send_response_callbacks; reentrant
        # because enter_graceful_shutdown() calls set() while holding it.
        self._lock = threading.RLock()
        # "" is the conventional key for overall server health.
        self._server_status = {"": _health_pb2.HealthCheckResponse.SERVING}
        # Maps service name -> set of callbacks for active Watch() streams.
        self._send_response_callbacks = {}
        # NOTE(review): these attributes are set on the underlying function
        # object, so they are shared by every HealthServicer instance in the
        # process; the most recently constructed instance wins.
        self.Watch.__func__.experimental_non_blocking = (
            experimental_non_blocking
        )
        self.Watch.__func__.experimental_thread_pool = experimental_thread_pool
        # Once True, set() becomes a no-op (see enter_graceful_shutdown).
        self._gracefully_shutting_down = False

    def _on_close_callback(self, send_response_callback, service):
        # Builds an RPC-termination callback that unregisters the watcher
        # and then signals end-of-stream to it (None response).
        def callback():
            with self._lock:
                self._send_response_callbacks[service].remove(
                    send_response_callback
                )
            send_response_callback(None)

        return callback

    def Check(self, request, context):
        """Returns the current status of the requested service.

        Responds with NOT_FOUND (and a default response message) when the
        service has never had a status set.
        """
        with self._lock:
            status = self._server_status.get(request.service)
            if status is None:
                context.set_code(grpc.StatusCode.NOT_FOUND)
                return _health_pb2.HealthCheckResponse()
            else:
                return _health_pb2.HealthCheckResponse(status=status)

    # pylint: disable=arguments-differ
    def Watch(self, request, context, send_response_callback=None):
        """Streams status updates for the requested service.

        Immediately sends the current status (SERVICE_UNKNOWN if unset),
        then one message per subsequent set() call for that service.
        """
        blocking_watcher = None
        if send_response_callback is None:
            # The server does not support the experimental_non_blocking
            # parameter. For backwards compatibility, return a blocking response
            # generator.
            blocking_watcher = _Watcher()
            send_response_callback = _watcher_to_send_response_callback_adapter(
                blocking_watcher
            )
        service = request.service
        with self._lock:
            status = self._server_status.get(service)
            if status is None:
                status = (
                    _health_pb2.HealthCheckResponse.SERVICE_UNKNOWN
                )  # pylint: disable=no-member
            # First response always reflects the current state.
            send_response_callback(
                _health_pb2.HealthCheckResponse(status=status)
            )
            # Register for future updates; unregistered by the RPC
            # termination callback installed just below.
            if service not in self._send_response_callbacks:
                self._send_response_callbacks[service] = set()
            self._send_response_callbacks[service].add(send_response_callback)
            context.add_callback(
                self._on_close_callback(send_response_callback, service)
            )
        return blocking_watcher

    def set(self, service, status):
        """Sets the status of a service.

        Args:
          service: string, the name of the service.
          status: HealthCheckResponse.status enum value indicating the status of
            the service
        """
        with self._lock:
            if self._gracefully_shutting_down:
                # Statuses are frozen once graceful shutdown has begun.
                return
            else:
                self._server_status[service] = status
                if service in self._send_response_callbacks:
                    # Push the new status to every active watcher.
                    for send_response_callback in self._send_response_callbacks[
                        service
                    ]:
                        send_response_callback(
                            _health_pb2.HealthCheckResponse(status=status)
                        )

    def enter_graceful_shutdown(self):
        """Permanently sets the status of all services to NOT_SERVING.

        This should be invoked when the server is entering a graceful shutdown
        period. After this method is invoked, future attempts to set the status
        of a service will be ignored.

        This is an EXPERIMENTAL API.
        """
        with self._lock:
            if self._gracefully_shutting_down:
                return
            else:
                # The flag is flipped only *after* the NOT_SERVING updates,
                # because set() is a no-op once the flag is True; the RLock
                # permits the reentrant acquisition inside set().
                for service in self._server_status:
                    self.set(
                        service, _health_pb2.HealthCheckResponse.NOT_SERVING
                    )  # pylint: disable=no-member
                self._gracefully_shutting_down = True
| 6,365
| 34.171271
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_reflection/reflection_commands.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides distutils command classes for the GRPC Python setup process."""
import os
import shutil
import setuptools
# Absolute path of the directory containing this file (the package root).
ROOT_DIR = os.path.abspath(os.path.dirname(os.path.abspath(__file__)))
# Location of the reflection proto inside the gRPC source checkout; only
# present when building from the repository (see Preprocess.run).
REFLECTION_PROTO = os.path.join(
    ROOT_DIR, "../../proto/grpc/reflection/v1alpha/reflection.proto"
)
# Repository-root LICENSE file to bundle into the package.
LICENSE = os.path.join(ROOT_DIR, "../../../LICENSE")
class Preprocess(setuptools.Command):
    """Command to copy proto modules from grpc/src/proto and LICENSE from
    the root directory"""

    description = ""
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        # Stage the reflection proto and the repository LICENSE into the
        # package tree so they ship with the sdist. Each copy is skipped
        # when its source is absent (e.g. building outside the gRPC repo).
        proto_destination = os.path.join(
            ROOT_DIR, "grpc_reflection/v1alpha/reflection.proto"
        )
        if os.path.isfile(REFLECTION_PROTO):
            shutil.copyfile(REFLECTION_PROTO, proto_destination)
        if os.path.isfile(LICENSE):
            shutil.copyfile(LICENSE, os.path.join(ROOT_DIR, "LICENSE"))
class BuildPackageProtos(setuptools.Command):
    """Command to generate project *_pb2.py modules from proto files."""

    description = "build grpc protobuf modules"
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        # Imported lazily: grpc_tools is only present in the build
        # environment.
        from grpc_tools import command

        # The proto generator accepts exactly one include directory, so we
        # use the package root registered under the '' key of package_dir.
        # A KeyError here means the distribution was not configured as this
        # command expects.
        command.build_package_protos(self.distribution.package_dir[""])
| 2,255
| 29.90411
| 84
|
py
|
grpc
|
grpc-master/src/python/grpcio_reflection/setup.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup module for the GRPC Python package's optional reflection."""
import os
import sys
import setuptools
# Directory containing this setup.py and the package README.
_PACKAGE_PATH = os.path.realpath(os.path.dirname(__file__))
_README_PATH = os.path.join(_PACKAGE_PATH, "README.rst")

# Ensure we're in the proper directory whether or not we're being used by pip.
os.chdir(os.path.dirname(os.path.abspath(__file__)))

# Break import-style to ensure we can actually find our local modules.
import grpc_version
class _NoOpCommand(setuptools.Command):
    """No-op command.

    Substituted for the real build commands when the build-time helpers are
    unavailable, so the declared command names still resolve.
    """

    # Placeholder metadata required by the setuptools.Command interface.
    description = ""
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        pass
# Trove classifiers published to PyPI.
CLASSIFIERS = [
    "Development Status :: 5 - Production/Stable",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.4",
    "Programming Language :: Python :: 3.5",
    "Programming Language :: Python :: 3.6",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "License :: OSI Approved :: Apache Software License",
]

# Map the package root ("") to the current directory.
PACKAGE_DIRECTORIES = {
    "": ".",
}

# Runtime dependencies; grpcio is pinned to this package's own version.
INSTALL_REQUIRES = (
    "protobuf>=4.21.6",
    "grpcio>={version}".format(version=grpc_version.VERSION),
)

try:
    import reflection_commands as _reflection_commands

    # we are in the build environment, otherwise the above import fails
    SETUP_REQUIRES = (
        "grpcio-tools=={version}".format(version=grpc_version.VERSION),
    )
    COMMAND_CLASS = {
        # Run preprocess from the repository *before* doing any packaging!
        "preprocess": _reflection_commands.Preprocess,
        "build_package_protos": _reflection_commands.BuildPackageProtos,
    }
except ImportError:
    SETUP_REQUIRES = ()
    COMMAND_CLASS = {
        # wire up commands to no-op not to break the external dependencies
        "preprocess": _NoOpCommand,
        "build_package_protos": _NoOpCommand,
    }
# Read the long description up front so the README file handle is closed
# deterministically; the previous bare ``open(...).read()`` relied on garbage
# collection to close the file and triggers a ResourceWarning.
with open(_README_PATH, "r") as _readme_file:
    _LONG_DESCRIPTION = _readme_file.read()

setuptools.setup(
    name="grpcio-reflection",
    version=grpc_version.VERSION,
    license="Apache License 2.0",
    description="Standard Protobuf Reflection Service for gRPC",
    long_description=_LONG_DESCRIPTION,
    author="The gRPC Authors",
    author_email="grpc-io@googlegroups.com",
    classifiers=CLASSIFIERS,
    url="https://grpc.io",
    package_dir=PACKAGE_DIRECTORIES,
    packages=setuptools.find_packages("."),
    python_requires=">=3.6",
    install_requires=INSTALL_REQUIRES,
    setup_requires=SETUP_REQUIRES,
    cmdclass=COMMAND_CLASS,
)
| 3,267
| 29.259259
| 78
|
py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.