repo
stringlengths 2
99
| file
stringlengths 13
225
| code
stringlengths 0
18.3M
| file_length
int64 0
18.3M
| avg_line_length
float64 0
1.36M
| max_line_length
int64 0
4.26M
| extension_type
stringclasses 1
value |
|---|---|---|---|---|---|---|
grpc
|
grpc-master/examples/python/health_checking/helloworld_pb2_grpc.py
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import helloworld_pb2 as helloworld__pb2
class GreeterStub(object):
    """Client-side stub for the helloworld.Greeter service.

    Generated code: wraps a grpc.Channel with typed RPC callables.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Unary-unary RPC: one HelloRequest in, one HelloReply out.
        self.SayHello = channel.unary_unary(
                '/helloworld.Greeter/SayHello',
                request_serializer=helloworld__pb2.HelloRequest.SerializeToString,
                response_deserializer=helloworld__pb2.HelloReply.FromString,
                )
class GreeterServicer(object):
    """Server-side service definition for helloworld.Greeter.

    Subclass and override SayHello to implement the service.
    """

    def SayHello(self, request, context):
        """Sends a greeting.

        Default implementation reports UNIMPLEMENTED; override in a subclass.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_GreeterServicer_to_server(servicer, server):
    """Register a GreeterServicer implementation with a grpc server."""
    rpc_method_handlers = {
            'SayHello': grpc.unary_unary_rpc_method_handler(
                    servicer.SayHello,
                    request_deserializer=helloworld__pb2.HelloRequest.FromString,
                    response_serializer=helloworld__pb2.HelloReply.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'helloworld.Greeter', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class Greeter(object):
    """The greeting service definition.

    Static convenience wrappers (grpc.experimental) that invoke the RPC on a
    target address without the caller managing a channel/stub explicitly.
    """

    @staticmethod
    def SayHello(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke helloworld.Greeter/SayHello on *target* via grpc.experimental."""
        return grpc.experimental.unary_unary(request, target, '/helloworld.Greeter/SayHello',
            helloworld__pb2.HelloRequest.SerializeToString,
            helloworld__pb2.HelloReply.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 2,310
| 31.549296
| 93
|
py
|
grpc
|
grpc-master/examples/python/health_checking/greeter_server.py
|
# Copyright 2023 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python implementation of the GRPC helloworld.Greeter server with health checking."""
from concurrent import futures
import logging
import threading
from time import sleep
import grpc
from grpc_health.v1 import health
from grpc_health.v1 import health_pb2
from grpc_health.v1 import health_pb2_grpc
import helloworld_pb2
import helloworld_pb2_grpc
class Greeter(helloworld_pb2_grpc.GreeterServicer):
    """Greeter implementation that echoes the request's name back as the reply."""

    def SayHello(self, request, context):
        # Reply message is simply the caller-supplied name.
        reply = helloworld_pb2.HelloReply()
        reply.message = request.name
        return reply
def _toggle_health(health_servicer: health.HealthServicer, service: str):
    """Flip *service* between NOT_SERVING and SERVING every five seconds.

    Runs forever; intended for a daemon thread. Note the very first update
    publishes NOT_SERVING (the initial SERVING value is inverted before the
    first set call), matching the original behavior.
    """
    serving = health_pb2.HealthCheckResponse.SERVING
    not_serving = health_pb2.HealthCheckResponse.NOT_SERVING
    current = serving
    while True:
        # Invert the previous status, publish it, then wait.
        current = not_serving if current == serving else serving
        health_servicer.set(service, current)
        sleep(5)
def _configure_health_server(server: grpc.Server):
    """Attach a non-blocking health servicer to *server* and start a daemon
    thread that periodically toggles the Greeter service's health status."""
    health_servicer = health.HealthServicer(
        experimental_non_blocking=True,
        experimental_thread_pool=futures.ThreadPoolExecutor(max_workers=10),
    )
    health_pb2_grpc.add_HealthServicer_to_server(health_servicer, server)
    # Daemon thread: must never block interpreter shutdown.
    threading.Thread(
        target=_toggle_health,
        args=(health_servicer, "helloworld.Greeter"),
        daemon=True,
    ).start()
def serve():
    """Start the Greeter server with health checking on port 50051 and block."""
    port = "50051"
    thread_pool = futures.ThreadPoolExecutor(max_workers=10)
    server = grpc.server(thread_pool)
    helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
    bind_address = "[::]:" + port
    server.add_insecure_port(bind_address)
    _configure_health_server(server)
    server.start()
    print("Server started, listening on " + port)
    server.wait_for_termination()
if __name__ == "__main__":
    # Configure root logging before entering the blocking server loop.
    logging.basicConfig()
    serve()
| 2,501
| 31.921053
| 91
|
py
|
grpc
|
grpc-master/examples/python/health_checking/helloworld_pb2.py
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: helloworld.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
# Module-level symbol database used by generated protobuf code.
_sym_db = _symbol_database.Default()

# Register the serialized helloworld.proto descriptor
# (messages HelloRequest/HelloReply, service Greeter).
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10helloworld.proto\x12\nhelloworld\"\x1c\n\x0cHelloRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x1d\n\nHelloReply\x12\x0f\n\x07message\x18\x01 \x01(\t2I\n\x07Greeter\x12>\n\x08SayHello\x12\x18.helloworld.HelloRequest\x1a\x16.helloworld.HelloReply\"\x00\x42\x36\n\x1bio.grpc.examples.helloworldB\x0fHelloWorldProtoP\x01\xa2\x02\x03HLWb\x06proto3')

# Materialize message classes and descriptors into this module's namespace.
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'helloworld_pb2', globals())

if _descriptor._USE_C_DESCRIPTORS == False:
    # Pure-Python descriptors: attach serialized options and record each
    # entity's byte offsets within the serialized file above.
    DESCRIPTOR._options = None
    DESCRIPTOR._serialized_options = b'\n\033io.grpc.examples.helloworldB\017HelloWorldProtoP\001\242\002\003HLW'
    _HELLOREQUEST._serialized_start=32
    _HELLOREQUEST._serialized_end=60
    _HELLOREPLY._serialized_start=62
    _HELLOREPLY._serialized_end=91
    _GREETER._serialized_start=93
    _GREETER._serialized_end=166
# @@protoc_insertion_point(module_scope)
| 1,450
| 45.806452
| 409
|
py
|
grpc
|
grpc-master/examples/python/health_checking/greeter_client.py
|
# Copyright 2023 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""gRPC Python helloworld.Greeter client with health checking."""
import logging
from time import sleep
import grpc
from grpc_health.v1 import health_pb2
from grpc_health.v1 import health_pb2_grpc
import helloworld_pb2
import helloworld_pb2_grpc
def unary_call(stub: helloworld_pb2_grpc.GreeterStub, message: str):
    """Issue one SayHello RPC with a 3-second deadline and print the reply."""
    request = helloworld_pb2.HelloRequest(name=message)
    reply = stub.SayHello(request, timeout=3)
    print(f"Greeter client received: {reply.message}")
def health_check_call(stub: health_pb2_grpc.HealthStub):
    """Query the health of helloworld.Greeter and print the result.

    Statuses other than SERVING/NOT_SERVING produce no output.
    """
    request = health_pb2.HealthCheckRequest(service="helloworld.Greeter")
    status = stub.Check(request).status
    if status == health_pb2.HealthCheckResponse.SERVING:
        print("server is serving")
    elif status == health_pb2.HealthCheckResponse.NOT_SERVING:
        print("server stopped serving")
def run():
    """Exercise the Greeter RPC once, then poll health once per second for 30s."""
    with grpc.insecure_channel("localhost:50051") as channel:
        greeter_stub = helloworld_pb2_grpc.GreeterStub(channel)
        health_stub = health_pb2_grpc.HealthStub(channel)
        # Should succeed while the server reports SERVING.
        unary_call(greeter_stub, "you")
        # Poll the health status every second, 30 times in total.
        remaining = 30
        while remaining > 0:
            health_check_call(health_stub)
            sleep(1)
            remaining -= 1
if __name__ == "__main__":
    # Configure root logging before running the client loop.
    logging.basicConfig()
    run()
| 1,884
| 31.5
| 74
|
py
|
grpc
|
grpc-master/examples/python/uds/async_greeter_client.py
|
# Copyright 2022 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The gRPC AsyncIO client for the UDS example."""
import asyncio
import logging
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
async def run() -> None:
    """Greet the server over each of the two UDS address styles."""
    # Relative-path form and absolute (triple-slash) form.
    for uds_address in ("unix:helloworld.sock", "unix:///tmp/helloworld.sock"):
        async with grpc.aio.insecure_channel(uds_address) as channel:
            stub = helloworld_pb2_grpc.GreeterStub(channel)
            request = helloworld_pb2.HelloRequest(name="you")
            response = await stub.SayHello(request)
            logging.info("Received: %s", response.message)
if __name__ == "__main__":
    # INFO level so the Received: log lines are visible.
    logging.basicConfig(level=logging.INFO)
    asyncio.run(run())
| 1,264
| 32.289474
| 75
|
py
|
grpc
|
grpc-master/examples/python/uds/helloworld_pb2_grpc.py
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import helloworld_pb2 as helloworld__pb2
class GreeterStub(object):
    """Client-side stub for the helloworld.Greeter service.

    Generated code: wraps a grpc.Channel with typed RPC callables.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Unary-unary RPC: one HelloRequest in, one HelloReply out.
        self.SayHello = channel.unary_unary(
                '/helloworld.Greeter/SayHello',
                request_serializer=helloworld__pb2.HelloRequest.SerializeToString,
                response_deserializer=helloworld__pb2.HelloReply.FromString,
                )
class GreeterServicer(object):
    """Server-side service definition for helloworld.Greeter.

    Subclass and override SayHello to implement the service.
    """

    def SayHello(self, request, context):
        """Sends a greeting.

        Default implementation reports UNIMPLEMENTED; override in a subclass.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_GreeterServicer_to_server(servicer, server):
    """Register a GreeterServicer implementation with a grpc server."""
    rpc_method_handlers = {
            'SayHello': grpc.unary_unary_rpc_method_handler(
                    servicer.SayHello,
                    request_deserializer=helloworld__pb2.HelloRequest.FromString,
                    response_serializer=helloworld__pb2.HelloReply.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'helloworld.Greeter', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class Greeter(object):
    """The greeting service definition.

    Static convenience wrappers (grpc.experimental) that invoke the RPC on a
    target address without the caller managing a channel/stub explicitly.
    """

    @staticmethod
    def SayHello(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke helloworld.Greeter/SayHello on *target* via grpc.experimental."""
        return grpc.experimental.unary_unary(request, target, '/helloworld.Greeter/SayHello',
            helloworld__pb2.HelloRequest.SerializeToString,
            helloworld__pb2.HelloReply.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 2,310
| 31.549296
| 93
|
py
|
grpc
|
grpc-master/examples/python/uds/async_greeter_server.py
|
# Copyright 2022 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The gRPC AsyncIO server for the UDS example."""
import asyncio
import logging
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
class Greeter(helloworld_pb2_grpc.GreeterServicer):
    """Greets callers by their channel peer address, ignoring the request body."""

    async def SayHello(
        self,
        request: helloworld_pb2.HelloRequest,
        context: grpc.aio.ServicerContext,
    ) -> helloworld_pb2.HelloReply:
        # The request payload is intentionally unused; greet by peer address.
        del request
        greeting = f"Hello to {context.peer()}!"
        return helloworld_pb2.HelloReply(message=greeting)
async def serve() -> None:
    """Serve the Greeter on both UDS address forms until terminated."""
    server = grpc.aio.server()
    helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
    # Bind the relative-path and absolute-path UDS forms.
    for uds_address in ("unix:helloworld.sock", "unix:///tmp/helloworld.sock"):
        server.add_insecure_port(uds_address)
        logging.info("Server listening on: %s", uds_address)
    await server.start()
    await server.wait_for_termination()
if __name__ == "__main__":
    # INFO level so the listening-address log lines are visible.
    logging.basicConfig(level=logging.INFO)
    asyncio.run(serve())
| 1,558
| 31.479167
| 79
|
py
|
grpc
|
grpc-master/examples/python/uds/greeter_server.py
|
# Copyright 2022 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The gRPC Python server for the UDS example."""
from concurrent import futures
import logging
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
class Greeter(helloworld_pb2_grpc.GreeterServicer):
    """Greets callers by their channel peer address, ignoring the request body."""

    def SayHello(self, request, context):
        # The request payload is intentionally unused; greet by peer address.
        del request
        greeting = f"Hello to {context.peer()}!"
        return helloworld_pb2.HelloReply(message=greeting)
def serve():
    """Serve the Greeter on both UDS address forms until terminated."""
    thread_pool = futures.ThreadPoolExecutor(max_workers=10)
    server = grpc.server(thread_pool)
    helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
    # Bind the relative-path and absolute-path UDS forms.
    for uds_address in ("unix:helloworld.sock", "unix:///tmp/helloworld.sock"):
        server.add_insecure_port(uds_address)
        logging.info("Server listening on: %s", uds_address)
    server.start()
    server.wait_for_termination()
if __name__ == "__main__":
    # INFO level so the listening-address log lines are visible.
    logging.basicConfig(level=logging.INFO)
    serve()
| 1,451
| 32
| 79
|
py
|
grpc
|
grpc-master/examples/python/uds/helloworld_pb2.py
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: helloworld.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
# Module-level symbol database used by generated protobuf code.
_sym_db = _symbol_database.Default()

# Register the serialized helloworld.proto descriptor
# (messages HelloRequest/HelloReply, service Greeter).
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10helloworld.proto\x12\nhelloworld\"\x1c\n\x0cHelloRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x1d\n\nHelloReply\x12\x0f\n\x07message\x18\x01 \x01(\t2I\n\x07Greeter\x12>\n\x08SayHello\x12\x18.helloworld.HelloRequest\x1a\x16.helloworld.HelloReply\"\x00\x42\x36\n\x1bio.grpc.examples.helloworldB\x0fHelloWorldProtoP\x01\xa2\x02\x03HLWb\x06proto3')

# Materialize message classes and descriptors into this module's namespace.
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'helloworld_pb2', globals())

if _descriptor._USE_C_DESCRIPTORS == False:
    # Pure-Python descriptors: attach serialized options and record each
    # entity's byte offsets within the serialized file above.
    DESCRIPTOR._options = None
    DESCRIPTOR._serialized_options = b'\n\033io.grpc.examples.helloworldB\017HelloWorldProtoP\001\242\002\003HLW'
    _HELLOREQUEST._serialized_start=32
    _HELLOREQUEST._serialized_end=60
    _HELLOREPLY._serialized_start=62
    _HELLOREPLY._serialized_end=91
    _GREETER._serialized_start=93
    _GREETER._serialized_end=166
# @@protoc_insertion_point(module_scope)
| 1,450
| 45.806452
| 409
|
py
|
grpc
|
grpc-master/examples/python/uds/greeter_client.py
|
# Copyright 2022 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The gRPC Python client for the UDS example."""
from __future__ import print_function
import logging
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
def run():
    """Call SayHello over each UDS address form and log the replies."""
    # Relative-path form and absolute (triple-slash) form.
    for uds_address in ("unix:helloworld.sock", "unix:///tmp/helloworld.sock"):
        with grpc.insecure_channel(uds_address) as channel:
            stub = helloworld_pb2_grpc.GreeterStub(channel)
            reply = stub.SayHello(helloworld_pb2.HelloRequest(name="you"))
            logging.info("Received: %s", reply.message)
if __name__ == "__main__":
    # INFO level so the Received: log lines are visible.
    logging.basicConfig(level=logging.INFO)
    run()
| 1,214
| 31.837838
| 77
|
py
|
grpc
|
grpc-master/examples/python/hellostreamingworld/async_greeter_client.py
|
# Copyright 2021 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python AsyncIO implementation of the GRPC hellostreamingworld.MultiGreeter client."""
import asyncio
import logging
import grpc
import hellostreamingworld_pb2
import hellostreamingworld_pb2_grpc
async def run() -> None:
    """Demonstrate two ways of consuming a server-streaming sayHello RPC."""
    async with grpc.aio.insecure_channel("localhost:50051") as channel:
        stub = hellostreamingworld_pb2_grpc.MultiGreeterStub(channel)
        request = hellostreamingworld_pb2.HelloRequest(name="you")
        # 1) Consume the stream as an async generator.
        async for response in stub.sayHello(request):
            print(
                "Greeter client received from async generator: "
                + response.message
            )
        # 2) Pull messages explicitly with read() until EOF.
        hello_stream = stub.sayHello(request)
        while True:
            response = await hello_stream.read()
            if response == grpc.aio.EOF:
                break
            print(
                "Greeter client received from direct read: " + response.message
            )
if __name__ == "__main__":
    # Configure root logging before running the client event loop.
    logging.basicConfig()
    asyncio.run(run())
| 1,709
| 31.264151
| 92
|
py
|
grpc
|
grpc-master/examples/python/hellostreamingworld/hellostreamingworld_pb2_grpc.py
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import hellostreamingworld_pb2 as hellostreamingworld__pb2
class MultiGreeterStub(object):
    """Client-side stub for the hellostreamingworld.MultiGreeter service.

    Generated code: wraps a grpc.Channel with typed RPC callables.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Server-streaming RPC: one HelloRequest in, a stream of HelloReply out.
        self.sayHello = channel.unary_stream(
                '/hellostreamingworld.MultiGreeter/sayHello',
                request_serializer=hellostreamingworld__pb2.HelloRequest.SerializeToString,
                response_deserializer=hellostreamingworld__pb2.HelloReply.FromString,
                )
class MultiGreeterServicer(object):
    """Server-side service definition for hellostreamingworld.MultiGreeter.

    Subclass and override sayHello to implement the service.
    """

    def sayHello(self, request, context):
        """Sends multiple greetings.

        Default implementation reports UNIMPLEMENTED; override in a subclass.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_MultiGreeterServicer_to_server(servicer, server):
    """Register a MultiGreeterServicer implementation with a grpc server."""
    rpc_method_handlers = {
            'sayHello': grpc.unary_stream_rpc_method_handler(
                    servicer.sayHello,
                    request_deserializer=hellostreamingworld__pb2.HelloRequest.FromString,
                    response_serializer=hellostreamingworld__pb2.HelloReply.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'hellostreamingworld.MultiGreeter', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class MultiGreeter(object):
    """The greeting service definition.

    Static convenience wrappers (grpc.experimental) that invoke the RPC on a
    target address without the caller managing a channel/stub explicitly.
    """

    @staticmethod
    def sayHello(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke MultiGreeter/sayHello (server-streaming) via grpc.experimental."""
        return grpc.experimental.unary_stream(request, target, '/hellostreamingworld.MultiGreeter/sayHello',
            hellostreamingworld__pb2.HelloRequest.SerializeToString,
            hellostreamingworld__pb2.HelloReply.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 2,455
| 33.591549
| 108
|
py
|
grpc
|
grpc-master/examples/python/hellostreamingworld/async_greeter_server.py
|
# Copyright 2021 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python AsyncIO implementation of the GRPC hellostreamingworld.MultiGreeter server."""
import asyncio
import logging
import grpc
from hellostreamingworld_pb2 import HelloReply
from hellostreamingworld_pb2 import HelloRequest
from hellostreamingworld_pb2_grpc import MultiGreeterServicer
from hellostreamingworld_pb2_grpc import add_MultiGreeterServicer_to_server
# Number of streamed HelloReply messages sent per sayHello call.
NUMBER_OF_REPLY = 10
class Greeter(MultiGreeterServicer):
    """Streams NUMBER_OF_REPLY numbered greetings back for every request."""

    async def sayHello(
        self, request: HelloRequest, context: grpc.aio.ServicerContext
    ) -> HelloReply:
        logging.info("Serving sayHello request %s", request)
        # Async generator: each yield sends one message on the stream.
        for i in range(NUMBER_OF_REPLY):
            reply = HelloReply(message=f"Hello number {i}, {request.name}!")
            yield reply
async def serve() -> None:
    """Run the MultiGreeter server on port 50051 until terminated."""
    listen_addr = "[::]:50051"
    server = grpc.aio.server()
    add_MultiGreeterServicer_to_server(Greeter(), server)
    server.add_insecure_port(listen_addr)
    logging.info("Starting server on %s", listen_addr)
    await server.start()
    await server.wait_for_termination()
if __name__ == "__main__":
    # INFO level so per-request log lines are visible.
    logging.basicConfig(level=logging.INFO)
    asyncio.run(serve())
| 1,711
| 33.24
| 92
|
py
|
grpc
|
grpc-master/examples/python/hellostreamingworld/hellostreamingworld_pb2.py
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: hellostreamingworld.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
# Module-level symbol database used by generated protobuf code.
_sym_db = _symbol_database.Default()

# Register the serialized hellostreamingworld.proto descriptor
# (messages HelloRequest/HelloReply, server-streaming service MultiGreeter).
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19hellostreamingworld.proto\x12\x13hellostreamingworld\"3\n\x0cHelloRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x15\n\rnum_greetings\x18\x02 \x01(\t\"\x1d\n\nHelloReply\x12\x0f\n\x07message\x18\x01 \x01(\t2b\n\x0cMultiGreeter\x12R\n\x08sayHello\x12!.hellostreamingworld.HelloRequest\x1a\x1f.hellostreamingworld.HelloReply\"\x00\x30\x01\x42\x0f\n\x07\x65x.grpc\xa2\x02\x03HSWb\x06proto3')

# Materialize message classes and descriptors into this module's namespace.
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'hellostreamingworld_pb2', globals())

if _descriptor._USE_C_DESCRIPTORS == False:
    # Pure-Python descriptors: attach serialized options and record each
    # entity's byte offsets within the serialized file above.
    DESCRIPTOR._options = None
    DESCRIPTOR._serialized_options = b'\n\007ex.grpc\242\002\003HSW'
    _HELLOREQUEST._serialized_start=50
    _HELLOREQUEST._serialized_end=101
    _HELLOREPLY._serialized_start=103
    _HELLOREPLY._serialized_end=132
    _MULTIGREETER._serialized_start=134
    _MULTIGREETER._serialized_end=232
# @@protoc_insertion_point(module_scope)
| 1,481
| 46.806452
| 453
|
py
|
grpc
|
grpc-master/examples/python/interceptors/async/async_greeter_client.py
|
# Copyright 2023 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python AsyncIO implementation of the GRPC helloworld.Greeter client."""
import asyncio
import contextvars
import logging
import random
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
# NOTE(review): this ContextVar is defined but never read in this module —
# presumably illustrative for the interceptor example; confirm before removing.
test_var = contextvars.ContextVar("test", default="test")
async def run() -> None:
    """Send one SayHello carrying a random 128-bit client-rpc-id header."""
    async with grpc.aio.insecure_channel("localhost:50051") as channel:
        stub = helloworld_pb2_grpc.GreeterStub(channel)
        # 128 random bits rendered as a zero-padded 32-char hex id.
        rpc_id = "{:032x}".format(random.getrandbits(128))
        metadata = grpc.aio.Metadata(("client-rpc-id", rpc_id))
        print(f"Sending request with rpc id: {rpc_id}")
        request = helloworld_pb2.HelloRequest(name="you")
        response = await stub.SayHello(request, metadata=metadata)
        print("Greeter client received: " + response.message)
if __name__ == "__main__":
    # Configure root logging before running the client event loop.
    logging.basicConfig()
    asyncio.run(run())
| 1,455
| 31.355556
| 78
|
py
|
grpc
|
grpc-master/examples/python/interceptors/async/helloworld_pb2_grpc.py
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import helloworld_pb2 as helloworld__pb2
class GreeterStub(object):
    """Client-side stub for the helloworld.Greeter service.

    Generated code: wraps a grpc.Channel with typed RPC callables.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Unary-unary RPC: one HelloRequest in, one HelloReply out.
        self.SayHello = channel.unary_unary(
                '/helloworld.Greeter/SayHello',
                request_serializer=helloworld__pb2.HelloRequest.SerializeToString,
                response_deserializer=helloworld__pb2.HelloReply.FromString,
                )
class GreeterServicer(object):
    """Server-side service definition for helloworld.Greeter.

    Subclass and override SayHello to implement the service.
    """

    def SayHello(self, request, context):
        """Sends a greeting.

        Default implementation reports UNIMPLEMENTED; override in a subclass.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_GreeterServicer_to_server(servicer, server):
    """Register a GreeterServicer implementation with a grpc server."""
    rpc_method_handlers = {
            'SayHello': grpc.unary_unary_rpc_method_handler(
                    servicer.SayHello,
                    request_deserializer=helloworld__pb2.HelloRequest.FromString,
                    response_serializer=helloworld__pb2.HelloReply.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'helloworld.Greeter', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class Greeter(object):
    """The greeting service definition.

    Static convenience wrappers (grpc.experimental) that invoke the RPC on a
    target address without the caller managing a channel/stub explicitly.
    """

    @staticmethod
    def SayHello(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke helloworld.Greeter/SayHello on *target* via grpc.experimental."""
        return grpc.experimental.unary_unary(request, target, '/helloworld.Greeter/SayHello',
            helloworld__pb2.HelloRequest.SerializeToString,
            helloworld__pb2.HelloReply.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 2,310
| 31.549296
| 93
|
py
|
grpc
|
grpc-master/examples/python/interceptors/async/helloworld_pb2.py
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: helloworld.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
# Module-level symbol database used by generated protobuf code.
_sym_db = _symbol_database.Default()

# Register the serialized helloworld.proto descriptor
# (messages HelloRequest/HelloReply, service Greeter).
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10helloworld.proto\x12\nhelloworld\"\x1c\n\x0cHelloRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x1d\n\nHelloReply\x12\x0f\n\x07message\x18\x01 \x01(\t2I\n\x07Greeter\x12>\n\x08SayHello\x12\x18.helloworld.HelloRequest\x1a\x16.helloworld.HelloReply\"\x00\x42\x36\n\x1bio.grpc.examples.helloworldB\x0fHelloWorldProtoP\x01\xa2\x02\x03HLWb\x06proto3')

# Materialize message classes and descriptors into this module's namespace.
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'helloworld_pb2', globals())

if _descriptor._USE_C_DESCRIPTORS == False:
    # Pure-Python descriptors: attach serialized options and record each
    # entity's byte offsets within the serialized file above.
    DESCRIPTOR._options = None
    DESCRIPTOR._serialized_options = b'\n\033io.grpc.examples.helloworldB\017HelloWorldProtoP\001\242\002\003HLW'
    _HELLOREQUEST._serialized_start=32
    _HELLOREQUEST._serialized_end=60
    _HELLOREPLY._serialized_start=62
    _HELLOREPLY._serialized_end=91
    _GREETER._serialized_start=93
    _GREETER._serialized_end=166
# @@protoc_insertion_point(module_scope)
| 1,450
| 45.806452
| 409
|
py
|
grpc
|
grpc-master/examples/python/interceptors/async/async_greeter_server_with_interceptor.py
|
# Copyright 2023 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python AsyncIO implementation of the GRPC helloworld.Greeter server."""
import asyncio
import contextvars
import logging
from typing import Awaitable, Callable, Optional
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
# Context-local RPC id; the sentinel "default" marks a request not yet tagged
# by any interceptor in the chain.
rpc_id_var = contextvars.ContextVar("rpc_id", default="default")
class RPCIdInterceptor(grpc.aio.ServerInterceptor):
    """Server interceptor that prefixes its tag onto the context-local rpc_id.

    Chaining two of these yields ids like Interceptor2-Interceptor1-RPC_ID.
    """

    def __init__(self, tag: str, rpc_id: Optional[str] = None) -> None:
        self.tag = tag
        self.rpc_id = rpc_id

    async def intercept_service(
        self,
        continuation: Callable[
            [grpc.HandlerCallDetails], Awaitable[grpc.RpcMethodHandler]
        ],
        handler_call_details: grpc.HandlerCallDetails,
    ) -> grpc.RpcMethodHandler:
        """Prepend this interceptor's tag to rpc_id_var, then continue the chain."""
        current_id = rpc_id_var.get()
        logging.info("%s called with rpc_id: %s", self.tag, current_id)
        if current_id == "default":
            # First interceptor to run: seed the id from the client metadata.
            metadata = dict(handler_call_details.invocation_metadata)
            rpc_id_var.set(self.decorate(metadata["client-rpc-id"]))
        else:
            rpc_id_var.set(self.decorate(current_id))
        return await continuation(handler_call_details)

    def decorate(self, rpc_id: str):
        # Tag-prefix the id, e.g. "Interceptor1-<rpc_id>".
        return f"{self.tag}-{rpc_id}"
class Greeter(helloworld_pb2_grpc.GreeterServicer):
    """Greeter that logs the interceptor-decorated rpc_id for each request."""

    async def SayHello(
        self,
        request: helloworld_pb2.HelloRequest,
        context: grpc.aio.ServicerContext,
    ) -> helloworld_pb2.HelloReply:
        logging.info(
            "Handle rpc with id %s in server handler.", rpc_id_var.get()
        )
        greeting = "Hello, %s!" % request.name
        return helloworld_pb2.HelloReply(message=greeting)
async def serve() -> None:
    """Run the Greeter behind two chained RPCIdInterceptors on port 50051."""
    server = grpc.aio.server(
        interceptors=[
            RPCIdInterceptor("Interceptor1"),
            RPCIdInterceptor("Interceptor2"),
        ]
    )
    helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
    listen_addr = "[::]:50051"
    server.add_insecure_port(listen_addr)
    logging.info("Starting server on %s", listen_addr)
    await server.start()
    await server.wait_for_termination()
if __name__ == "__main__":
    # INFO level so interceptor and handler log lines are visible.
    logging.basicConfig(level=logging.INFO)
    asyncio.run(serve())
| 2,971
| 33.16092
| 79
|
py
|
grpc
|
grpc-master/examples/python/interceptors/default_value/helloworld_pb2_grpc.py
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import helloworld_pb2 as helloworld__pb2
class GreeterStub(object):
    """The greeting service definition.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Bind a unary-unary callable for SayHello; (de)serialization is
        # delegated to the generated protobuf message classes.
        self.SayHello = channel.unary_unary(
                '/helloworld.Greeter/SayHello',
                request_serializer=helloworld__pb2.HelloRequest.SerializeToString,
                response_deserializer=helloworld__pb2.HelloReply.FromString,
                )


class GreeterServicer(object):
    """The greeting service definition.
    """

    def SayHello(self, request, context):
        """Sends a greeting
        """
        # Default stub behavior: report UNIMPLEMENTED until a concrete
        # servicer subclass overrides this method.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')


def add_GreeterServicer_to_server(servicer, server):
    # Register the servicer on the server, pairing each method with the
    # request deserializer / response serializer for its message types.
    rpc_method_handlers = {
            'SayHello': grpc.unary_unary_rpc_method_handler(
                    servicer.SayHello,
                    request_deserializer=helloworld__pb2.HelloRequest.FromString,
                    response_serializer=helloworld__pb2.HelloReply.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'helloworld.Greeter', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))


# This class is part of an EXPERIMENTAL API.
class Greeter(object):
    """The greeting service definition.
    """

    @staticmethod
    def SayHello(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        # One-shot convenience invocation: creates a channel to `target`,
        # performs the RPC, and returns the response without a stub.
        return grpc.experimental.unary_unary(request, target, '/helloworld.Greeter/SayHello',
            helloworld__pb2.HelloRequest.SerializeToString,
            helloworld__pb2.HelloReply.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 2,310
| 31.549296
| 93
|
py
|
grpc
|
grpc-master/examples/python/interceptors/default_value/default_value_client_interceptor.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interceptor that adds headers to outgoing requests."""
import grpc
class _ConcreteValue(grpc.Future):
    """A grpc.Future that is already resolved to a fixed result.

    Used to substitute a default response for a failed RPC: the future is
    born done, never raises, and can never be cancelled.
    """

    def __init__(self, result):
        self._result = result

    def cancel(self):
        # Already done, so cancellation can never succeed.
        return False

    def cancelled(self):
        return False

    def running(self):
        return False

    def done(self):
        return True

    def result(self, timeout=None):
        # The value is immediately available; `timeout` is ignored.
        return self._result

    def exception(self, timeout=None):
        return None

    def traceback(self, timeout=None):
        return None

    def add_done_callback(self, fn):
        # The future is already done, so invoke the callback immediately.
        # Bug fix: the grpc.Future contract passes the future itself to the
        # callback, not the result (previously this called fn(self._result)).
        fn(self)
class DefaultValueClientInterceptor(
    grpc.UnaryUnaryClientInterceptor, grpc.StreamUnaryClientInterceptor
):
    """Client interceptor that substitutes a default response when an RPC fails."""

    def __init__(self, value):
        # Wrap the fallback value in an already-completed future so it can
        # stand in for a failed RPC's response future.
        self._default = _ConcreteValue(value)

    def _intercept_call(
        self, continuation, client_call_details, request_or_iterator
    ):
        outcome = continuation(client_call_details, request_or_iterator)
        if outcome.exception():
            return self._default
        return outcome

    def intercept_unary_unary(self, continuation, client_call_details, request):
        return self._intercept_call(continuation, client_call_details, request)

    def intercept_stream_unary(
        self, continuation, client_call_details, request_iterator
    ):
        return self._intercept_call(
            continuation, client_call_details, request_iterator
        )
| 1,985
| 27.782609
| 80
|
py
|
grpc
|
grpc-master/examples/python/interceptors/default_value/helloworld_pb2.py
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: helloworld.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()

# Serialized FileDescriptorProto for helloworld.proto: messages HelloRequest
# (string name) and HelloReply (string message), plus service Greeter with
# the unary SayHello method.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10helloworld.proto\x12\nhelloworld\"\x1c\n\x0cHelloRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x1d\n\nHelloReply\x12\x0f\n\x07message\x18\x01 \x01(\t2I\n\x07Greeter\x12>\n\x08SayHello\x12\x18.helloworld.HelloRequest\x1a\x16.helloworld.HelloReply\"\x00\x42\x36\n\x1bio.grpc.examples.helloworldB\x0fHelloWorldProtoP\x01\xa2\x02\x03HLWb\x06proto3')
# Build the descriptors and inject the generated message classes into this
# module's namespace via globals().
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'helloworld_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
  # Pure-Python descriptors: attach serialized options and record the byte
  # offsets of each descriptor within the serialized file.
  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\033io.grpc.examples.helloworldB\017HelloWorldProtoP\001\242\002\003HLW'
  _HELLOREQUEST._serialized_start=32
  _HELLOREQUEST._serialized_end=60
  _HELLOREPLY._serialized_start=62
  _HELLOREPLY._serialized_end=91
  _GREETER._serialized_start=93
  _GREETER._serialized_end=166
# @@protoc_insertion_point(module_scope)
| 1,450
| 45.806452
| 409
|
py
|
grpc
|
grpc-master/examples/python/interceptors/default_value/greeter_client.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python implementation of the gRPC helloworld.Greeter client."""
from __future__ import print_function
import logging
import default_value_client_interceptor
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
def run():
    """Call SayHello through a channel wrapping the default-value interceptor."""
    fallback = helloworld_pb2.HelloReply(
        message="Hello from your local interceptor!"
    )
    interceptor = (
        default_value_client_interceptor.DefaultValueClientInterceptor(
            fallback
        )
    )
    # NOTE(gRPC Python Team): .close() is possible on a channel and should be
    # used in circumstances in which the with statement does not fit the needs
    # of the code.
    with grpc.insecure_channel("localhost:50051") as channel:
        wrapped_channel = grpc.intercept_channel(channel, interceptor)
        stub = helloworld_pb2_grpc.GreeterStub(wrapped_channel)
        response = stub.SayHello(helloworld_pb2.HelloRequest(name="you"))
        print("Greeter client received: " + response.message)


if __name__ == "__main__":
    logging.basicConfig()
    run()
| 1,682
| 32.66
| 78
|
py
|
grpc
|
grpc-master/examples/python/interceptors/headers/header_manipulator_client_interceptor.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interceptor that adds headers to outgoing requests."""
import collections
import generic_client_interceptor
import grpc
class _ClientCallDetails(
    collections.namedtuple(
        "_ClientCallDetails", ("method", "timeout", "metadata", "credentials")
    ),
    grpc.ClientCallDetails,
):
    # Immutable grpc.ClientCallDetails implementation; lets the interceptor
    # rebuild call details with modified metadata.
    pass
def header_adder_interceptor(header, value):
    """Create a client interceptor that appends ``(header, value)`` to the
    outgoing invocation metadata of every RPC."""

    def intercept_call(
        client_call_details,
        request_iterator,
        request_streaming,
        response_streaming,
    ):
        existing = client_call_details.metadata
        metadata = list(existing) if existing is not None else []
        metadata.append((header, value))
        new_details = _ClientCallDetails(
            client_call_details.method,
            client_call_details.timeout,
            metadata,
            client_call_details.credentials,
        )
        # No response postprocessing is needed, hence the trailing None.
        return new_details, request_iterator, None

    return generic_client_interceptor.create(intercept_call)
| 1,644
| 28.375
| 78
|
py
|
grpc
|
grpc-master/examples/python/interceptors/headers/helloworld_pb2_grpc.py
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import helloworld_pb2 as helloworld__pb2
class GreeterStub(object):
    """The greeting service definition.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Bind a unary-unary callable for SayHello; (de)serialization is
        # delegated to the generated protobuf message classes.
        self.SayHello = channel.unary_unary(
                '/helloworld.Greeter/SayHello',
                request_serializer=helloworld__pb2.HelloRequest.SerializeToString,
                response_deserializer=helloworld__pb2.HelloReply.FromString,
                )


class GreeterServicer(object):
    """The greeting service definition.
    """

    def SayHello(self, request, context):
        """Sends a greeting
        """
        # Default stub behavior: report UNIMPLEMENTED until a concrete
        # servicer subclass overrides this method.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')


def add_GreeterServicer_to_server(servicer, server):
    # Register the servicer on the server, pairing each method with the
    # request deserializer / response serializer for its message types.
    rpc_method_handlers = {
            'SayHello': grpc.unary_unary_rpc_method_handler(
                    servicer.SayHello,
                    request_deserializer=helloworld__pb2.HelloRequest.FromString,
                    response_serializer=helloworld__pb2.HelloReply.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'helloworld.Greeter', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))


# This class is part of an EXPERIMENTAL API.
class Greeter(object):
    """The greeting service definition.
    """

    @staticmethod
    def SayHello(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        # One-shot convenience invocation: creates a channel to `target`,
        # performs the RPC, and returns the response without a stub.
        return grpc.experimental.unary_unary(request, target, '/helloworld.Greeter/SayHello',
            helloworld__pb2.HelloRequest.SerializeToString,
            helloworld__pb2.HelloReply.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 2,310
| 31.549296
| 93
|
py
|
grpc
|
grpc-master/examples/python/interceptors/headers/greeter_server.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python implementation of the GRPC helloworld.Greeter server."""
from concurrent import futures
import logging
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
from request_header_validator_interceptor import (
RequestHeaderValidatorInterceptor,
)
class Greeter(helloworld_pb2_grpc.GreeterServicer):
    """Minimal Greeter servicer used behind the header-validating interceptor."""

    def SayHello(self, request, context):
        greeting = "Hello, %s!" % request.name
        return helloworld_pb2.HelloReply(message=greeting)
def serve():
    """Start a Greeter server that rejects RPCs lacking the expected header."""
    validator = RequestHeaderValidatorInterceptor(
        "one-time-password",
        "42",
        grpc.StatusCode.UNAUTHENTICATED,
        "Access denied!",
    )
    server = grpc.server(
        futures.ThreadPoolExecutor(max_workers=10),
        interceptors=(validator,),
    )
    helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
    server.add_insecure_port("[::]:50051")
    server.start()
    server.wait_for_termination()


if __name__ == "__main__":
    logging.basicConfig()
    serve()
| 1,574
| 29.288462
| 77
|
py
|
grpc
|
grpc-master/examples/python/interceptors/headers/request_header_validator_interceptor.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interceptor that ensures a specific header is present."""
import grpc
def _unary_unary_rpc_terminator(code, details):
    """Build a unary-unary RPC handler that aborts every call with the given
    status code and details."""

    def _abort(ignored_request, context):
        context.abort(code, details)

    return grpc.unary_unary_rpc_method_handler(_abort)
class RequestHeaderValidatorInterceptor(grpc.ServerInterceptor):
    """Server interceptor requiring a specific (header, value) metadata pair.

    RPCs carrying the pair proceed normally; all others are terminated with
    the configured status code and details.
    """

    def __init__(self, header, value, code, details):
        self._expected_metadatum = (header, value)
        self._terminator = _unary_unary_rpc_terminator(code, details)

    def intercept_service(self, continuation, handler_call_details):
        if self._expected_metadatum in handler_call_details.invocation_metadata:
            return continuation(handler_call_details)
        return self._terminator
| 1,384
| 33.625
| 74
|
py
|
grpc
|
grpc-master/examples/python/interceptors/headers/generic_client_interceptor.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base class for interceptors that operate on all RPC types."""
import grpc
class _GenericClientInterceptor(
    grpc.UnaryUnaryClientInterceptor,
    grpc.UnaryStreamClientInterceptor,
    grpc.StreamUnaryClientInterceptor,
    grpc.StreamStreamClientInterceptor,
):
    """Adapts one callback into all four client interceptor flavors.

    The callback receives (call_details, request_iterator, request_streaming,
    response_streaming) and returns (new_details, new_request_iterator,
    postprocess), where postprocess may be None.
    """

    def __init__(self, interceptor_function):
        self._fn = interceptor_function

    def _invoke(
        self,
        continuation,
        client_call_details,
        request_iterator,
        request_streaming,
        response_streaming,
    ):
        # Delegate to the user callback, run the RPC with the rewritten
        # details/request, then apply the optional postprocess hook.
        new_details, new_request_iterator, postprocess = self._fn(
            client_call_details,
            request_iterator,
            request_streaming,
            response_streaming,
        )
        if request_streaming:
            outcome = continuation(new_details, new_request_iterator)
        else:
            # Unary request: unwrap the single element from the iterator.
            outcome = continuation(new_details, next(new_request_iterator))
        return postprocess(outcome) if postprocess else outcome

    def intercept_unary_unary(self, continuation, client_call_details, request):
        return self._invoke(
            continuation, client_call_details, iter((request,)), False, False
        )

    def intercept_unary_stream(
        self, continuation, client_call_details, request
    ):
        return self._invoke(
            continuation, client_call_details, iter((request,)), False, True
        )

    def intercept_stream_unary(
        self, continuation, client_call_details, request_iterator
    ):
        return self._invoke(
            continuation, client_call_details, request_iterator, True, False
        )

    def intercept_stream_stream(
        self, continuation, client_call_details, request_iterator
    ):
        return self._invoke(
            continuation, client_call_details, request_iterator, True, True
        )
def create(intercept_call):
    # Public factory: wrap a single callback into an interceptor instance
    # covering all four RPC arities.
    return _GenericClientInterceptor(intercept_call)
| 2,532
| 37.969231
| 80
|
py
|
grpc
|
grpc-master/examples/python/interceptors/headers/helloworld_pb2.py
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: helloworld.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()

# Serialized FileDescriptorProto for helloworld.proto: messages HelloRequest
# (string name) and HelloReply (string message), plus service Greeter with
# the unary SayHello method.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10helloworld.proto\x12\nhelloworld\"\x1c\n\x0cHelloRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x1d\n\nHelloReply\x12\x0f\n\x07message\x18\x01 \x01(\t2I\n\x07Greeter\x12>\n\x08SayHello\x12\x18.helloworld.HelloRequest\x1a\x16.helloworld.HelloReply\"\x00\x42\x36\n\x1bio.grpc.examples.helloworldB\x0fHelloWorldProtoP\x01\xa2\x02\x03HLWb\x06proto3')
# Build the descriptors and inject the generated message classes into this
# module's namespace via globals().
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'helloworld_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
  # Pure-Python descriptors: attach serialized options and record the byte
  # offsets of each descriptor within the serialized file.
  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\033io.grpc.examples.helloworldB\017HelloWorldProtoP\001\242\002\003HLW'
  _HELLOREQUEST._serialized_start=32
  _HELLOREQUEST._serialized_end=60
  _HELLOREPLY._serialized_start=62
  _HELLOREPLY._serialized_end=91
  _GREETER._serialized_start=93
  _GREETER._serialized_end=166
# @@protoc_insertion_point(module_scope)
| 1,450
| 45.806452
| 409
|
py
|
grpc
|
grpc-master/examples/python/interceptors/headers/greeter_client.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python implementation of the GRPC helloworld.Greeter client."""
from __future__ import print_function
import logging
import grpc
import header_manipulator_client_interceptor
import helloworld_pb2
import helloworld_pb2_grpc
def run():
    """Invoke SayHello through a channel that adds a one-time-password header."""
    interceptor = (
        header_manipulator_client_interceptor.header_adder_interceptor(
            "one-time-password", "42"
        )
    )
    # NOTE(gRPC Python Team): .close() is possible on a channel and should be
    # used in circumstances in which the with statement does not fit the needs
    # of the code.
    with grpc.insecure_channel("localhost:50051") as channel:
        wrapped_channel = grpc.intercept_channel(channel, interceptor)
        stub = helloworld_pb2_grpc.GreeterStub(wrapped_channel)
        response = stub.SayHello(helloworld_pb2.HelloRequest(name="you"))
        print("Greeter client received: " + response.message)


if __name__ == "__main__":
    logging.basicConfig()
    run()
| 1,591
| 32.87234
| 78
|
py
|
grpc
|
grpc-master/examples/python/metadata/helloworld_pb2_grpc.py
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import helloworld_pb2 as helloworld__pb2
class GreeterStub(object):
    """The greeting service definition.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Bind a unary-unary callable for SayHello; (de)serialization is
        # delegated to the generated protobuf message classes.
        self.SayHello = channel.unary_unary(
                '/helloworld.Greeter/SayHello',
                request_serializer=helloworld__pb2.HelloRequest.SerializeToString,
                response_deserializer=helloworld__pb2.HelloReply.FromString,
                )


class GreeterServicer(object):
    """The greeting service definition.
    """

    def SayHello(self, request, context):
        """Sends a greeting
        """
        # Default stub behavior: report UNIMPLEMENTED until a concrete
        # servicer subclass overrides this method.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')


def add_GreeterServicer_to_server(servicer, server):
    # Register the servicer on the server, pairing each method with the
    # request deserializer / response serializer for its message types.
    rpc_method_handlers = {
            'SayHello': grpc.unary_unary_rpc_method_handler(
                    servicer.SayHello,
                    request_deserializer=helloworld__pb2.HelloRequest.FromString,
                    response_serializer=helloworld__pb2.HelloReply.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'helloworld.Greeter', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))


# This class is part of an EXPERIMENTAL API.
class Greeter(object):
    """The greeting service definition.
    """

    @staticmethod
    def SayHello(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        # One-shot convenience invocation: creates a channel to `target`,
        # performs the RPC, and returns the response without a stub.
        return grpc.experimental.unary_unary(request, target, '/helloworld.Greeter/SayHello',
            helloworld__pb2.HelloRequest.SerializeToString,
            helloworld__pb2.HelloReply.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 2,310
| 31.549296
| 93
|
py
|
grpc
|
grpc-master/examples/python/metadata/metadata_server.py
|
# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Example gRPC server that gets/sets metadata (HTTP2 headers)"""
from __future__ import print_function
from concurrent import futures
import logging
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
class Greeter(helloworld_pb2_grpc.GreeterServicer):
    """Greeter that prints received metadata and attaches trailing metadata."""

    def SayHello(self, request, context):
        for key, value in context.invocation_metadata():
            print("Received initial metadata: key=%s value=%s" % (key, value))
        # "-bin" suffixed keys carry binary values; others must be str.
        trailing = (
            ("checksum-bin", b"I agree"),
            ("retry", "false"),
        )
        context.set_trailing_metadata(trailing)
        return helloworld_pb2.HelloReply(message="Hello, %s!" % request.name)
def serve():
    """Run the metadata example server on port 50051 until terminated."""
    thread_pool = futures.ThreadPoolExecutor(max_workers=10)
    server = grpc.server(thread_pool)
    helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
    server.add_insecure_port("[::]:50051")
    server.start()
    server.wait_for_termination()


if __name__ == "__main__":
    logging.basicConfig()
    serve()
| 1,584
| 30.078431
| 78
|
py
|
grpc
|
grpc-master/examples/python/metadata/helloworld_pb2.py
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: helloworld.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()

# Serialized FileDescriptorProto for helloworld.proto: messages HelloRequest
# (string name) and HelloReply (string message), plus service Greeter with
# the unary SayHello method.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10helloworld.proto\x12\nhelloworld\"\x1c\n\x0cHelloRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x1d\n\nHelloReply\x12\x0f\n\x07message\x18\x01 \x01(\t2I\n\x07Greeter\x12>\n\x08SayHello\x12\x18.helloworld.HelloRequest\x1a\x16.helloworld.HelloReply\"\x00\x42\x36\n\x1bio.grpc.examples.helloworldB\x0fHelloWorldProtoP\x01\xa2\x02\x03HLWb\x06proto3')
# Build the descriptors and inject the generated message classes into this
# module's namespace via globals().
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'helloworld_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
  # Pure-Python descriptors: attach serialized options and record the byte
  # offsets of each descriptor within the serialized file.
  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\033io.grpc.examples.helloworldB\017HelloWorldProtoP\001\242\002\003HLW'
  _HELLOREQUEST._serialized_start=32
  _HELLOREQUEST._serialized_end=60
  _HELLOREPLY._serialized_start=62
  _HELLOREPLY._serialized_end=91
  _GREETER._serialized_start=93
  _GREETER._serialized_end=166
# @@protoc_insertion_point(module_scope)
| 1,450
| 45.806452
| 409
|
py
|
grpc
|
grpc-master/examples/python/metadata/metadata_client.py
|
# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Example gRPC client that gets/sets metadata (HTTP2 headers)"""
from __future__ import print_function
import logging
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
def run():
    """Send custom metadata with SayHello and print the trailing metadata."""
    # NOTE(gRPC Python Team): .close() is possible on a channel and should be
    # used in circumstances in which the with statement does not fit the needs
    # of the code.
    with grpc.insecure_channel("localhost:50051") as channel:
        stub = helloworld_pb2_grpc.GreeterStub(channel)
        outgoing_metadata = (
            ("initial-metadata-1", "The value should be str"),
            (
                "binary-metadata-bin",
                b"With -bin surffix, the value can be bytes",
            ),
            ("accesstoken", "gRPC Python is great"),
        )
        response, call = stub.SayHello.with_call(
            helloworld_pb2.HelloRequest(name="you"),
            metadata=outgoing_metadata,
        )
        print("Greeter client received: " + response.message)
        for key, value in call.trailing_metadata():
            print(
                "Greeter client received trailing metadata: key=%s value=%s"
                % (key, value)
            )


if __name__ == "__main__":
    logging.basicConfig()
    run()
| 1,788
| 32.12963
| 78
|
py
|
grpc
|
grpc-master/third_party/upb/benchmarks/gen_protobuf_binary_cc.py
|
#!/usr/bin/python3
#
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import re
include = sys.argv[1]
msg_basename = sys.argv[2]
count = 1

# A trailing number on the message name (e.g. "Message100") means
# "<base> plus numbered variants 2..N": split into base name and count.
m = re.search(r'(.*\D)(\d+)$', sys.argv[2])
if m:
  msg_basename = m.group(1)
  count = int(m.group(2))

# Emit the C++ file prologue: the generated header include, a scratch
# buffer, and the opening of main().
print('''
#include "{include}"
char buf[1];
int main() {{
'''.format(include=include))
def RefMessage(name):
print('''
{{
{name} proto;
proto.ParseFromArray(buf, 0);
proto.SerializePartialToArray(&buf[0], 0);
}}
'''.format(name=name))
# Emit one parse/serialize reference per generated message type, then close
# out main().
RefMessage(msg_basename)
for i in range(2, count + 1):
  RefMessage(msg_basename + str(i))

print('''
return 0;
}''')
| 2,121
| 31.646154
| 81
|
py
|
grpc
|
grpc-master/third_party/upb/benchmarks/gen_upb_binary_c.py
|
#!/usr/bin/python3
#
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import re
include = sys.argv[1]
msg_basename = sys.argv[2]
count = 1

# A trailing number on the message name (e.g. "Message100") means
# "<base> plus numbered variants 2..N": split into base name and count.
m = re.search(r'(.*\D)(\d+)$', sys.argv[2])
if m:
  msg_basename = m.group(1)
  count = int(m.group(2))

# Emit the C file prologue: the generated header include, a scratch buffer,
# and main() with a upb arena/size shared by every parse/serialize pair.
print('''
#include "{include}"
char buf[1];
int main() {{
upb_Arena *arena = upb_Arena_New();
size_t size;
'''.format(include=include))
def RefMessage(name):
print('''
{{
{name} *proto = {name}_parse(buf, 1, arena);
{name}_serialize(proto, arena, &size);
}}
'''.format(name=name))
# Emit one parse/serialize reference per generated message type, then close
# out main().
RefMessage(msg_basename)
for i in range(2, count + 1):
  RefMessage(msg_basename + str(i))

print('''
return 0;
}''')
| 2,167
| 31.848485
| 81
|
py
|
grpc
|
grpc-master/third_party/upb/benchmarks/gen_synthetic_protos.py
|
#!/usr/bin/python3
#
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import random
# Output directory for the generated .proto files.
base = sys.argv[1]

# ((field type, label) -> weight): relative frequency of each field shape,
# used to sample realistic field distributions for synthetic messages.
field_freqs = [
    (('bool', 'optional'), 8.321),
    (('bool', 'repeated'), 0.033),
    (('bytes', 'optional'), 0.809),
    (('bytes', 'repeated'), 0.065),
    (('double', 'optional'), 2.845),
    (('double', 'repeated'), 0.143),
    (('fixed32', 'optional'), 0.084),
    (('fixed32', 'repeated'), 0.012),
    (('fixed64', 'optional'), 0.204),
    (('fixed64', 'repeated'), 0.027),
    (('float', 'optional'), 2.355),
    (('float', 'repeated'), 0.132),
    (('int32', 'optional'), 6.717),
    (('int32', 'repeated'), 0.366),
    (('int64', 'optional'), 9.678),
    (('int64', 'repeated'), 0.425),
    (('sfixed32', 'optional'), 0.018),
    (('sfixed32', 'repeated'), 0.005),
    (('sfixed64', 'optional'), 0.022),
    (('sfixed64', 'repeated'), 0.005),
    (('sint32', 'optional'), 0.026),
    (('sint32', 'repeated'), 0.009),
    (('sint64', 'optional'), 0.018),
    (('sint64', 'repeated'), 0.006),
    (('string', 'optional'), 25.461),
    (('string', 'repeated'), 2.606),
    (('Enum', 'optional'), 6.16),
    (('Enum', 'repeated'), 0.576),
    (('Message', 'optional'), 22.472),
    (('Message', 'repeated'), 7.766),
    (('uint32', 'optional'), 1.289),
    (('uint32', 'repeated'), 0.051),
    (('uint64', 'optional'), 1.044),
    (('uint64', 'repeated'), 0.079),
]

# Parallel candidate/weight lists in the form random.choices() expects.
population = [item[0] for item in field_freqs]
weights = [item[1] for item in field_freqs]
def choices(k):
  """Return k (field_type, label) pairs sampled according to field_freqs.

  Uses random.choices() on Python 3.6+.  Previously the pre-3.6 fallback
  ignored the weights entirely (uniform sampling plus a warning); it now
  performs proper weighted sampling via the inverse-CDF method, so the
  generated protos have realistic field-type frequencies on any version.
  """
  if sys.version_info >= (3, 6):
    return random.choices(population=population, weights=weights, k=k)
  # Pre-3.6 fallback: weighted sampling over the cumulative distribution.
  total = float(sum(weights))
  cumulative = []
  acc = 0.0
  for w in weights:
    acc += w
    cumulative.append(acc)
  picks = []
  for _ in range(k):
    r = random.random() * total
    # Linear scan is fine: the population is small (~34 entries).
    for idx, bound in enumerate(cumulative):
      if r <= bound:
        picks.append(population[idx])
        break
  return picks
# Emit a proto3 file with 100 empty messages: Message, then Message2..Message100.
with open(base + "/100_msgs.proto", "w") as f:
  header = ['syntax = "proto3";\n', 'package upb_benchmark;\n', 'message Message {}\n']
  bodies = ['message Message{i} {{}}\n'.format(i=n) for n in range(2, 101)]
  f.writelines(header + bodies)
# Emit a proto3 file with 200 empty messages, mirroring 100_msgs.proto above.
# Bug fix: the file is named 200_msgs.proto but the loop previously emitted
# 500 messages (range(2, 501)); align the message count with the filename
# and with the 100_msgs.proto generator.
with open(base + "/200_msgs.proto", "w") as f:
  f.write('syntax = "proto3";\n')
  f.write('package upb_benchmark;\n')
  f.write('message Message {}\n')
  for i in range(2, 201):
    f.write('message Message{i} {{}}\n'.format(i=i))
# Emit a proto2 message with 100 fields whose types follow the observed
# frequency distribution; the fixed seed makes the output deterministic.
with open(base + "/100_fields.proto", "w") as f:
  f.write('syntax = "proto2";\n')
  f.write('package upb_benchmark;\n')
  f.write('enum Enum { ZERO = 0; }\n')
  f.write('message Message {\n')
  random.seed(a=0, version=2)
  for number, (field_type, label) in enumerate(choices(100), start=1):
    f.write('  {label} {field_type} field{i} = {i};\n'.format(
        i=number, label=label, field_type=field_type))
  f.write('}\n')
# Same as 100_fields.proto above, but with 200 weighted-random fields.
with open(base + "/200_fields.proto", "w") as f:
  f.write('syntax = "proto2";\n')
  f.write('package upb_benchmark;\n')
  f.write('enum Enum { ZERO = 0; }\n')
  f.write('message Message {\n')
  random.seed(a=0, version=2)
  for number, (field_type, label) in enumerate(choices(200), start=1):
    f.write('  {label} {field_type} field{i} = {i};\n'.format(
        i=number, label=label, field_type=field_type))
  f.write('}\n')
| 4,545
| 37.201681
| 103
|
py
|
grpc
|
grpc-master/third_party/upb/benchmarks/compare.py
|
#!/usr/bin/python3
#
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Benchmarks the current working directory against a given baseline.
This script benchmarks both size and speed. Sample output:
"""
import contextlib
import json
import os
import re
import subprocess
import sys
import tempfile
@contextlib.contextmanager
def GitWorktree(commit):
  """Context manager: check out `commit` into a temporary git worktree.

  Yields the worktree path and chdir()s into it for the duration of the
  `with` block.  On exit the previous working directory is restored and
  the worktree is removed.  NOTE(review): os.chdir is process-wide, so
  this is not safe to use from multiple threads concurrently.
  """
  tmpdir = tempfile.mkdtemp()
  subprocess.run(['git', 'worktree', 'add', '-q', '-d', tmpdir, commit], check=True)
  cwd = os.getcwd()
  os.chdir(tmpdir)
  try:
    yield tmpdir
  finally:
    # Restore the caller's cwd *before* removing the worktree we're inside of.
    os.chdir(cwd)
    subprocess.run(['git', 'worktree', 'remove', tmpdir], check=True)
def Run(cmd):
  """Run `cmd` through the shell; raise CalledProcessError on a nonzero exit."""
  subprocess.run(cmd, shell=True, check=True)
def Benchmark(outbase, bench_cpu=True, runs=12, fasttable=False):
  """Build and run the benchmarks, writing results under `outbase`.

  Args:
    outbase: path prefix for outputs: `<outbase>.txt` (benchstat-format CPU
      numbers) and `<outbase>.bin` (conformance binary, for size comparison).
    bench_cpu: if True, build and run the CPU benchmark suite.
    runs: number of benchmark repetitions to request.
    fasttable: if True, build with --//:fasttable_enabled=true.
  """
  tmpfile = "/tmp/bench-output.json"
  Run("rm -rf {}".format(tmpfile))
  #Run("CC=clang bazel test ...")
  if fasttable:
    extra_args = " --//:fasttable_enabled=true"
  else:
    extra_args = ""
  if bench_cpu:
    Run("CC=clang bazel build -c opt --copt=-march=native benchmarks:benchmark" + extra_args)
    Run("./bazel-bin/benchmarks/benchmark --benchmark_out_format=json --benchmark_out={} --benchmark_repetitions={} --benchmark_min_time=0.05 --benchmark_enable_random_interleaving=true".format(tmpfile, runs))
    with open(tmpfile) as f:
      bench_json = json.load(f)
    # Translate into the format expected by benchstat.
    txt_filename = outbase + ".txt"
    with open(txt_filename, "w") as f:
      for run in bench_json["benchmarks"]:
        # Skip aggregate rows; benchstat computes its own statistics
        # from the individual repetitions.
        if run["run_type"] == "aggregate":
          continue
        name = run["name"]
        name = name.replace(" ", "")
        # benchstat expects Go-style "Benchmark..." names, not "BM_...".
        name = re.sub(r'^BM_', 'Benchmark', name)
        values = (name, run["iterations"], run["cpu_time"])
        print("{} {} {} ns/op".format(*values), file=f)
    # benchstat wants its input sorted (sorted in place).
    Run("sort {} -o {} ".format(txt_filename, txt_filename))
  # Always build the conformance binary; the top-level script compares its
  # size with bloaty.  Built with -g so symbols can be attributed.
  Run("CC=clang bazel build -c opt --copt=-g --copt=-march=native :conformance_upb"
      + extra_args)
  Run("cp -f bazel-bin/conformance_upb {}.bin".format(outbase))
# Git ref to compare against; overridable via argv[1].
baseline = "main"
bench_cpu = True
fasttable = False
if len(sys.argv) > 1:
  baseline = sys.argv[1]
  # Quickly verify that the baseline exists.
  with GitWorktree(baseline):
    pass
# Benchmark our current directory first, since it's more likely to be broken.
Benchmark("/tmp/new", bench_cpu, fasttable=fasttable)
# Benchmark the baseline.
with GitWorktree(baseline):
  Benchmark("/tmp/old", bench_cpu, fasttable=fasttable)
print()
print()
if bench_cpu:
  # Compare CPU numbers using Go's benchstat tool (hard-coded install path).
  Run("~/go/bin/benchstat /tmp/old.txt /tmp/new.txt")
  print()
  print()
# Compare binary sizes with bloaty.  Strip debug info from the compared
# binaries but keep the unstripped ones around for symbol attribution
# (via --debug-file).
Run("objcopy --strip-debug /tmp/old.bin /tmp/old.bin.stripped")
Run("objcopy --strip-debug /tmp/new.bin /tmp/new.bin.stripped")
Run("~/code/bloaty/bloaty /tmp/new.bin.stripped -- /tmp/old.bin.stripped --debug-file=/tmp/old.bin --debug-file=/tmp/new.bin -d compileunits,symbols")
| 4,328
| 35.378151
| 209
|
py
|
grpc
|
grpc-master/third_party/upb/python/minimal_test.py
|
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A bare-bones unit test that doesn't load any generated code."""
import unittest
from google.protobuf.pyext import _message
from google3.net.proto2.python.internal import api_implementation
from google.protobuf import unittest_pb2
from google.protobuf import map_unittest_pb2
from google.protobuf import descriptor_pool
from google.protobuf import text_format
from google.protobuf import message_factory
from google.protobuf import message
from google3.net.proto2.python.internal import factory_test1_pb2
from google3.net.proto2.python.internal import factory_test2_pb2
from google3.net.proto2.python.internal import more_extensions_pb2
from google.protobuf import descriptor_pb2
class TestMessageExtension(unittest.TestCase):
  """Tests of the upb-backed _message extension using no generated-code setup."""

  def test_descriptor_pool(self):
    """AddSerializedFile() parses a descriptor and caches descriptor objects."""
    # Serialized FileDescriptorProto for "test.proto": message M1 with an
    # extension range, plus a top-level extension `test_ext` (field number 1).
    serialized_desc = b'\n\ntest.proto\"\x0e\n\x02M1*\x08\x08\x01\x10\x80\x80\x80\x80\x02:\x15\n\x08test_ext\x12\x03.M1\x18\x01 \x01(\x05'
    pool = _message.DescriptorPool()
    file_desc = pool.AddSerializedFile(serialized_desc)
    self.assertEqual("test.proto", file_desc.name)
    ext_desc = pool.FindExtensionByName("test_ext")
    self.assertEqual(1, ext_desc.number)
    # Test object cache: repeatedly retrieving the same descriptor
    # should result in the same object
    self.assertIs(ext_desc, pool.FindExtensionByName("test_ext"))

  def test_lib_is_upb(self):
    # Ensure we are not pulling in a different protobuf library on the
    # system.  upb identifies itself via _IS_UPB and reports the "cpp"
    # implementation type.
    print(_message._IS_UPB)
    self.assertTrue(_message._IS_UPB)
    self.assertEqual(api_implementation.Type(), "cpp")

  def test_repeated_field_slice_delete(self):
    """Extended-slice deletion on repeated fields matches list semantics."""
    def test_slice(start, end, step):
      # Apply the same slice-delete to a plain list and to the repeated
      # field, then require identical results.
      vals = list(range(20))
      message = unittest_pb2.TestAllTypes(repeated_int32=vals)
      del vals[start:end:step]
      del message.repeated_int32[start:end:step]
      self.assertEqual(vals, list(message.repeated_int32))
    test_slice(3, 11, 1)
    test_slice(3, 11, 2)
    test_slice(3, 11, 3)
    # Negative steps walk the slice backwards.
    test_slice(11, 3, -1)
    test_slice(11, 3, -2)
    test_slice(11, 3, -3)
    # End index past the end of the field must be clamped, as with lists.
    test_slice(10, 25, 4)

  def testExtensionsErrors(self):
    # Non-extendable messages must not expose an Extensions attribute.
    msg = unittest_pb2.TestAllTypes()
    self.assertRaises(AttributeError, getattr, msg, 'Extensions')

  def testClearStubMapField(self):
    # Clearing a map that was only ever observed (never written) must
    # detach the previously-returned view: writes through it do not
    # reappear in the message.
    msg = map_unittest_pb2.TestMapSubmessage()
    int32_map = msg.test_map.map_int32_int32
    msg.test_map.ClearField("map_int32_int32")
    int32_map[123] = 456
    self.assertEqual(0, msg.test_map.ByteSize())

  def testClearReifiedMapField(self):
    # Same as above but for a map that had real data before ClearField().
    msg = map_unittest_pb2.TestMap()
    int32_map = msg.map_int32_int32
    int32_map[123] = 456
    msg.ClearField("map_int32_int32")
    int32_map[111] = 222
    self.assertEqual(0, msg.ByteSize())

  def testClearStubRepeatedField(self):
    # Appending to a detached repeated-field view must not resurrect the
    # cleared field on the parent message.
    msg = unittest_pb2.NestedTestAllTypes()
    int32_array = msg.payload.repeated_int32
    msg.payload.ClearField("repeated_int32")
    int32_array.append(123)
    self.assertEqual(0, msg.payload.ByteSize())

  def testClearReifiedRepeatdField(self):
    msg = unittest_pb2.TestAllTypes()
    int32_array = msg.repeated_int32
    int32_array.append(123)
    self.assertNotEqual(0, msg.ByteSize())
    msg.ClearField("repeated_int32")
    # The old view is detached after the clear.
    int32_array.append(123)
    self.assertEqual(0, msg.ByteSize())

  def testFloatPrinting(self):
    # Negative zero must text-format as "-0", preserving the sign bit.
    message = unittest_pb2.TestAllTypes()
    message.optional_float = -0.0
    self.assertEqual(str(message), 'optional_float: -0\n')
class OversizeProtosTest(unittest.TestCase):
  """Tests around deeply-nested messages and a few stub/extension behaviors."""

  def setUp(self):
    # Build a message nested 101 levels deep and keep its serialized form;
    # presumably one level past the parser's default depth limit (the
    # DecodeError asserted below confirms it trips the limit).
    msg = unittest_pb2.NestedTestAllTypes()
    m = msg
    for i in range(101):
      m = m.child
    m.Clear()
    self.p_serialized = msg.SerializeToString()

  def testAssertOversizeProto(self):
    # With oversize protos disallowed, parsing the too-deep payload fails.
    from google.protobuf.pyext._message import SetAllowOversizeProtos
    SetAllowOversizeProtos(False)
    q = unittest_pb2.NestedTestAllTypes()
    with self.assertRaises(message.DecodeError):
      q.ParseFromString(self.p_serialized)
      print(q)

  def testSucceedOversizeProto(self):
    # With oversize protos allowed, the same payload parses cleanly.
    from google.protobuf.pyext._message import SetAllowOversizeProtos
    SetAllowOversizeProtos(True)
    q = unittest_pb2.NestedTestAllTypes()
    q.ParseFromString(self.p_serialized)

  def testExtensionIter(self):
    # Iterating Extensions yields exactly the set extensions (in no
    # guaranteed order) and excludes regular fields.
    extendee_proto = more_extensions_pb2.ExtendedMessage()
    extension_int32 = more_extensions_pb2.optional_int_extension
    extendee_proto.Extensions[extension_int32] = 23
    extension_repeated = more_extensions_pb2.repeated_int_extension
    extendee_proto.Extensions[extension_repeated].append(11)
    extension_msg = more_extensions_pb2.optional_message_extension
    extendee_proto.Extensions[extension_msg].foreign_message_int = 56
    # Set some normal fields.
    extendee_proto.optional_int32 = 1
    extendee_proto.repeated_string.append('hi')
    expected = {
        extension_int32: True,
        extension_msg: True,
        extension_repeated: True
    }
    count = 0
    for item in extendee_proto.Extensions:
      # del raises KeyError if iteration yields anything unexpected.
      del expected[item]
      self.assertIn(item, extendee_proto.Extensions)
      count += 1
    self.assertEqual(count, 3)
    self.assertEqual(len(expected), 0)

  def testIsInitializedStub(self):
    proto = unittest_pb2.TestRequiredForeign()
    self.assertTrue(proto.IsInitialized())
    # The stub submessage is missing its required fields.
    self.assertFalse(proto.optional_message.IsInitialized())
    errors = []
    self.assertFalse(proto.optional_message.IsInitialized(errors))
    # Presumably the required fields are named a, b, c -- confirm against
    # the TestRequired definition in unittest.proto.
    self.assertEqual(['a', 'b', 'c'], errors)
    self.assertRaises(message.EncodeError, proto.optional_message.SerializeToString)

if __name__ == '__main__':
  unittest.main(verbosity=2)
| 7,321
| 38.793478
| 142
|
py
|
grpc
|
grpc-master/third_party/upb/python/pb_unit_tests/wire_format_test_wrapper.py
|
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Re-run the upstream protobuf wire_format tests against the upb-backed
# implementation; the star-import exposes every TestCase to unittest.
from google.protobuf.internal.wire_format_test import *
import unittest
if __name__ == '__main__':
  unittest.main(verbosity=2)
| 1,662
| 52.645161
| 81
|
py
|
grpc
|
grpc-master/third_party/upb/python/pb_unit_tests/descriptor_test_wrapper.py
|
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Re-run the upstream protobuf descriptor tests against upb, marking cases
# whose behavior intentionally differs as expected failures.
from google.protobuf.internal.descriptor_test import *
import unittest
# These fail because they attempt to add fields with conflicting JSON names.
# We don't want to support this going forward.
MakeDescriptorTest.testCamelcaseName.__unittest_expecting_failure__ = True
MakeDescriptorTest.testJsonName.__unittest_expecting_failure__ = True
# We pass this test, but the error message is slightly different.
# Our error message is better.
NewDescriptorTest.testImmutableCppDescriptor.__unittest_expecting_failure__ = True
# NOTE(review): no rationale recorded for this one -- presumably
# GetDebugString output differs under upb; confirm.
DescriptorTest.testGetDebugString.__unittest_expecting_failure__ = True
if __name__ == '__main__':
  unittest.main(verbosity=2)
| 2,185
| 51.047619
| 82
|
py
|
grpc
|
grpc-master/third_party/upb/python/pb_unit_tests/message_factory_test_wrapper.py
|
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Re-run the upstream message_factory tests against upb; the cases below are
# marked as expected failures (no rationale recorded here -- presumably
# behavior upb does not replicate; see upstream history).
from google.protobuf.internal.message_factory_test import *
import unittest
MessageFactoryTest.testDuplicateExtensionNumber.__unittest_expecting_failure__ = True
MessageFactoryTest.testGetMessages.__unittest_expecting_failure__ = True
MessageFactoryTest.testGetPrototype.__unittest_expecting_failure__ = True
# This test only exists in some protobuf versions, so guard before marking it.
if hasattr(MessageFactoryTest, 'testExtensionValueInDifferentFile'):
  MessageFactoryTest.testExtensionValueInDifferentFile.__unittest_expecting_failure__ = True
if __name__ == '__main__':
  unittest.main(verbosity=2)
| 2,062
| 54.756757
| 92
|
py
|
grpc
|
grpc-master/third_party/upb/python/pb_unit_tests/symbol_database_test_wrapper.py
|
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Re-run the upstream symbol_database tests against the upb-backed
# implementation; all cases are expected to pass unmodified.
from google.protobuf.internal.symbol_database_test import *
import unittest
if __name__ == '__main__':
  unittest.main(verbosity=2)
| 1,666
| 52.774194
| 81
|
py
|
grpc
|
grpc-master/third_party/upb/python/pb_unit_tests/descriptor_pool_test_wrapper.py
|
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Re-run the upstream descriptor_pool tests against the upb-backed
# implementation.
import unittest
from google.protobuf.internal.descriptor_pool_test import *
# This is testing that certain methods unconditionally throw TypeError.
# In the new extension we simply don't define them at all.
AddDescriptorTest.testAddTypeError.__unittest_expecting_failure__ = True
SecondaryDescriptorFromDescriptorDB.testErrorCollector.__unittest_expecting_failure__ = True
# NOTE(review): the begin/end marker comments below appear to delimit
# sections swapped by internal/external source-sync tooling; do not remove.
# begin:github_only
if __name__ == '__main__':
  unittest.main(verbosity=2)
# end:github_only
# begin:google_only
# from absl import app
# if __name__ == '__main__':
#   app.run(lambda argv: unittest.main(verbosity=2))
# end:google_only
| 2,147
| 46.733333
| 92
|
py
|
grpc
|
grpc-master/third_party/upb/python/pb_unit_tests/generator_test_wrapper.py
|
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Re-run the upstream generator tests against the upb-backed implementation;
# all cases are expected to pass unmodified.
from google.protobuf.internal.generator_test import *
import unittest
if __name__ == '__main__':
  unittest.main(verbosity=2)
| 1,660
| 52.580645
| 81
|
py
|
grpc
|
grpc-master/third_party/upb/python/pb_unit_tests/reflection_test_wrapper.py
|
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Re-run the upstream reflection tests against upb, marking cases that rely
# on unguaranteed ordering or implementation details as expected failures.
from google.protobuf.internal.reflection_test import *
import unittest
# These tests depend on a specific iteration order for extensions, which is not
# reasonable to guarantee.
Proto2ReflectionTest.testExtensionIter.__unittest_expecting_failure__ = True
# These tests depend on a specific serialization order for extensions, which is
# not reasonable to guarantee.
SerializationTest.testCanonicalSerializationOrder.__unittest_expecting_failure__ = True
SerializationTest.testCanonicalSerializationOrderSameAsCpp.__unittest_expecting_failure__ = True
# This test relies on the internal implementation using Python descriptors.
# This is an implementation detail that users should not depend on.
SerializationTest.testFieldDataDescriptor.__unittest_expecting_failure__ = True
SerializationTest.testFieldProperties.__unittest_expecting_failure__ = True
# TODO(259423340) Python Docker image on MacOS failing.
ClassAPITest.testParsingNestedClass.__unittest_skip__ = True
if __name__ == '__main__':
  unittest.main(verbosity=2)
| 2,563
| 51.326531
| 96
|
py
|
grpc
|
grpc-master/third_party/upb/python/pb_unit_tests/service_reflection_test_wrapper.py
|
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Re-run the upstream service_reflection tests against the upb-backed
# implementation; all cases are expected to pass unmodified.
from google.protobuf.internal.service_reflection_test import *
import unittest
if __name__ == '__main__':
  unittest.main(verbosity=2)
| 1,669
| 52.870968
| 81
|
py
|
grpc
|
grpc-master/third_party/upb/python/pb_unit_tests/numpy_test_wrapper.py
|
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Re-run the upstream numpy-interop tests against the upb-backed
# implementation; all cases are expected to pass unmodified.
import unittest
from google.protobuf.internal.numpy.numpy_test import *
if __name__ == '__main__':
  unittest.main(verbosity=2)
| 1,663
| 51
| 81
|
py
|
grpc
|
grpc-master/third_party/upb/python/pb_unit_tests/keywords_test_wrapper.py
|
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Re-run the upstream keywords tests against the upb-backed implementation;
# all cases are expected to pass unmodified.
from google.protobuf.internal.keywords_test import *
import unittest
if __name__ == '__main__':
  unittest.main(verbosity=2)
| 1,659
| 52.548387
| 81
|
py
|
grpc
|
grpc-master/third_party/upb/python/pb_unit_tests/text_format_test_wrapper.py
|
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Thin wrapper: re-run upstream protobuf's text_format_test suite (imported
# via the star import) against upb's Python implementation.
from google.protobuf.internal.text_format_test import *
import unittest
if __name__ == '__main__':
  unittest.main(verbosity=2)
| 1,662
| 52.645161
| 81
|
py
|
grpc
|
grpc-master/third_party/upb/python/pb_unit_tests/unknown_fields_test_wrapper.py
|
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Thin wrapper: re-run upstream protobuf's unknown_fields_test suite
# (imported via the star import) against upb's Python implementation.
from google.protobuf.internal.unknown_fields_test import *
import unittest
if __name__ == '__main__':
  unittest.main(verbosity=2)
| 1,665
| 52.741935
| 81
|
py
|
grpc
|
grpc-master/third_party/upb/python/pb_unit_tests/message_test_wrapper.py
|
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Wrapper that runs upstream protobuf's message_test against upb.  Known,
# intentional divergences from upstream are marked as expected failures below
# so the suite stays green while still flagging regressions elsewhere.
from google.protobuf.internal.message_test import *
import unittest
# We don't want to support extending repeated fields with nothing; this behavior
# is marked for deprecation in the existing library.
MessageTest.testExtendFloatWithNothing_proto2.__unittest_expecting_failure__ = True
MessageTest.testExtendFloatWithNothing_proto3.__unittest_expecting_failure__ = True
MessageTest.testExtendInt32WithNothing_proto2.__unittest_expecting_failure__ = True
MessageTest.testExtendInt32WithNothing_proto3.__unittest_expecting_failure__ = True
MessageTest.testExtendStringWithNothing_proto2.__unittest_expecting_failure__ = True
MessageTest.testExtendStringWithNothing_proto3.__unittest_expecting_failure__ = True
# Python/C++ customizes the C++ TextFormat to always print trailing ".0" for
# floats. upb doesn't do this, it matches C++ TextFormat.
MessageTest.testFloatPrinting_proto2.__unittest_expecting_failure__ = True
MessageTest.testFloatPrinting_proto3.__unittest_expecting_failure__ = True
# For these tests we are throwing the correct error, only the text of the error
# message is a mismatch. For technical reasons around the limited API, matching
# the existing error message exactly is not feasible.
Proto3Test.testCopyFromBadType.__unittest_expecting_failure__ = True
Proto3Test.testMergeFromBadType.__unittest_expecting_failure__ = True
Proto2Test.test_documentation.__unittest_expecting_failure__ = True
if __name__ == '__main__':
  unittest.main(verbosity=2)
| 3,009
| 55.792453
| 84
|
py
|
grpc
|
grpc-master/third_party/upb/python/pb_unit_tests/descriptor_database_test_wrapper.py
|
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Thin wrapper: re-run upstream protobuf's descriptor_database_test suite
# (imported via the star import) against upb's Python implementation.
from google.protobuf.internal.descriptor_database_test import *
import unittest
if __name__ == '__main__':
  unittest.main(verbosity=2)
| 1,670
| 52.903226
| 81
|
py
|
grpc
|
grpc-master/third_party/upb/python/pb_unit_tests/text_encoding_test_wrapper.py
|
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Thin wrapper: re-run upstream protobuf's text_encoding_test suite (imported
# via the star import) against upb's Python implementation.
from google.protobuf.internal.text_encoding_test import *
import unittest
if __name__ == '__main__':
  unittest.main(verbosity=2)
| 1,664
| 52.709677
| 81
|
py
|
grpc
|
grpc-master/third_party/upb/python/pb_unit_tests/json_format_test_wrapper.py
|
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Thin wrapper: re-run upstream protobuf's json_format_test suite (imported
# via the star import) against upb's Python implementation.
from google.protobuf.internal.json_format_test import *
import unittest
if __name__ == '__main__':
  unittest.main(verbosity=2)
| 1,662
| 52.645161
| 81
|
py
|
grpc
|
grpc-master/third_party/upb/python/pb_unit_tests/proto_builder_test_wrapper.py
|
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Thin wrapper: re-run upstream protobuf's proto_builder_test suite (imported
# via the star import) against upb's Python implementation.
from google.protobuf.internal.proto_builder_test import *
import unittest
# NOTE(review): marked as an expected failure — upb's behavior diverges from
# upstream on this test; the reason is not documented in this file.
ProtoBuilderTest.testMakeLargeProtoClass.__unittest_expecting_failure__ = True
if __name__ == '__main__':
  unittest.main(verbosity=2)
| 1,744
| 51.878788
| 81
|
py
|
grpc
|
grpc-master/third_party/upb/python/pb_unit_tests/well_known_types_test_wrapper.py
|
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Thin wrapper: re-run upstream protobuf's well_known_types_test suite
# (imported via the star import) against upb's Python implementation.
from google.protobuf.internal.well_known_types_test import *
import os  # NOTE(review): appears unused in this wrapper — confirm before removing.
import unittest
if __name__ == '__main__':
  unittest.main(verbosity=2)
| 1,677
| 51.4375
| 81
|
py
|
grpc
|
grpc-master/third_party/upb/bazel/amalgamate.py
|
#!/usr/bin/python
#
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import re
import os
# Matches a local-style include directive, e.g. `#include "upb/upb.h"`,
# capturing the quoted path.  Angle-bracket includes do not match.
INCLUDE_RE = re.compile('^#include "([^"]*)"$')
def parse_include(line):
  """Returns the quoted path from a `#include "..."` line, or None."""
  m = INCLUDE_RE.match(line)
  return m.group(1) if m else None
class Amalgamator:
  """Combines many upb .h/.c inputs into one amalgamated header and source.

  The two output files are opened in the constructor and written to
  incrementally as input files are processed; nothing closes them
  explicitly, the process exit flushes them.
  """
  def __init__(self, h_out, c_out):
    # NOTE(review): include_paths is initialized but never read in this
    # class as shown — confirm before removing.
    self.include_paths = ["."]
    self.included = set()
    self.output_h = open(h_out, "w")
    self.output_c = open(c_out, "w")
    # Bare filename (no directory) used for the generated #include line.
    self.h_out = h_out.split("/")[-1]
  def amalgamate(self, h_files, c_files):
    """Writes the amalgamated output for the given header and source files."""
    self.h_files = set(h_files)
    self.output_c.write("/* Amalgamated source file */\n")
    self.output_c.write('#include "%s"\n' % (self.h_out))
    if self.h_out == "ruby-upb.h":
      self.output_h.write("// Ruby is still using proto3 enum semantics for proto2\n")
      self.output_h.write("#define UPB_DISABLE_PROTO2_ENUM_CHECKING\n")
    self.output_h.write("/* Amalgamated source file */\n")
    # port/def.inc and port/undef.inc bracket both outputs; they are
    # excluded from the normal include processing in _process_include().
    port_def = self._find_include_file("upb/port/def.inc")
    port_undef = self._find_include_file("upb/port/undef.inc")
    self._process_file(port_def, self.output_h)
    self._process_file(port_def, self.output_c)
    for file in c_files:
      self._process_file(file, self.output_c)
    self._process_file(port_undef, self.output_h)
    self._process_file(port_undef, self.output_c)
  def _process_file(self, infile_name, outfile):
    """Copies infile_name into outfile, stripping a leading license block
    and letting _process_include() inline or drop #include lines."""
    lines = open(infile_name).readlines()
    # A file whose second line starts with " * Copyright" is assumed to
    # open with a block-comment license header; drop everything up to and
    # including the closing " */" line.
    has_copyright = lines[1].startswith(" * Copyright")
    if has_copyright:
      while not lines[0].startswith(" */"):
        lines.pop(0)
      lines.pop(0)
    for line in lines:
      if not self._process_include(line):
        outfile.write(line)
  def _find_include_file(self, name):
    """Returns the first registered header whose path ends with `name`,
    or None when absent."""
    for h_file in self.h_files:
      if h_file.endswith(name):
        return h_file
  def _process_include(self, line):
    """Handles a possible #include line.

    Returns True when the line was consumed (inlined into the header, or
    deliberately skipped); False when the caller should copy it verbatim.
    """
    include = parse_include(line)
    if not include:
      return False
    if not (include.startswith("upb") or include.startswith("google")):
      # System / third-party include: leave it in place in the output.
      return False
    if include and (include.endswith("port/def.inc") or include.endswith("port/undef.inc")):
      # Skip, we handle this separately
      return True
    if include.endswith("hpp"):
      # Skip, we don't support the amalgamation from C++.
      return True
    elif include in self.included:
      return True
    else:
      # Include this upb header inline.
      h_file = self._find_include_file(include)
      if h_file:
        self.h_files.remove(h_file)
        self.included.add(include)
        self._process_file(h_file, self.output_h)
        return True
      raise RuntimeError("Couldn't find include: " + include + ", h_files=" + repr(self.h_files))
# ---- main ----
# Usage: amalgamate.py <c_out> <h_out> <input files...>
# Inputs ending in .h/.inc are treated as headers; everything else as sources.
c_out = sys.argv[1]
h_out = sys.argv[2]
amalgamator = Amalgamator(h_out, c_out)
h_files = []
c_files = []
for raw_arg in sys.argv[3:]:
  path = raw_arg.strip()
  if path.endswith((".h", ".inc")):
    h_files.append(path)
  else:
    c_files.append(path)
amalgamator.amalgamate(h_files, c_files)
| 4,460
| 34.688
| 97
|
py
|
grpc
|
grpc-master/third_party/upb/cmake/make_cmakelists.py
|
#!/usr/bin/python
#
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A tool to convert {WORKSPACE, BUILD} -> CMakeLists.txt.
This tool is very upb-specific at the moment, and should not be seen as a
generic Bazel -> CMake converter.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import textwrap
import os
def StripFirstChar(deps):
  """Drops the leading character (the Bazel ':' prefix) from each dep name."""
  return [name[1:] for name in deps]
def IsSourceFile(name):
  """Returns True when `name` is a C/C++ source file (.c or .cc)."""
  return name.endswith((".c", ".cc"))
class BuildFileFunctions(object):
  """Maps Bazel BUILD-file functions onto CMakeLists.txt text.

  Instances are turned into an exec() namespace (see GetDict() below), so
  every public method name must match the Bazel rule/function it stands in
  for.  Rules with no CMake equivalent are no-op stubs.
  """
  def __init__(self, converter):
    self.converter = converter
  def _add_deps(self, kwargs, keyword=""):
    # Emits target_link_libraries() for the rule's deps, stripping the
    # leading ':' from each Bazel label.
    if "deps" not in kwargs:
      return
    self.converter.toplevel += "target_link_libraries(%s%s\n  %s)\n" % (
        kwargs["name"],
        keyword,
        "\n  ".join(StripFirstChar(kwargs["deps"]))
    )
  def load(self, *args):
    pass
  def cc_library(self, **kwargs):
    # Skip targets that are deliberately excluded from the CMake build.
    if kwargs["name"].endswith("amalgamation"):
      return
    if kwargs["name"] == "upbc_generator":
      return
    if kwargs["name"] == "lupb":
      return
    if "testonly" in kwargs:
      return
    files = kwargs.get("srcs", []) + kwargs.get("hdrs", [])
    found_files = []
    # These files live pre-generated under cmake/ rather than in the
    # source tree proper.
    pregenerated_files = [
        "CMakeLists.txt", "descriptor.upb.h", "descriptor.upb.c"
    ]
    for file in files:
      if os.path.basename(file) in pregenerated_files:
        found_files.append("../cmake/" + file)
      else:
        found_files.append("../" + file)
    if list(filter(IsSourceFile, files)):
      # Has sources, make this a normal library.
      self.converter.toplevel += "add_library(%s\n  %s)\n" % (
          kwargs["name"],
          "\n  ".join(found_files)
      )
      self._add_deps(kwargs)
    else:
      # Header-only library, have to do a couple things differently.
      # For some info, see:
      #  http://mariobadr.com/creating-a-header-only-library-with-cmake.html
      self.converter.toplevel += "add_library(%s INTERFACE)\n" % (
          kwargs["name"]
      )
      self._add_deps(kwargs, " INTERFACE")
  def cc_binary(self, **kwargs):
    pass
  def cc_test(self, **kwargs):
    # Disable this until we properly support upb_proto_library().
    # self.converter.toplevel += "add_executable(%s\n  %s)\n" % (
    #     kwargs["name"],
    #     "\n  ".join(kwargs["srcs"])
    # )
    # self.converter.toplevel += "add_test(NAME %s COMMAND %s)\n" % (
    #     kwargs["name"],
    #     kwargs["name"],
    # )
    # if "data" in kwargs:
    #   for data_dep in kwargs["data"]:
    #     self.converter.toplevel += textwrap.dedent("""\
    #       add_custom_command(
    #           TARGET %s POST_BUILD
    #           COMMAND ${CMAKE_COMMAND} -E copy
    #                   ${CMAKE_SOURCE_DIR}/%s
    #                   ${CMAKE_CURRENT_BINARY_DIR}/%s)\n""" % (
    #       kwargs["name"], data_dep, data_dep
    #     ))
    # self._add_deps(kwargs)
    pass
  def cc_fuzz_test(self, **kwargs):
    pass
  def pkg_files(self, **kwargs):
    pass
  def py_library(self, **kwargs):
    pass
  def py_binary(self, **kwargs):
    pass
  def lua_proto_library(self, **kwargs):
    pass
  def sh_test(self, **kwargs):
    pass
  def make_shell_script(self, **kwargs):
    pass
  def exports_files(self, files, **kwargs):
    pass
  def proto_library(self, **kwargs):
    pass
  def cc_proto_library(self, **kwargs):
    pass
  def staleness_test(self, **kwargs):
    pass
  def upb_amalgamation(self, **kwargs):
    pass
  def upb_proto_library(self, **kwargs):
    pass
  def upb_proto_library_copts(self, **kwargs):
    pass
  def upb_proto_reflection_library(self, **kwargs):
    pass
  def upb_proto_srcs(self, **kwargs):
    pass
  def genrule(self, **kwargs):
    pass
  def config_setting(self, **kwargs):
    pass
  def upb_fasttable_enabled(self, **kwargs):
    pass
  def select(self, arg_dict):
    # select() collapses to "no extra entries" for the CMake translation.
    return []
  def glob(self, *args, **kwargs):
    return []
  def licenses(self, *args):
    pass
  def filegroup(self, **kwargs):
    pass
  def map_dep(self, arg):
    return arg
  def package_group(self, **kwargs):
    pass
  def bool_flag(self, **kwargs):
    pass
  def bootstrap_upb_proto_library(self, **kwargs):
    pass
  def bootstrap_cc_library(self, **kwargs):
    pass
class WorkspaceFileFunctions(object):
  """Maps Bazel WORKSPACE-file functions for exec() evaluation.

  Most workspace rules are irrelevant to the CMake build and are no-op
  stubs; only workspace() and maybe() record state on the shared converter.
  """
  def __init__(self, converter):
    self.converter = converter
  def load(self, *args, **kwargs):
    pass
  def workspace(self, **kwargs):
    # Seed the CMake prelude with the project name and the C standard.
    self.converter.prelude += "project(%s)\n" % (kwargs["name"])
    self.converter.prelude += "set(CMAKE_C_STANDARD 99)\n"
  def maybe(self, rule, **kwargs):
    # Capture the pinned utf8_range commit so the generated CMakeLists can
    # FetchContent the same revision; all other maybe() rules are ignored.
    if kwargs["name"] == "utf8_range":
      self.converter.utf8_range_commit = kwargs["commit"]
    pass
  def http_archive(self, **kwargs):
    pass
  def git_repository(self, **kwargs):
    pass
  def new_git_repository(self, **kwargs):
    pass
  def bazel_version_repository(self, **kwargs):
    pass
  def protobuf_deps(self):
    pass
  def utf8_range_deps(self):
    pass
  def pip_parse(self, **kwargs):
    pass
  def rules_fuzzing_dependencies(self):
    pass
  def rules_fuzzing_init(self):
    pass
  def rules_pkg_dependencies(self):
    pass
  def system_python(self, **kwargs):
    pass
  def register_system_python(self, **kwargs):
    pass
  def register_toolchains(self, toolchain):
    pass
  def python_source_archive(self, **kwargs):
    pass
  def python_nuget_package(self, **kwargs):
    pass
  def install_deps(self):
    pass
  def fuzzing_py_install_deps(self):
    pass
  def googletest_deps(self):
    pass
class Converter(object):
  """Accumulates the pieces of the generated CMakeLists.txt.

  The Bazel-file evaluation code fills in `prelude`, `toplevel` and
  `utf8_range_commit`; convert() splices them into `template` to produce
  the final file text.
  """
  def __init__(self):
    self.prelude = ""
    self.toplevel = ""
    self.if_lua = ""
    self.utf8_range_commit = ""
  def convert(self):
    """Returns the complete CMakeLists.txt text for this instance's state."""
    # Bug fix: format using this instance's own attributes instead of the
    # module-level `converter` global, so convert() works on any instance
    # (the original only worked for the single global object).
    return self.template % {
        "prelude": self.prelude,
        "toplevel": self.toplevel,
        "utf8_range_commit": self.utf8_range_commit,
    }
  template = textwrap.dedent("""\
    # This file was generated from BUILD using tools/make_cmakelists.py.

    cmake_minimum_required(VERSION 3.1)

    if(${CMAKE_VERSION} VERSION_LESS 3.12)
      cmake_policy(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION})
    else()
      cmake_policy(VERSION 3.12)
    endif()

    cmake_minimum_required (VERSION 3.0)
    cmake_policy(SET CMP0048 NEW)

    %(prelude)s

    # Prevent CMake from setting -rdynamic on Linux (!!).
    SET(CMAKE_SHARED_LIBRARY_LINK_C_FLAGS "")
    SET(CMAKE_SHARED_LIBRARY_LINK_CXX_FLAGS "")

    # Set default build type.
    if(NOT CMAKE_BUILD_TYPE)
      message(STATUS "Setting build type to 'RelWithDebInfo' as none was specified.")
      set(CMAKE_BUILD_TYPE "RelWithDebInfo" CACHE STRING
          "Choose the type of build, options are: Debug Release RelWithDebInfo MinSizeRel."
          FORCE)
    endif()

    # When using Ninja, compiler output won't be colorized without this.
    include(CheckCXXCompilerFlag)
    CHECK_CXX_COMPILER_FLAG(-fdiagnostics-color=always SUPPORTS_COLOR_ALWAYS)
    if(SUPPORTS_COLOR_ALWAYS)
      set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fdiagnostics-color=always")
    endif()

    # Implement ASAN/UBSAN options
    if(UPB_ENABLE_ASAN)
      set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=address")
      set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fsanitize=address")
      set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=address")
      set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fsanitize=address")
    endif()

    if(UPB_ENABLE_UBSAN)
      set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=undefined")
      set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fsanitize=address")
      set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=address")
      set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fsanitize=address")
    endif()

    include_directories(..)
    include_directories(../cmake)
    include_directories(${CMAKE_CURRENT_BINARY_DIR})

    if(NOT TARGET utf8_range)
      if(EXISTS ../external/utf8_range)
        # utf8_range is already installed
        include_directories(../external/utf8_range)
      else()
        include(FetchContent)
        FetchContent_Declare(
          utf8_range
          GIT_REPOSITORY "https://github.com/protocolbuffers/utf8_range.git"
          GIT_TAG "%(utf8_range_commit)s"
        )
        FetchContent_GetProperties(utf8_range)
        if(NOT utf8_range_POPULATED)
          FetchContent_Populate(utf8_range)
          include_directories(${utf8_range_SOURCE_DIR})
        endif()
      endif()
    endif()

    if(APPLE)
      set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -undefined dynamic_lookup -flat_namespace")
    elseif(UNIX)
      set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--build-id")
    endif()

    enable_testing()

    %(toplevel)s
  """)
# Module-level state: `converter` accumulates the CMake output while the
# Bazel files are exec()'d below.
data = {}  # NOTE(review): appears unused in this file — confirm before removing.
converter = Converter()
def GetDict(obj):
  """Builds an exec() namespace from obj's public (non-underscore) attributes.

  UPB_DEFAULT_COPTS / UPB_DEFAULT_CPPOPTS are stubbed to empty lists because
  the BUILD files reference them but the CMake translation does not need
  them.
  """
  namespace = {
      "UPB_DEFAULT_COPTS": [],  # HACK
      "UPB_DEFAULT_CPPOPTS": [],  # HACK
  }
  for attr in dir(obj):
    if not attr.startswith("_"):
      namespace[attr] = getattr(obj, attr)
  return namespace
# Evaluate the Bazel files by exec()ing their text with our stub namespaces;
# the *FileFunctions methods populate `converter` as a side effect.
globs = GetDict(converter)  # NOTE(review): appears unused — confirm before removing.
workspace_dict = GetDict(WorkspaceFileFunctions(converter))
# workspace_deps.bzl is evaluated into the same dict first — presumably so
# the symbols it defines are in scope when WORKSPACE runs; verify.
exec(open("bazel/workspace_deps.bzl").read(), workspace_dict)
exec(open("WORKSPACE").read(), workspace_dict)
exec(open("BUILD").read(), GetDict(BuildFileFunctions(converter)))
# Write the assembled CMakeLists.txt to the output path from the command line.
with open(sys.argv[1], "w") as f:
  f.write(converter.convert())
| 10,929
| 26.054455
| 109
|
py
|
grpc
|
grpc-master/third_party/upb/cmake/staleness_test_lib.py
|
#!/usr/bin/python
#
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Shared code for validating staleness_test() rules.
This code is used by test scripts generated from staleness_test() rules.
"""
from __future__ import absolute_import
from __future__ import print_function
import difflib
import sys
import os
from shutil import copyfile
class _FilePair(object):
"""Represents a single (target, generated) file pair."""
def __init__(self, target, generated):
self.target = target
self.generated = generated
class Config(object):
  """Represents the configuration for a single staleness test target."""
  def __init__(self, file_list):
    # Work on a copy so the caller's list is not mutated.  build_defs.bzl
    # packs three metadata entries (pattern, package name, target name)
    # onto the end of the plain file list; peel them off in reverse.
    entries = list(file_list)
    self.target_name = entries.pop()
    self.package_name = entries.pop()
    self.pattern = entries.pop()
    self.file_list = entries
def _GetFilePairs(config):
    """Generates the list of file pairs.

    Args:
      config: a Config object representing this target's config.

    Returns:
      A list of _FilePair objects.
    """
    pairs = []
    under_bazel_bin = os.path.exists("bazel-bin")
    for filename in config.file_list:
        target_path = os.path.join(config.package_name, filename)
        generated_path = os.path.join(config.package_name,
                                      config.pattern % filename)
        if under_bazel_bin:
            generated_path = os.path.join("bazel-bin", generated_path)
        if not os.path.isfile(generated_path):
            # Generated files should always exist: Blaze guarantees this
            # before we run, so a missing one means the build is incomplete.
            print("Generated file '%s' does not exist." % generated_path)
            print("Please run this command to generate it:")
            print(" bazel build %s:%s" % (config.package_name, config.target_name))
            sys.exit(1)
        pairs.append(_FilePair(target_path, generated_path))
    return pairs
def _GetMissingAndStaleFiles(file_pairs):
"""Generates lists of missing and stale files.
Args:
file_pairs: a list of _FilePair objects.
Returns:
missing_files: a list of _FilePair objects representing missing files.
These target files do not exist at all.
stale_files: a list of _FilePair objects representing stale files.
These target files exist but have stale contents.
"""
missing_files = []
stale_files = []
for pair in file_pairs:
if not os.path.isfile(pair.target):
missing_files.append(pair)
continue
with open(pair.generated) as g, open(pair.target) as t:
if g.read() != t.read():
stale_files.append(pair)
return missing_files, stale_files
def _CopyFiles(file_pairs):
"""Copies all generated files to the corresponding target file.
The target files must be writable already.
Args:
file_pairs: a list of _FilePair objects that we want to copy.
"""
for pair in file_pairs:
target_dir = os.path.dirname(pair.target)
if not os.path.isdir(target_dir):
os.makedirs(target_dir)
copyfile(pair.generated, pair.target)
def FixFiles(config):
    """Implements the --fix option: overwrites missing or out-of-date files.

    Args:
      config: the Config object for this test.
    """
    pairs = _GetFilePairs(config)
    missing, stale = _GetMissingAndStaleFiles(pairs)
    _CopyFiles(stale + missing)
def CheckFilesMatch(config):
    """Checks whether each target file matches the corresponding generated file.

    Args:
      config: the Config object for this test.

    Returns:
      None if everything matches, otherwise a string error message.
    """
    diff_errors = []
    file_pairs = _GetFilePairs(config)
    missing_files, stale_files = _GetMissingAndStaleFiles(file_pairs)
    for pair in missing_files:
        # (fixed: removed a stray no-op `continue` that ended this loop body)
        diff_errors.append("File %s does not exist" % pair.target)
    for pair in stale_files:
        # Diff from the generated (authoritative) file to the checked-in copy.
        with open(pair.generated) as g, open(pair.target) as t:
            diff = ''.join(difflib.unified_diff(g.read().splitlines(keepends=True),
                                                t.read().splitlines(keepends=True)))
        diff_errors.append("File %s is out of date:\n%s" % (pair.target, diff))
    if not diff_errors:
        return None
    error_msg = "Files out of date!\n\n"
    error_msg += "To fix run THIS command:\n"
    error_msg += " bazel-bin/%s/%s --fix\n\n" % (config.package_name,
                                                 config.target_name)
    error_msg += "Errors:\n"
    error_msg += " " + "\n ".join(diff_errors)
    return error_msg
| 6,039
| 30.789474
| 81
|
py
|
grpc
|
grpc-master/third_party/upb/cmake/staleness_test.py
|
#!/usr/bin/python
#
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""The py_test() script for staleness_test() rules.
Note that this file is preprocessed! The INSERT_<...> text below is replaced
with the actual list of files before we actually run the script.
"""
from __future__ import absolute_import
from cmake import staleness_test_lib
import unittest
import sys
# The placeholder below is replaced with the real list of files when the
# staleness_test() rule preprocesses this script (see build_defs.bzl).
file_list = """
INSERT_FILE_LIST_HERE
""".split()
# Shared configuration describing this staleness test target.
config = staleness_test_lib.Config(file_list)
class TestFilesMatch(unittest.TestCase):
    def testFilesMatch(self):
        # CheckFilesMatch returns None on success, or a full error/diff
        # message string that assertFalse surfaces on failure.
        errors = staleness_test_lib.CheckFilesMatch(config)
        self.assertFalse(errors, errors)
if len(sys.argv) > 1 and sys.argv[1] == "--fix":
    # --fix mode: overwrite stale/missing checked-in files instead of testing.
    staleness_test_lib.FixFiles(config)
else:
    unittest.main()
| 2,236
| 37.568966
| 81
|
py
|
grpc
|
grpc-master/third_party/upb/docs/render.py
|
#!/usr/bin/env python3
import subprocess
import sys
import shutil
import os
# Usage: render.py <doc>.in.md — rewrites it to <doc>.md, converting every
# ```dot fenced block into an SVG rendered by graphviz `dot`.
if len(sys.argv) < 2:
    print("Must pass a filename argument")
    sys.exit(1)
in_filename = sys.argv[1]
out_filename = in_filename.replace(".in.md", ".md")
# Directory (named after the doc) that receives the generated SVG files.
out_dir = in_filename.replace(".in.md", "")
if in_filename == out_filename:
    print("File must end in .in.md")
    sys.exit(1)
# Start with a clean output directory on every run.
if os.path.isdir(out_dir):
    shutil.rmtree(out_dir)
os.mkdir(out_dir)
file_num = 1
with open(out_filename, "wb") as out_file, open(in_filename, "rb") as in_file:
    for line in in_file:
        if line.startswith(b"```dot"):
            # Collect the fenced dot source up to (but excluding) the
            # closing ``` line.  NOTE(review): an unterminated block would
            # raise StopIteration here — assumes well-formed input.
            dot_lines = []
            while True:
                dot_line = next(in_file)
                if dot_line == b"```\n":
                    break
                dot_lines.append(dot_line)
            dot_input = b"".join(dot_lines)
            svg_filename = out_dir + "/" + str(file_num) + ".svg"
            # Render the dot source straight to the SVG file.
            svg = subprocess.check_output(['dot', '-Tsvg', '-o', svg_filename], input=dot_input)
            out_file.write(b"<div align=center>\n")
            out_file.write(b"<img src='%s'/>\n" % (svg_filename.encode('utf-8')))
            out_file.write(b"</div>\n")
            file_num += 1
        else:
            # Non-dot lines pass through unchanged.
            out_file.write(line)
| 1,255
| 27.545455
| 96
|
py
|
grpc
|
grpc-master/src/php/bin/xds_manager.py
|
#!/usr/bin/env python
# Copyright 2021 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Manage PHP child processes for the main PHP xDS Interop client"""
import argparse
import fcntl
import os
import subprocess
# This script is being launched from src/php/bin/run_xds_client.sh
# to manage PHP child processes which will send 1 RPC each
# asynchronously. This script keeps track of all those open
# processes and reports back to the main PHP interop client each
# of the child RPCs' status code.
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # tmp_file1: RPC requests from the parent; tmp_file2: results back to it.
    parser.add_argument("--tmp_file1", nargs="?", default="")
    parser.add_argument("--tmp_file2", nargs="?", default="")
    parser.add_argument("--bootstrap_path", nargs="?", default="")
    args = parser.parse_args()
    server_address = ""
    # Keys already dispatched, so each request line is only launched once.
    rpcs_started = []
    # key -> Popen handle of the in-flight child RPC process.
    open_processes = {}
    client_env = dict(os.environ)
    client_env["GRPC_XDS_BOOTSTRAP"] = args.bootstrap_path
    while True:
        # tmp_file1 contains a list of RPCs (and their spec) the parent process
        # wants executed
        f1 = open(args.tmp_file1, "r+")
        # Exclusive lock: the parent writes to this file under the same lock.
        fcntl.flock(f1, fcntl.LOCK_EX)
        while True:
            key = f1.readline()
            if not key:
                break
            key = key.strip()
            if key.startswith("server_address"):
                # First server_address line wins; skips the prefix plus one
                # separator character (presumably ':') — see xds_client.php.
                if not server_address:
                    server_address = key[15:]
            elif not key in rpcs_started:
                # format here needs to be in sync with
                # src/php/tests/interop/xds_client.php
                items = key.split("|")
                num = items[0]
                metadata = items[2]
                timeout_sec = items[3]
                if items[1] == "UnaryCall":
                    p = subprocess.Popen(
                        [
                            "php",
                            "-d",
                            "extension=grpc.so",
                            "-d",
                            "extension=pthreads.so",
                            "src/php/tests/interop/xds_unary_call.php",
                            "--server=" + server_address,
                            "--num=" + str(num),
                            "--metadata=" + metadata,
                            "--timeout_sec=" + timeout_sec,
                        ],
                        env=client_env,
                    )
                elif items[1] == "EmptyCall":
                    p = subprocess.Popen(
                        [
                            "php",
                            "-d",
                            "extension=grpc.so",
                            "-d",
                            "extension=pthreads.so",
                            "src/php/tests/interop/xds_empty_call.php",
                            "--server=" + server_address,
                            "--num=" + str(num),
                            "--metadata=" + metadata,
                            "--timeout_sec=" + timeout_sec,
                        ],
                        env=client_env,
                    )
                else:
                    # Unknown RPC type: ignore the line entirely.
                    continue
                rpcs_started.append(key)
                open_processes[key] = p
        # Consume the request file so the parent can append fresh work.
        f1.truncate(0)
        fcntl.flock(f1, fcntl.LOCK_UN)
        f1.close()
        # tmp_file2 contains the RPC result of each key received from tmp_file1
        f2 = open(args.tmp_file2, "a")
        fcntl.flock(f2, fcntl.LOCK_EX)
        keys_to_delete = []
        for key, process in open_processes.items():
            # poll() is non-blocking; None means the child is still running.
            result = process.poll()
            if result is not None:
                # format here needs to be in sync with
                # src/php/tests/interop/xds_client.php
                f2.write(key + "," + str(process.returncode) + "\n")
                keys_to_delete.append(key)
        # Deleting after iteration avoids mutating the dict mid-loop.
        for key in keys_to_delete:
            del open_processes[key]
        fcntl.flock(f2, fcntl.LOCK_UN)
        f2.close()
| 4,496
| 38.79646
| 79
|
py
|
grpc
|
grpc-master/src/upb/gen_build_yaml.py
|
#!/usr/bin/env python2.7
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TODO: This should ideally be in upb submodule to avoid hardcoding this here.
import re
import os
import sys
import yaml
out = {}
# NOTE(review): this try/except wraps a pure literal assignment and silently
# swallows every error, printing an empty YAML document on failure; the
# guard should never actually trigger.
try:
    out["libs"] = [
        {
            "name": "upb",
            "build": "all",
            "language": "c",
            # Hand-maintained list of utf8_range and upb translation units
            # compiled into the "upb" library target.
            "src": [
                "third_party/utf8_range/naive.c",
                "third_party/utf8_range/range2-neon.c",
                "third_party/utf8_range/range2-sse.c",
                "third_party/upb/upb/base/status.c",
                "third_party/upb/upb/collections/array.c",
                "third_party/upb/upb/collections/map_sorter.c",
                "third_party/upb/upb/collections/map.c",
                "third_party/upb/upb/hash/common.c",
                "third_party/upb/upb/json/decode.c",
                "third_party/upb/upb/json/encode.c",
                "third_party/upb/upb/lex/atoi.c",
                "third_party/upb/upb/lex/round_trip.c",
                "third_party/upb/upb/lex/strtod.c",
                "third_party/upb/upb/lex/unicode.c",
                "third_party/upb/upb/mem/alloc.c",
                "third_party/upb/upb/mem/arena.c",
                "third_party/upb/upb/message/accessors.c",
                "third_party/upb/upb/message/message.c",
                "third_party/upb/upb/mini_table/common.c",
                "third_party/upb/upb/mini_table/decode.c",
                "third_party/upb/upb/mini_table/encode.c",
                "third_party/upb/upb/mini_table/extension_registry.c",
                "third_party/upb/upb/reflection/def_builder.c",
                "third_party/upb/upb/reflection/def_pool.c",
                "third_party/upb/upb/reflection/def_type.c",
                "third_party/upb/upb/reflection/desc_state.c",
                "third_party/upb/upb/reflection/enum_def.c",
                "third_party/upb/upb/reflection/enum_reserved_range.c",
                "third_party/upb/upb/reflection/enum_value_def.c",
                "third_party/upb/upb/reflection/extension_range.c",
                "third_party/upb/upb/reflection/field_def.c",
                "third_party/upb/upb/reflection/file_def.c",
                "third_party/upb/upb/reflection/message_def.c",
                "third_party/upb/upb/reflection/message_reserved_range.c",
                "third_party/upb/upb/reflection/message.c",
                "third_party/upb/upb/reflection/method_def.c",
                "third_party/upb/upb/reflection/oneof_def.c",
                "third_party/upb/upb/reflection/service_def.c",
                "third_party/upb/upb/text/encode.c",
                "third_party/upb/upb/wire/decode_fast.c",
                "third_party/upb/upb/wire/decode.c",
                "third_party/upb/upb/wire/encode.c",
                "third_party/upb/upb/wire/eps_copy_input_stream.c",
                "third_party/upb/upb/wire/reader.c",
                "src/core/ext/upb-generated/google/protobuf/descriptor.upb.c",
                "src/core/ext/upbdefs-generated/google/protobuf/descriptor.upbdefs.c",
            ],
            # Public and internal headers shipped with the library.
            "headers": [
                "third_party/utf8_range/utf8_range.h",
                "third_party/upb/upb/alloc.h",
                "third_party/upb/upb/arena.h",
                "third_party/upb/upb/array.h",
                "third_party/upb/upb/base/descriptor_constants.h",
                "third_party/upb/upb/base/log2.h",
                "third_party/upb/upb/base/status.h",
                "third_party/upb/upb/base/string_view.h",
                "third_party/upb/upb/collections/array_internal.h",
                "third_party/upb/upb/collections/array.h",
                "third_party/upb/upb/collections/map_gencode_util.h",
                "third_party/upb/upb/collections/map_internal.h",
                "third_party/upb/upb/collections/map_sorter_internal.h",
                "third_party/upb/upb/collections/map.h",
                "third_party/upb/upb/collections/message_value.h",
                "third_party/upb/upb/decode.h",
                "third_party/upb/upb/def.h",
                "third_party/upb/upb/def.hpp",
                "third_party/upb/upb/encode.h",
                "third_party/upb/upb/extension_registry.h",
                "third_party/upb/upb/hash/common.h",
                "third_party/upb/upb/hash/int_table.h",
                "third_party/upb/upb/hash/str_table.h",
                "third_party/upb/upb/json_decode.h",
                "third_party/upb/upb/json_encode.h",
                "third_party/upb/upb/json/decode.h",
                "third_party/upb/upb/json/encode.h",
                "third_party/upb/upb/lex/atoi.h",
                "third_party/upb/upb/lex/round_trip.h",
                "third_party/upb/upb/lex/strtod.h",
                "third_party/upb/upb/lex/unicode.h",
                "third_party/upb/upb/map.h",
                "third_party/upb/upb/mem/alloc.h",
                "third_party/upb/upb/mem/arena_internal.h",
                "third_party/upb/upb/mem/arena.h",
                "third_party/upb/upb/message/accessors_internal.h",
                "third_party/upb/upb/message/accessors.h",
                "third_party/upb/upb/message/extension_internal.h",
                "third_party/upb/upb/message/internal.h",
                "third_party/upb/upb/message/message.h",
                "third_party/upb/upb/mini_table.h",
                "third_party/upb/upb/mini_table/common_internal.h",
                "third_party/upb/upb/mini_table/common.h",
                "third_party/upb/upb/mini_table/decode.h",
                "third_party/upb/upb/mini_table/encode_internal.h",
                "third_party/upb/upb/mini_table/encode_internal.hpp",
                "third_party/upb/upb/mini_table/enum_internal.h",
                "third_party/upb/upb/mini_table/extension_internal.h",
                "third_party/upb/upb/mini_table/extension_registry.h",
                "third_party/upb/upb/mini_table/field_internal.h",
                "third_party/upb/upb/mini_table/file_internal.h",
                "third_party/upb/upb/mini_table/message_internal.h",
                "third_party/upb/upb/mini_table/sub_internal.h",
                "third_party/upb/upb/mini_table/types.h",
                "third_party/upb/upb/msg.h",
                "third_party/upb/upb/port/atomic.h",
                "third_party/upb/upb/port/def.inc",
                "third_party/upb/upb/port/undef.inc",
                "third_party/upb/upb/port/vsnprintf_compat.h",
                "third_party/upb/upb/reflection.h",
                "third_party/upb/upb/reflection.hpp",
                "third_party/upb/upb/reflection/common.h",
                "third_party/upb/upb/reflection/def_builder_internal.h",
                "third_party/upb/upb/reflection/def_pool_internal.h",
                "third_party/upb/upb/reflection/def_pool.h",
                "third_party/upb/upb/reflection/def_type.h",
                "third_party/upb/upb/reflection/def.h",
                "third_party/upb/upb/reflection/def.hpp",
                "third_party/upb/upb/reflection/desc_state_internal.h",
                "third_party/upb/upb/reflection/enum_def_internal.h",
                "third_party/upb/upb/reflection/enum_def.h",
                "third_party/upb/upb/reflection/enum_reserved_range_internal.h",
                "third_party/upb/upb/reflection/enum_reserved_range.h",
                "third_party/upb/upb/reflection/enum_value_def_internal.h",
                "third_party/upb/upb/reflection/enum_value_def.h",
                "third_party/upb/upb/reflection/extension_range_internal.h",
                "third_party/upb/upb/reflection/extension_range.h",
                "third_party/upb/upb/reflection/field_def_internal.h",
                "third_party/upb/upb/reflection/field_def.h",
                "third_party/upb/upb/reflection/file_def_internal.h",
                "third_party/upb/upb/reflection/file_def.h",
                "third_party/upb/upb/reflection/message_def_internal.h",
                "third_party/upb/upb/reflection/message_def.h",
                "third_party/upb/upb/reflection/message_reserved_range_internal.h",
                "third_party/upb/upb/reflection/message_reserved_range.h",
                "third_party/upb/upb/reflection/message.h",
                "third_party/upb/upb/reflection/message.hpp",
                "third_party/upb/upb/reflection/method_def_internal.h",
                "third_party/upb/upb/reflection/method_def.h",
                "third_party/upb/upb/reflection/oneof_def_internal.h",
                "third_party/upb/upb/reflection/oneof_def.h",
                "third_party/upb/upb/reflection/service_def_internal.h",
                "third_party/upb/upb/reflection/service_def.h",
                "third_party/upb/upb/status.h",
                "third_party/upb/upb/string_view.h",
                "third_party/upb/upb/text_encode.h",
                "third_party/upb/upb/text/encode.h",
                "third_party/upb/upb/upb.h",
                "third_party/upb/upb/upb.hpp",
                "third_party/upb/upb/wire/common_internal.h",
                "third_party/upb/upb/wire/common.h",
                "third_party/upb/upb/wire/decode_fast.h",
                "third_party/upb/upb/wire/decode_internal.h",
                "third_party/upb/upb/wire/decode.h",
                "third_party/upb/upb/wire/encode.h",
                "third_party/upb/upb/wire/eps_copy_input_stream.h",
                "third_party/upb/upb/wire/reader.h",
                "third_party/upb/upb/wire/swap_internal.h",
                "third_party/upb/upb/wire/types.h",
                "src/core/ext/upb-generated/google/protobuf/descriptor.upb.h",
                "src/core/ext/upbdefs-generated/google/protobuf/descriptor.upbdefs.h",
            ],
            "secure": False,
        }
    ]
except:
    pass
# Emit the build metadata as YAML on stdout for the build-yaml tooling.
print(yaml.dump(out))
| 10,508
| 51.545
| 86
|
py
|
grpc
|
grpc-master/src/abseil-cpp/gen_build_yaml.py
|
#!/usr/bin/env python2.7
# Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import yaml
# Preprocessed abseil build description, stored next to this script.
BUILDS_YAML_PATH = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), "preprocessed_builds.yaml"
)
with open(BUILDS_YAML_PATH) as builds_file:
    builds = yaml.safe_load(builds_file)
# Stamp the gRPC build-system attributes onto every abseil library entry.
for build in builds:
    build.update(build="private", build_system=[], language="c", secure=False)
print(yaml.dump({"libs": builds}))
| 977
| 29.5625
| 74
|
py
|
grpc
|
grpc-master/src/abseil-cpp/preprocessed_builds.yaml.gen.py
|
#!/usr/bin/env python3
# Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import os
import re
import subprocess
import xml.etree.ElementTree as ET
import yaml
# Location of the abseil submodule (relative to the repo root) and the
# generated output file this script writes.
ABSEIL_PATH = "third_party/abseil-cpp"
OUTPUT_PATH = "src/abseil-cpp/preprocessed_builds.yaml"
# Matches an ALL-CAPS section keyword (NAME, SRCS, DEPS, ...) in a cmake rule.
CAPITAL_WORD = re.compile("[A-Z]+")
# Opening line "absl_cc_*(" and closing line ")" of an abseil cmake rule.
ABSEIL_CMAKE_RULE_BEGIN = re.compile("^absl_cc_.*\(", re.MULTILINE)
ABSEIL_CMAKE_RULE_END = re.compile("^\)", re.MULTILINE)
# Rule object representing the rule of Bazel BUILD.
Rule = collections.namedtuple(
    "Rule", "type name package srcs hdrs textual_hdrs deps visibility testonly"
)
def get_elem_value(elem, name):
    """Returns the value of the child XML element with the given name.

    Understands the bazel query XML output: <string>, <boolean> and <list>
    children.  Returns None when no child with that name exists.

    Raises:
      ValueError: if a matching child has an unrecognized tag.
    """
    for child in elem:
        if child.attrib.get("name") == name:
            if child.tag == "string":
                return child.attrib.get("value")
            elif child.tag == "boolean":
                # bazel emits the strings "true"/"false"; map to a bool.
                return child.attrib.get("value") == "true"
            elif child.tag == "list":
                return [
                    nested_child.attrib.get("value") for nested_child in child
                ]
            else:
                # Fixed: the original `raise "..."` (raising a str) is a
                # TypeError on Python 3; raise a proper exception instead.
                raise ValueError("Cannot recognize tag: " + child.tag)
    return None
def normalize_paths(paths):
    """Returns the list of normalized path.

    e.g. ["//absl/strings:dir/header.h"] -> ["absl/strings/dir/header.h"]
    """
    normalized = []
    for label in paths:
        # Drop leading slashes, then turn the package:file colon into "/".
        normalized.append(label.lstrip("/").replace(":", "/"))
    return normalized
def parse_bazel_rule(elem, package):
    """Returns a Rule built from one bazel query XML <rule> element.

    Args:
      elem: the XML element for a single rule from `bazel query --output xml`.
      package: the bazel package label the rule belongs to (e.g. "//absl/base").
    """
    return Rule(
        type=elem.attrib["class"],
        name=get_elem_value(elem, "name"),
        package=package,
        # Missing attributes come back as None; normalize them to [].
        srcs=normalize_paths(get_elem_value(elem, "srcs") or []),
        hdrs=normalize_paths(get_elem_value(elem, "hdrs") or []),
        textual_hdrs=normalize_paths(
            get_elem_value(elem, "textual_hdrs") or []
        ),
        deps=get_elem_value(elem, "deps") or [],
        visibility=get_elem_value(elem, "visibility") or [],
        testonly=get_elem_value(elem, "testonly") or False,
    )
def read_bazel_build(package):
    """Runs bazel query on given package file and returns all cc rules.

    Args:
      package: bazel package label, e.g. "//absl/base".

    Returns:
      A list of Rule objects for every cc_* rule in the package.
    """
    # Use a wrapper version of bazel in gRPC not to use system-wide bazel
    # to avoid bazel conflict when running on Kokoro.
    BAZEL_BIN = "../../tools/bazel"
    result = subprocess.check_output(
        [BAZEL_BIN, "query", package + ":all", "--output", "xml"]
    )
    root = ET.fromstring(result)
    return [
        parse_bazel_rule(elem, package)
        for elem in root
        if elem.tag == "rule" and elem.attrib["class"].startswith("cc_")
    ]
def collect_bazel_rules(root_path):
    """Collects and returns all bazel rules from root path recursively."""
    collected = []
    for dirpath, _, _ in os.walk(root_path):
        # Only directories that actually contain a BUILD.bazel file are
        # bazel packages worth querying.
        if os.path.exists(os.path.join(dirpath, "BUILD.bazel")):
            collected.extend(read_bazel_build("//" + dirpath))
    return collected
def parse_cmake_rule(rule, package):
    """Returns a rule from absl cmake rule.

    Reference: https://github.com/abseil/abseil-cpp/blob/master/CMake/AbseilHelpers.cmake
    """
    sections = {}
    current = None
    lines = rule.splitlines()
    # Skip the "absl_cc_*(" opener and the trailing ")" line.
    for line in lines[1:-1]:
        stripped = line.strip()
        if CAPITAL_WORD.match(stripped):
            # An ALL-CAPS keyword (NAME, SRCS, DEPS, ...) opens a new section.
            current = sections.setdefault(stripped, [])
        elif current is None:
            # Content before any keyword means the rule text is malformed.
            raise ValueError("Illegal syntax: {}".format(rule))
        else:
            current.append(stripped)
    return Rule(
        type=lines[0].rstrip("("),
        name="absl::" + sections["NAME"][0],
        package=package,
        srcs=[package + "/" + f.strip('"') for f in sections.get("SRCS", [])],
        hdrs=[package + "/" + f.strip('"') for f in sections.get("HDRS", [])],
        textual_hdrs=[],
        deps=sections.get("DEPS", []),
        visibility="PUBLIC" in sections,
        testonly="TESTONLY" in sections,
    )
def read_cmake_build(build_path, package):
    """Parses given CMakeLists.txt file and returns all cc rules.

    Args:
      build_path: path of the CMakeLists.txt file to parse.
      package: directory containing the file; used to qualify file names.
    """
    rules = []
    with open(build_path, "r") as f:
        src = f.read()
        for begin_mo in ABSEIL_CMAKE_RULE_BEGIN.finditer(src):
            # end_mo is searched in a slice starting at the rule opener, so
            # its offsets are relative and must be rebased by begin_mo.start(0).
            end_mo = ABSEIL_CMAKE_RULE_END.search(src[begin_mo.start(0) :])
            # +1 keeps the closing ")" character in the extracted rule text.
            expr = src[
                begin_mo.start(0) : begin_mo.start(0) + end_mo.start(0) + 1
            ]
            rules.append(parse_cmake_rule(expr, package))
    return rules
def collect_cmake_rules(root_path):
    """Collects and returns all cmake rules from root path recursively."""
    found = []
    for dirpath, _, _ in os.walk(root_path):
        cmake_file = os.path.join(dirpath, "CMakeLists.txt")
        if os.path.exists(cmake_file):
            found.extend(read_cmake_build(cmake_file, dirpath))
    return found
def pairing_bazel_and_cmake_rules(bazel_rules, cmake_rules):
    """Returns a pair map between bazel rules and cmake rules based on
    the similarity of the file list in the rule. This is because
    cmake build and bazel build of abseil are not identical.
    """
    pair_map = {}
    for brule in bazel_rules:
        bazel_files = set(brule.srcs + brule.hdrs + brule.textual_hdrs)
        best_match, best_score = None, 0
        for crule in cmake_rules:
            cmake_files = set(crule.srcs + crule.hdrs + crule.textual_hdrs)
            # Similarity = number of files the two rules share.
            score = len(bazel_files & cmake_files)
            if score > best_score:
                best_match, best_score = crule, score
        # Rules with no overlap at all get no cmake counterpart.
        if best_match:
            pair_map[(brule.package, brule.name)] = best_match.name
    return pair_map
def resolve_hdrs(files):
    """Maps .h/.inc file names to their paths under the abseil submodule."""
    headers = [f for f in files if f.endswith((".h", ".inc"))]
    return [ABSEIL_PATH + "/" + f for f in headers]
def resolve_srcs(files):
    """Maps .cc file names to their paths under the abseil submodule."""
    sources = [f for f in files if f.endswith(".cc")]
    return [ABSEIL_PATH + "/" + f for f in sources]
def resolve_deps(targets):
    """Strips the leading "//" from absolute bazel labels."""
    resolved = []
    for target in targets:
        resolved.append(target[2:] if target.startswith("//") else target)
    return resolved
def generate_builds(root_path):
    """Generates builds from all BUILD files under absl directory.

    Args:
      root_path: directory (relative to the abseil checkout) to scan.

    Returns:
      A list of dicts, one per non-testonly cc_library bazel rule, each with
      the matched cmake target name and resolved header/src/dep lists.
    """
    # Only production (non-testonly) library rules matter for the build yaml.
    bazel_rules = list(
        filter(
            lambda r: r.type == "cc_library" and not r.testonly,
            collect_bazel_rules(root_path),
        )
    )
    cmake_rules = list(
        filter(
            lambda r: r.type == "absl_cc_library" and not r.testonly,
            collect_cmake_rules(root_path),
        )
    )
    pair_map = pairing_bazel_and_cmake_rules(bazel_rules, cmake_rules)
    builds = []
    # Deterministic output order: sort by "package:name".
    for rule in sorted(bazel_rules, key=lambda r: r.package[2:] + ":" + r.name):
        p = {
            "name": rule.package[2:] + ":" + rule.name,
            # Empty string when no cmake counterpart was found.
            "cmake_target": pair_map.get((rule.package, rule.name)) or "",
            "headers": sorted(
                resolve_hdrs(rule.srcs + rule.hdrs + rule.textual_hdrs)
            ),
            "src": sorted(
                resolve_srcs(rule.srcs + rule.hdrs + rule.textual_hdrs)
            ),
            "deps": sorted(resolve_deps(rule.deps)),
        }
        builds.append(p)
    return builds
def main():
    """Regenerates preprocessed_builds.yaml from the abseil bazel/cmake builds."""
    previous_dir = os.getcwd()
    # The bazel/cmake queries must run from inside the abseil submodule.
    os.chdir(ABSEIL_PATH)
    builds = generate_builds("absl")
    os.chdir(previous_dir)
    with open(OUTPUT_PATH, "w") as outfile:
        outfile.write(yaml.dump(builds, indent=2))
if __name__ == "__main__":
    main()
| 7,761
| 32.747826
| 89
|
py
|
grpc
|
grpc-master/src/re2/gen_build_yaml.py
|
#!/usr/bin/env python2.7
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import glob
import yaml
# Run from the repository root regardless of where the script was invoked.
os.chdir(os.path.dirname(sys.argv[0]) + "/../..")
out = {}
out["libs"] = [
    {
        # TODO @donnadionne: extracting the list of source files from bazel build to reduce duplication
        "name": "re2",
        "build": "private",
        "language": "c",
        "secure": False,
        # All re2 sources plus the three util translation units re2 needs.
        "src": sorted(
            glob.glob("third_party/re2/re2/*.cc")
            + [
                "third_party/re2/util/pcre.cc",
                "third_party/re2/util/rune.cc",
                "third_party/re2/util/strutil.cc",
            ]
        ),
        "headers": sorted(
            glob.glob("third_party/re2/re2/*.h")
            + glob.glob("third_party/re2/util/*.h")
        ),
    }
]
# Emit the build metadata as YAML on stdout for the build-yaml tooling.
print(yaml.dump(out))
| 1,369
| 26.959184
| 103
|
py
|
grpc
|
grpc-master/src/c-ares/gen_build_yaml.py
|
#!/usr/bin/env python2.7
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import os
import sys
import yaml
# Run from the repository root regardless of where the script was invoked.
os.chdir(os.path.dirname(sys.argv[0]) + "/../..")
out = {}
# NOTE(review): the try below pairs with a bare `except: pass` later in this
# file, silently swallowing any error and emitting an empty YAML document.
try:
def gen_ares_build(x):
subprocess.call("third_party/cares/cares/buildconf", shell=True)
subprocess.call("third_party/cares/cares/configure", shell=True)
def config_platform(x):
if "darwin" in sys.platform:
return "src/cares/cares/config_darwin/ares_config.h"
if "freebsd" in sys.platform:
return "src/cares/cares/config_freebsd/ares_config.h"
if "linux" in sys.platform:
return "src/cares/cares/config_linux/ares_config.h"
if "openbsd" in sys.platform:
return "src/cares/cares/config_openbsd/ares_config.h"
if not os.path.isfile("third_party/cares/cares/ares_config.h"):
gen_ares_build(x)
return "third_party/cares/cares/ares_config.h"
def ares_build(x):
if os.path.isfile("src/cares/cares/ares_build.h"):
return "src/cares/cares/ares_build.h"
if not os.path.isfile("third_party/cares/cares/include/ares_build.h"):
gen_ares_build(x)
return "third_party/cares/cares/include/ares_build.h"
    # Hand-maintained list of c-ares sources and headers vendored into gRPC.
    out["libs"] = [
        {
            "name": "ares",
            "defaults": "ares",
            "build": "private",
            "language": "c",
            "secure": False,
            "src": [
                "third_party/cares/cares/src/lib/ares__read_line.c",
                "third_party/cares/cares/src/lib/ares__get_hostent.c",
                "third_party/cares/cares/src/lib/ares__close_sockets.c",
                "third_party/cares/cares/src/lib/ares__timeval.c",
                "third_party/cares/cares/src/lib/ares_gethostbyaddr.c",
                "third_party/cares/cares/src/lib/ares_getenv.c",
                "third_party/cares/cares/src/lib/ares_free_string.c",
                "third_party/cares/cares/src/lib/ares_free_hostent.c",
                "third_party/cares/cares/src/lib/ares_fds.c",
                "third_party/cares/cares/src/lib/ares_expand_string.c",
                "third_party/cares/cares/src/lib/ares_create_query.c",
                "third_party/cares/cares/src/lib/ares_cancel.c",
                "third_party/cares/cares/src/lib/ares_android.c",
                "third_party/cares/cares/src/lib/ares_parse_txt_reply.c",
                "third_party/cares/cares/src/lib/ares_parse_srv_reply.c",
                "third_party/cares/cares/src/lib/ares_parse_soa_reply.c",
                "third_party/cares/cares/src/lib/ares_parse_ptr_reply.c",
                "third_party/cares/cares/src/lib/ares_parse_ns_reply.c",
                "third_party/cares/cares/src/lib/ares_parse_naptr_reply.c",
                "third_party/cares/cares/src/lib/ares_parse_mx_reply.c",
                "third_party/cares/cares/src/lib/ares_parse_caa_reply.c",
                "third_party/cares/cares/src/lib/ares_options.c",
                "third_party/cares/cares/src/lib/ares_nowarn.c",
                "third_party/cares/cares/src/lib/ares_mkquery.c",
                "third_party/cares/cares/src/lib/ares_llist.c",
                "third_party/cares/cares/src/lib/ares_getsock.c",
                "third_party/cares/cares/src/lib/ares_getnameinfo.c",
                "third_party/cares/cares/src/lib/bitncmp.c",
                "third_party/cares/cares/src/lib/ares_writev.c",
                "third_party/cares/cares/src/lib/ares_version.c",
                "third_party/cares/cares/src/lib/ares_timeout.c",
                "third_party/cares/cares/src/lib/ares_strerror.c",
                "third_party/cares/cares/src/lib/ares_strcasecmp.c",
                "third_party/cares/cares/src/lib/ares_search.c",
                "third_party/cares/cares/src/lib/ares_platform.c",
                "third_party/cares/cares/src/lib/windows_port.c",
                "third_party/cares/cares/src/lib/inet_ntop.c",
                "third_party/cares/cares/src/lib/ares__sortaddrinfo.c",
                "third_party/cares/cares/src/lib/ares__readaddrinfo.c",
                "third_party/cares/cares/src/lib/ares_parse_uri_reply.c",
                "third_party/cares/cares/src/lib/ares__parse_into_addrinfo.c",
                "third_party/cares/cares/src/lib/ares_parse_a_reply.c",
                "third_party/cares/cares/src/lib/ares_parse_aaaa_reply.c",
                "third_party/cares/cares/src/lib/ares_library_init.c",
                "third_party/cares/cares/src/lib/ares_init.c",
                "third_party/cares/cares/src/lib/ares_gethostbyname.c",
                "third_party/cares/cares/src/lib/ares_getaddrinfo.c",
                "third_party/cares/cares/src/lib/ares_freeaddrinfo.c",
                "third_party/cares/cares/src/lib/ares_expand_name.c",
                "third_party/cares/cares/src/lib/ares_destroy.c",
                "third_party/cares/cares/src/lib/ares_data.c",
                "third_party/cares/cares/src/lib/ares__addrinfo_localhost.c",
                "third_party/cares/cares/src/lib/ares__addrinfo2hostent.c",
                "third_party/cares/cares/src/lib/inet_net_pton.c",
                "third_party/cares/cares/src/lib/ares_strsplit.c",
                "third_party/cares/cares/src/lib/ares_strdup.c",
                "third_party/cares/cares/src/lib/ares_send.c",
                "third_party/cares/cares/src/lib/ares_rand.c",
                "third_party/cares/cares/src/lib/ares_query.c",
                "third_party/cares/cares/src/lib/ares_process.c",
            ],
            # Includes the per-platform canned ares_config.h variants.
            "headers": [
                "third_party/cares/ares_build.h",
                "third_party/cares/cares/include/ares_version.h",
                "third_party/cares/cares/include/ares.h",
                "third_party/cares/cares/include/ares_rules.h",
                "third_party/cares/cares/include/ares_dns.h",
                "third_party/cares/cares/include/ares_nameser.h",
                "third_party/cares/cares/src/tools/ares_getopt.h",
                "third_party/cares/cares/src/lib/ares_strsplit.h",
                "third_party/cares/cares/src/lib/ares_android.h",
                "third_party/cares/cares/src/lib/ares_private.h",
                "third_party/cares/cares/src/lib/ares_llist.h",
                "third_party/cares/cares/src/lib/ares_platform.h",
                "third_party/cares/cares/src/lib/ares_ipv6.h",
                "third_party/cares/cares/src/lib/config-dos.h",
                "third_party/cares/cares/src/lib/bitncmp.h",
                "third_party/cares/cares/src/lib/ares_strcasecmp.h",
                "third_party/cares/cares/src/lib/setup_once.h",
                "third_party/cares/cares/src/lib/ares_inet_net_pton.h",
                "third_party/cares/cares/src/lib/ares_data.h",
                "third_party/cares/cares/src/lib/ares_getenv.h",
                "third_party/cares/cares/src/lib/config-win32.h",
                "third_party/cares/cares/src/lib/ares_strdup.h",
                "third_party/cares/cares/src/lib/ares_iphlpapi.h",
                "third_party/cares/cares/src/lib/ares_setup.h",
                "third_party/cares/cares/src/lib/ares_writev.h",
                "third_party/cares/cares/src/lib/ares_nowarn.h",
                "third_party/cares/config_darwin/ares_config.h",
                "third_party/cares/config_freebsd/ares_config.h",
                "third_party/cares/config_linux/ares_config.h",
                "third_party/cares/config_openbsd/ares_config.h",
            ],
        }
    ]
except:
    pass
# Emit the build metadata as YAML on stdout for the build-yaml tooling.
print(yaml.dump(out))
| 8,175
| 50.421384
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_channelz/setup.py
|
# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup module for the GRPC Python package's Channelz."""
import os
import sys
import setuptools
_PACKAGE_PATH = os.path.realpath(os.path.dirname(__file__))
_README_PATH = os.path.join(_PACKAGE_PATH, "README.rst")
# Ensure we're in the proper directory whether or not we're being used by pip.
os.chdir(os.path.dirname(os.path.abspath(__file__)))
# Break import-style to ensure we can actually find our local modules.
import grpc_version
class _NoOpCommand(setuptools.Command):
    """Placeholder command that performs no work.

    Wired into COMMAND_CLASS when the build-environment-only command
    implementations are unavailable, so external consumers that invoke
    these command names do not break.
    """

    description = ""
    user_options = []

    def initialize_options(self):
        """Nothing to initialize."""

    def finalize_options(self):
        """Nothing to finalize."""

    def run(self):
        """Intentionally a no-op."""
CLASSIFIERS = [
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"License :: OSI Approved :: Apache Software License",
]
PACKAGE_DIRECTORIES = {
"": ".",
}
INSTALL_REQUIRES = (
"protobuf>=4.21.6",
"grpcio>={version}".format(version=grpc_version.VERSION),
)
# Choose real build commands when running inside the gRPC source tree and
# inert no-ops when building from a distributed sdist (where the
# channelz_commands module is absent).
try:
    import channelz_commands as _channelz_commands
    # we are in the build environment, otherwise the above import fails
    SETUP_REQUIRES = (
        "grpcio-tools=={version}".format(version=grpc_version.VERSION),
    )
    COMMAND_CLASS = {
        # Run preprocess from the repository *before* doing any packaging!
        "preprocess": _channelz_commands.Preprocess,
        "build_package_protos": _channelz_commands.BuildPackageProtos,
    }
except ImportError:
    SETUP_REQUIRES = ()
    COMMAND_CLASS = {
        # wire up commands to no-op not to break the external dependencies
        "preprocess": _NoOpCommand,
        "build_package_protos": _NoOpCommand,
    }
# Read the long description up front with a context manager so the file
# handle is closed promptly (the previous inline open() leaked the handle
# until interpreter exit).
with open(_README_PATH, "r") as _readme_file:
    _LONG_DESCRIPTION = _readme_file.read()

setuptools.setup(
    name="grpcio-channelz",
    version=grpc_version.VERSION,
    license="Apache License 2.0",
    description="Channel Level Live Debug Information Service for gRPC",
    long_description=_LONG_DESCRIPTION,
    author="The gRPC Authors",
    author_email="grpc-io@googlegroups.com",
    classifiers=CLASSIFIERS,
    url="https://grpc.io",
    package_dir=PACKAGE_DIRECTORIES,
    packages=setuptools.find_packages("."),
    python_requires=">=3.6",
    install_requires=INSTALL_REQUIRES,
    setup_requires=SETUP_REQUIRES,
    cmdclass=COMMAND_CLASS,
)
| 3,257
| 29.166667
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_channelz/grpc_version.py
|
# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_channelz/grpc_version.py.template`!!!
VERSION = '1.57.0.dev0'
| 706
| 38.277778
| 99
|
py
|
grpc
|
grpc-master/src/python/grpcio_channelz/channelz_commands.py
|
# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides distutils command classes for the GRPC Python setup process."""
import os
import shutil
import setuptools
ROOT_DIR = os.path.abspath(os.path.dirname(os.path.abspath(__file__)))
CHANNELZ_PROTO = os.path.join(
ROOT_DIR, "../../proto/grpc/channelz/channelz.proto"
)
LICENSE = os.path.join(ROOT_DIR, "../../../LICENSE")
class Preprocess(setuptools.Command):
    """Copy the channelz proto and the LICENSE file into the package tree.

    Both copies are best-effort: a source file that does not exist (e.g.
    when building outside the repository) is silently skipped.
    """

    description = ""
    user_options = []

    def initialize_options(self):
        """No options to set up."""

    def finalize_options(self):
        """No options to validate."""

    def run(self):
        # (source, destination) pairs, copied only when the source exists.
        copies = (
            (
                CHANNELZ_PROTO,
                os.path.join(ROOT_DIR, "grpc_channelz/v1/channelz.proto"),
            ),
            (LICENSE, os.path.join(ROOT_DIR, "LICENSE")),
        )
        for source, destination in copies:
            if os.path.isfile(source):
                shutil.copyfile(source, destination)
class BuildPackageProtos(setuptools.Command):
    """Generate project *_pb2.py modules from the package's proto files."""

    description = "build grpc protobuf modules"
    user_options = []

    def initialize_options(self):
        """No options to set up."""

    def finalize_options(self):
        """No options to validate."""

    def run(self):
        """Run the grpc_tools proto generator over the package root.

        The proto generator supports only a *single* 'include' directory,
        so the directory registered under the '' key of
        ``self.distribution.package_dir`` is used (a KeyError surfaces if
        that key is missing).
        """
        from grpc_tools import command

        command.build_package_protos(self.distribution.package_dir[""])
| 2,193
| 29.901408
| 84
|
py
|
grpc
|
grpc-master/src/python/grpcio_channelz/grpc_channelz/__init__.py
|
# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 580
| 40.5
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_channelz/grpc_channelz/v1/_async.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""AsyncIO version of Channelz servicer."""
from grpc.experimental import aio
from grpc_channelz.v1._servicer import ChannelzServicer as _SyncChannelzServicer
import grpc_channelz.v1.channelz_pb2 as _channelz_pb2
import grpc_channelz.v1.channelz_pb2_grpc as _channelz_pb2_grpc
class ChannelzServicer(_channelz_pb2_grpc.ChannelzServicer):
    """AsyncIO servicer for handling RPCs for service statuses.

    Every handler is a thin coroutine that delegates to the shared
    synchronous implementation in ``_servicer.ChannelzServicer``.
    NOTE(review): the delegation runs inline on the event loop — presumably
    the underlying C-core queries are fast enough not to stall it; confirm.
    """
    @staticmethod
    async def GetTopChannels(
        request: _channelz_pb2.GetTopChannelsRequest,
        context: aio.ServicerContext,
    ) -> _channelz_pb2.GetTopChannelsResponse:
        """Delegate to the synchronous GetTopChannels implementation."""
        return _SyncChannelzServicer.GetTopChannels(request, context)
    @staticmethod
    async def GetServers(
        request: _channelz_pb2.GetServersRequest, context: aio.ServicerContext
    ) -> _channelz_pb2.GetServersResponse:
        """Delegate to the synchronous GetServers implementation."""
        return _SyncChannelzServicer.GetServers(request, context)
    @staticmethod
    async def GetServer(
        request: _channelz_pb2.GetServerRequest, context: aio.ServicerContext
    ) -> _channelz_pb2.GetServerResponse:
        """Delegate to the synchronous GetServer implementation."""
        return _SyncChannelzServicer.GetServer(request, context)
    @staticmethod
    async def GetServerSockets(
        request: _channelz_pb2.GetServerSocketsRequest,
        context: aio.ServicerContext,
    ) -> _channelz_pb2.GetServerSocketsResponse:
        """Delegate to the synchronous GetServerSockets implementation."""
        return _SyncChannelzServicer.GetServerSockets(request, context)
    @staticmethod
    async def GetChannel(
        request: _channelz_pb2.GetChannelRequest, context: aio.ServicerContext
    ) -> _channelz_pb2.GetChannelResponse:
        """Delegate to the synchronous GetChannel implementation."""
        return _SyncChannelzServicer.GetChannel(request, context)
    @staticmethod
    async def GetSubchannel(
        request: _channelz_pb2.GetSubchannelRequest,
        context: aio.ServicerContext,
    ) -> _channelz_pb2.GetSubchannelResponse:
        """Delegate to the synchronous GetSubchannel implementation."""
        return _SyncChannelzServicer.GetSubchannel(request, context)
    @staticmethod
    async def GetSocket(
        request: _channelz_pb2.GetSocketRequest, context: aio.ServicerContext
    ) -> _channelz_pb2.GetSocketResponse:
        """Delegate to the synchronous GetSocket implementation."""
        return _SyncChannelzServicer.GetSocket(request, context)
| 2,690
| 38
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_channelz/grpc_channelz/v1/channelz.py
|
# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Channelz debug service implementation in gRPC Python."""
import sys
import grpc
from grpc_channelz.v1._servicer import ChannelzServicer
import grpc_channelz.v1.channelz_pb2_grpc as _channelz_pb2_grpc
_add_channelz_servicer_doc = """Add Channelz servicer to a server.
Channelz servicer is in charge of
pulling information from C-Core for entire process. It will allow the
server to response to Channelz queries.
The Channelz statistic is enabled by default inside C-Core. Whether the
statistic is enabled or not is isolated from adding Channelz servicer.
That means you can query Channelz info with a Channelz-disabled channel,
and you can add Channelz servicer to a Channelz-disabled server.
The Channelz statistic can be enabled or disabled by channel option
'grpc.enable_channelz'. Set to 1 to enable, set to 0 to disable.
This is an EXPERIMENTAL API.
Args:
server: A gRPC server to which Channelz service will be added.
"""
# Tuple comparison is the correct way to gate on an interpreter version:
# the previous element-wise test (major >= 3 and minor >= 6) would wrongly
# reject any future major release with a minor below 6 (e.g. 4.0).
if sys.version_info >= (3, 6):
    from grpc_channelz.v1 import _async as aio

    def add_channelz_servicer(server):
        # Dispatch on server flavor: aio servers get the AsyncIO servicer,
        # everything else the synchronous one.
        if isinstance(server, grpc.experimental.aio.Server):
            _channelz_pb2_grpc.add_ChannelzServicer_to_server(
                aio.ChannelzServicer(), server
            )
        else:
            _channelz_pb2_grpc.add_ChannelzServicer_to_server(
                ChannelzServicer(), server
            )

    # The public docstring is shared between both branch definitions.
    add_channelz_servicer.__doc__ = _add_channelz_servicer_doc
    __all__ = [
        "aio",
        "add_channelz_servicer",
        "ChannelzServicer",
    ]
else:
    def add_channelz_servicer(server):
        _channelz_pb2_grpc.add_ChannelzServicer_to_server(
            ChannelzServicer(), server
        )

    add_channelz_servicer.__doc__ = _add_channelz_servicer_doc
    __all__ = [
        "add_channelz_servicer",
        "ChannelzServicer",
    ]
| 2,453
| 31.289474
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_channelz/grpc_channelz/v1/_servicer.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Channelz debug service implementation in gRPC Python."""
from google.protobuf import json_format
import grpc
from grpc._cython import cygrpc
import grpc_channelz.v1.channelz_pb2 as _channelz_pb2
import grpc_channelz.v1.channelz_pb2_grpc as _channelz_pb2_grpc
class ChannelzServicer(_channelz_pb2_grpc.ChannelzServicer):
    """Servicer handling RPCs for service statuses.

    Every handler follows the same shape: ask the C-core (via cygrpc) for a
    JSON rendering of the requested entity and parse it into the matching
    response proto.  On failure the handler records an error status on the
    RPC context and implicitly returns None, terminating the RPC with that
    status.  The shared logic lives in the private _serve_json helper.
    """

    @staticmethod
    def _serve_json(context, query, response, value_error_code):
        """Run *query* and parse its JSON result into *response*.

        Args:
            context: ServicerContext used to report errors to the client.
            query: Zero-argument callable returning a serialized-JSON
                string; may raise ValueError (e.g. for an unknown entity
                id).
            response: Fresh response message for json_format.Parse to fill.
            value_error_code: grpc.StatusCode reported when ValueError is
                raised (NOT_FOUND for id lookups, INTERNAL otherwise) —
                this preserves each handler's original status mapping.

        Returns:
            The populated response message, or None after recording an
            error status on the context.
        """
        try:
            return json_format.Parse(query(), response)
        except ValueError as e:
            context.set_code(value_error_code)
            context.set_details(str(e))
        except json_format.ParseError as e:
            # The C-core handed back JSON the proto parser could not accept.
            context.set_code(grpc.StatusCode.INTERNAL)
            context.set_details(str(e))

    @staticmethod
    def GetTopChannels(request, context):
        return ChannelzServicer._serve_json(
            context,
            lambda: cygrpc.channelz_get_top_channels(request.start_channel_id),
            _channelz_pb2.GetTopChannelsResponse(),
            grpc.StatusCode.INTERNAL,
        )

    @staticmethod
    def GetServers(request, context):
        return ChannelzServicer._serve_json(
            context,
            lambda: cygrpc.channelz_get_servers(request.start_server_id),
            _channelz_pb2.GetServersResponse(),
            grpc.StatusCode.INTERNAL,
        )

    @staticmethod
    def GetServer(request, context):
        return ChannelzServicer._serve_json(
            context,
            lambda: cygrpc.channelz_get_server(request.server_id),
            _channelz_pb2.GetServerResponse(),
            grpc.StatusCode.NOT_FOUND,
        )

    @staticmethod
    def GetServerSockets(request, context):
        return ChannelzServicer._serve_json(
            context,
            lambda: cygrpc.channelz_get_server_sockets(
                request.server_id,
                request.start_socket_id,
                request.max_results,
            ),
            _channelz_pb2.GetServerSocketsResponse(),
            grpc.StatusCode.NOT_FOUND,
        )

    @staticmethod
    def GetChannel(request, context):
        return ChannelzServicer._serve_json(
            context,
            lambda: cygrpc.channelz_get_channel(request.channel_id),
            _channelz_pb2.GetChannelResponse(),
            grpc.StatusCode.NOT_FOUND,
        )

    @staticmethod
    def GetSubchannel(request, context):
        return ChannelzServicer._serve_json(
            context,
            lambda: cygrpc.channelz_get_subchannel(request.subchannel_id),
            _channelz_pb2.GetSubchannelResponse(),
            grpc.StatusCode.NOT_FOUND,
        )

    @staticmethod
    def GetSocket(request, context):
        return ChannelzServicer._serve_json(
            context,
            lambda: cygrpc.channelz_get_socket(request.socket_id),
            _channelz_pb2.GetSocketResponse(),
            grpc.StatusCode.NOT_FOUND,
        )
| 4,446
| 35.752066
| 75
|
py
|
grpc
|
grpc-master/src/python/grpcio_channelz/grpc_channelz/v1/__init__.py
|
# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 580
| 40.5
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/setup.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A setup module for the gRPC Python package."""
import multiprocessing
import os
import os.path
import sys
import grpc_tools.command
import setuptools
PY3 = sys.version_info.major == 3
# Ensure we're in the proper directory whether or not we're being used by pip.
os.chdir(os.path.dirname(os.path.abspath(__file__)))
# Break import-style to ensure we can actually find our in-repo dependencies.
import commands
import grpc_version
LICENSE = "Apache License 2.0"
PACKAGE_DIRECTORIES = {
"": ".",
}
INSTALL_REQUIRES = (
"coverage>=4.0",
"grpcio>={version}".format(version=grpc_version.VERSION),
"grpcio-channelz>={version}".format(version=grpc_version.VERSION),
"grpcio-status>={version}".format(version=grpc_version.VERSION),
"grpcio-tools>={version}".format(version=grpc_version.VERSION),
"grpcio-health-checking>={version}".format(version=grpc_version.VERSION),
"oauth2client>=1.4.7",
"protobuf>=4.21.6rc1,!=4.22.0.*",
"google-auth>=1.17.2",
"requests>=2.14.2",
)
COMMAND_CLASS = {
# Run `preprocess` *before* doing any packaging!
"preprocess": commands.GatherProto,
"build_package_protos": grpc_tools.command.BuildPackageProtos,
"build_py": commands.BuildPy,
"run_fork": commands.RunFork,
"run_interop": commands.RunInterop,
"test_lite": commands.TestLite,
"test_gevent": commands.TestGevent,
"test_aio": commands.TestAio,
"test_py3_only": commands.TestPy3Only,
}
PACKAGE_DATA = {
"tests.interop": [
"credentials/ca.pem",
"credentials/server1.key",
"credentials/server1.pem",
],
"tests.protoc_plugin.protos.invocation_testing": [
"same.proto",
"compiler.proto",
],
"tests.protoc_plugin.protos.invocation_testing.split_messages": [
"messages.proto",
],
"tests.protoc_plugin.protos.invocation_testing.split_services": [
"services.proto",
],
"tests.testing.proto": [
"requests.proto",
"services.proto",
],
"tests.unit": [
"credentials/ca.pem",
"credentials/server1.key",
"credentials/server1.pem",
],
"tests": ["tests.json"],
}
TEST_SUITE = "tests"
TEST_LOADER = "tests:Loader"
TEST_RUNNER = "tests:Runner"
TESTS_REQUIRE = INSTALL_REQUIRES
PACKAGES = setuptools.find_packages(".")
if __name__ == "__main__":
    # Required for multiprocessing to behave when the entry point is frozen
    # or when the 'spawn' start method re-imports this module (notably on
    # Windows).
    multiprocessing.freeze_support()
    setuptools.setup(
        name="grpcio-tests",
        version=grpc_version.VERSION,
        license=LICENSE,
        packages=list(PACKAGES),
        package_dir=PACKAGE_DIRECTORIES,
        package_data=PACKAGE_DATA,
        install_requires=INSTALL_REQUIRES,
        cmdclass=COMMAND_CLASS,
        tests_require=TESTS_REQUIRE,
        test_suite=TEST_SUITE,
        test_loader=TEST_LOADER,
        test_runner=TEST_RUNNER,
    )
| 3,406
| 28.37069
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/grpc_version.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_tests/grpc_version.py.template`!!!
VERSION = '1.57.0.dev0'
| 700
| 37.944444
| 96
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/commands.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides distutils command classes for the gRPC Python setup process."""
from distutils import errors as _errors
import glob
import os
import os.path
import platform
import re
import shutil
import sys
import setuptools
from setuptools.command import build_ext
from setuptools.command import build_py
from setuptools.command import easy_install
from setuptools.command import install
from setuptools.command import test
PYTHON_STEM = os.path.dirname(os.path.abspath(__file__))
GRPC_STEM = os.path.abspath(PYTHON_STEM + "../../../../")
GRPC_PROTO_STEM = os.path.join(GRPC_STEM, "src", "proto")
PROTO_STEM = os.path.join(PYTHON_STEM, "src", "proto")
PYTHON_PROTO_TOP_LEVEL = os.path.join(PYTHON_STEM, "src")
class CommandError(Exception):
    """Simple exception raised by custom gRPC setup commands.

    Must derive from Exception (not object): the class is used in
    ``except CommandError`` clauses (see BuildPy.run), and Python raises
    TypeError for an except clause naming a non-BaseException class.
    """
class GatherProto(setuptools.Command):
    """Copy proto dependencies from the repository into the test tree."""

    description = "gather proto dependencies"
    user_options = []

    def initialize_options(self):
        """No options to set up."""

    def finalize_options(self):
        """No options to validate."""

    def run(self):
        # TODO(atash) ensure that we're running from the repository directory when
        # this command is used
        # Best-effort wipe of any previous copy; a missing tree is fine.
        try:
            shutil.rmtree(PROTO_STEM)
        except Exception:
            pass
        shutil.copytree(GRPC_PROTO_STEM, PROTO_STEM)
        # Touch an __init__.py in every directory so the copied protos are
        # importable as packages.
        for directory, _, _ in os.walk(PYTHON_PROTO_TOP_LEVEL):
            init_path = os.path.join(directory, "__init__.py")
            open(init_path, "a").close()
class BuildPy(build_py.build_py):
    """Custom project build command that generates protos first."""

    def run(self):
        try:
            self.run_command("build_package_protos")
        except CommandError as error:
            # Python 3 exceptions have no `.message` attribute (the
            # original `error.message` would raise AttributeError here);
            # str() is the portable way to render the error text.
            sys.stderr.write("warning: %s\n" % str(error))
        build_py.build_py.run(self)
class TestLite(setuptools.Command):
    """Run the test suite without fetching or building anything."""

    description = "run tests without fetching or building anything."
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        # distutils requires this override.
        pass

    def run(self):
        self._add_eggs_to_path()
        import tests

        suite_loader = tests.Loader()
        suite_loader.loadTestsFromNames(["tests"])
        outcome = tests.Runner(dedicated_threads=True).run(suite_loader.suite)
        if not outcome.wasSuccessful():
            sys.exit("Test failure")

    def _add_eggs_to_path(self):
        """Fetch install and test requirements."""
        dist = self.distribution
        dist.fetch_build_eggs(dist.install_requires)
        dist.fetch_build_eggs(dist.tests_require)
class TestPy3Only(setuptools.Command):
    """Run the tests exercising Python 3+ only features.

    This does not include asyncio tests, which are housed in a separate
    directory.
    """

    description = "run tests for py3+ features"
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        self._add_eggs_to_path()
        import tests

        suite_loader = tests.Loader()
        suite_loader.loadTestsFromNames(["tests_py3_only"])
        outcome = tests.Runner().run(suite_loader.suite)
        if not outcome.wasSuccessful():
            sys.exit("Test failure")

    def _add_eggs_to_path(self):
        """Fetch install and test requirements."""
        dist = self.distribution
        dist.fetch_build_eggs(dist.install_requires)
        dist.fetch_build_eggs(dist.tests_require)
class TestAio(setuptools.Command):
    """Run the asyncio test suite without fetching or building anything."""

    description = "run aio tests without fetching or building anything."
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        self._add_eggs_to_path()
        import tests

        suite_loader = tests.Loader()
        suite_loader.loadTestsFromNames(["tests_aio"])
        # Even without dedicated threads, the framework will somehow spawn a
        # new thread for tests to run upon. New thread doesn't have event loop
        # attached by default, so initialization is needed.
        outcome = tests.Runner(dedicated_threads=False).run(suite_loader.suite)
        if not outcome.wasSuccessful():
            sys.exit("Test failure")

    def _add_eggs_to_path(self):
        """Fetch install and test requirements."""
        dist = self.distribution
        dist.fetch_build_eggs(dist.install_requires)
        dist.fetch_build_eggs(dist.tests_require)
class TestGevent(setuptools.Command):
    """Command to run tests w/gevent.

    Fix over the original: run() imported `gevent` and `tests` twice;
    the redundant imports are removed (behavior unchanged — re-imports
    were no-ops).
    """

    # Tests that must be skipped under gevent on every platform.
    BANNED_TESTS = (
        # Fork support is not compatible with gevent
        "fork._fork_interop_test.ForkInteropTest",
        # These tests send a lot of RPCs and are really slow on gevent. They will
        # eventually succeed, but need to dig into performance issues.
        "unit._cython._no_messages_server_completion_queue_per_call_test.Test.test_rpcs",
        "unit._cython._no_messages_single_server_completion_queue_test.Test.test_rpcs",
        "unit._compression_test",
        # TODO(https://github.com/grpc/grpc/issues/16890) enable this test
        "unit._cython._channel_test.ChannelTest.test_multiple_channels_lonely_connectivity",
        # I have no idea why this doesn't work in gevent, but it shouldn't even be
        # using the c-core
        "testing._client_test.ClientTest.test_infinite_request_stream_real_time",
        # TODO(https://github.com/grpc/grpc/issues/15743) enable this test
        "unit._session_cache_test.SSLSessionCacheTest.testSSLSessionCacheLRU",
        # TODO(https://github.com/grpc/grpc/issues/14789) enable this test
        "unit._server_ssl_cert_config_test",
        # TODO(https://github.com/grpc/grpc/issues/14901) enable this test
        "protoc_plugin._python_plugin_test.PythonPluginTest",
        "protoc_plugin._python_plugin_test.SimpleStubsPluginTest",
        # Beta API is unsupported for gevent
        "protoc_plugin.beta_python_plugin_test",
        "unit.beta._beta_features_test",
        # TODO(https://github.com/grpc/grpc/issues/15411) unpin gevent version
        # This test will stuck while running higher version of gevent
        "unit._auth_context_test.AuthContextTest.testSessionResumption",
        # TODO(https://github.com/grpc/grpc/issues/15411) enable these tests
        "unit._channel_ready_future_test.ChannelReadyFutureTest.test_immediately_connectable_channel_connectivity",
        "unit._cython._channel_test.ChannelTest.test_single_channel_lonely_connectivity",
        "unit._exit_test.ExitTest.test_in_flight_unary_unary_call",
        "unit._exit_test.ExitTest.test_in_flight_unary_stream_call",
        "unit._exit_test.ExitTest.test_in_flight_stream_unary_call",
        "unit._exit_test.ExitTest.test_in_flight_stream_stream_call",
        "unit._exit_test.ExitTest.test_in_flight_partial_unary_stream_call",
        "unit._exit_test.ExitTest.test_in_flight_partial_stream_unary_call",
        "unit._exit_test.ExitTest.test_in_flight_partial_stream_stream_call",
        # TODO(https://github.com/grpc/grpc/issues/18980): Reenable.
        "unit._signal_handling_test.SignalHandlingTest",
        "unit._metadata_flags_test",
        "health_check._health_servicer_test.HealthServicerTest.test_cancelled_watch_removed_from_watch_list",
        # TODO(https://github.com/grpc/grpc/issues/17330) enable these three tests
        "channelz._channelz_servicer_test.ChannelzServicerTest.test_many_subchannels",
        "channelz._channelz_servicer_test.ChannelzServicerTest.test_many_subchannels_and_sockets",
        "channelz._channelz_servicer_test.ChannelzServicerTest.test_streaming_rpc",
        # TODO(https://github.com/grpc/grpc/issues/15411) enable this test
        "unit._cython._channel_test.ChannelTest.test_negative_deadline_connectivity",
        # TODO(https://github.com/grpc/grpc/issues/15411) enable this test
        "unit._local_credentials_test.LocalCredentialsTest",
        # TODO(https://github.com/grpc/grpc/issues/22020) LocalCredentials
        # aren't supported with custom io managers.
        "unit._contextvars_propagation_test",
        "testing._time_test.StrictRealTimeTest",
    )
    # Additional skips on Windows only.
    BANNED_WINDOWS_TESTS = (
        # TODO(https://github.com/grpc/grpc/pull/15411) enable this test
        "unit._dns_resolver_test.DNSResolverTest.test_connect_loopback",
        # TODO(https://github.com/grpc/grpc/pull/15411) enable this test
        "unit._server_test.ServerTest.test_failed_port_binding_exception",
    )
    # Additional skips on macOS only.
    BANNED_MACOS_TESTS = (
        # TODO(https://github.com/grpc/grpc/issues/15411) enable this test
        "unit._dynamic_stubs_test.DynamicStubTest",
    )
    description = "run tests with gevent. Assumes grpc/gevent are installed"
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        # distutils requires this override.
        pass

    def run(self):
        import gevent
        from gevent import monkey

        monkey.patch_all()
        threadpool = gevent.hub.get_hub().threadpool
        # Currently, each channel corresponds to a single native thread in the
        # gevent threadpool. Thus, when the unit test suite spins up hundreds of
        # channels concurrently, some will be starved out, causing the test to
        # increase in duration. We increase the max size here so this does not
        # happen.
        threadpool.maxsize = 1024
        threadpool.size = 32
        import grpc.experimental.gevent
        import tests

        grpc.experimental.gevent.init_gevent()
        loader = tests.Loader()
        loader.loadTestsFromNames(["tests", "tests_gevent"])
        runner = tests.Runner()
        if sys.platform == "win32":
            runner.skip_tests(self.BANNED_TESTS + self.BANNED_WINDOWS_TESTS)
        elif sys.platform == "darwin":
            runner.skip_tests(self.BANNED_TESTS + self.BANNED_MACOS_TESTS)
        else:
            runner.skip_tests(self.BANNED_TESTS)
        # Run the suite inside a greenlet so the tests themselves execute
        # under gevent's scheduler.
        result = gevent.spawn(runner.run, loader.suite)
        result.join()
        if not result.value.wasSuccessful():
            sys.exit("Test failure")
class RunInterop(test.test):
    """Run the interop test client or server as a setup.py command."""
    description = "run interop test client/server"
    user_options = [
        ("args=", None, "pass-thru arguments for the client/server"),
        ("client", None, "flag indicating to run the client"),
        ("server", None, "flag indicating to run the server"),
        ("use-asyncio", None, "flag indicating to run the asyncio stack"),
    ]
    def initialize_options(self):
        # Defaults: no pass-thru args, neither role selected, sync stack.
        self.args = ""
        self.client = False
        self.server = False
        self.use_asyncio = False
    def finalize_options(self):
        # The two roles are mutually exclusive.
        if self.client and self.server:
            raise _errors.DistutilsOptionError(
                "you may only specify one of client or server"
            )
    def run(self):
        # Fetch dependency eggs first so the interop modules are importable.
        if self.distribution.install_requires:
            self.distribution.fetch_build_eggs(
                self.distribution.install_requires
            )
        if self.distribution.tests_require:
            self.distribution.fetch_build_eggs(self.distribution.tests_require)
        # Note: when neither --client nor --server is given this is a no-op.
        if self.client:
            self.run_client()
        elif self.server:
            self.run_server()
    def run_server(self):
        # We import here to ensure that our setuptools parent has had a chance to
        # edit the Python system path.
        if self.use_asyncio:
            import asyncio
            from tests_aio.interop import server
            # Pass-thru args are handed to the server via sys.argv.
            sys.argv[1:] = self.args.split()
            # NOTE(review): asyncio.get_event_loop() is deprecated since
            # Python 3.10; asyncio.run(server.serve()) is the modern
            # equivalent — confirm server.serve() has no loop assumptions
            # before changing.
            asyncio.get_event_loop().run_until_complete(server.serve())
        else:
            from tests.interop import server
            sys.argv[1:] = self.args.split()
            server.serve()
    def run_client(self):
        # We import here to ensure that our setuptools parent has had a chance to
        # edit the Python system path.
        from tests.interop import client
        sys.argv[1:] = self.args.split()
        client.test_interoperability()
class RunFork(test.test):
    """Run the fork-support test client."""

    description = "run fork test client"
    user_options = [("args=", "a", "pass-thru arguments for the client")]

    def initialize_options(self):
        self.args = ""

    def finalize_options(self):
        # distutils requires this override.
        pass

    def run(self):
        dist = self.distribution
        if dist.install_requires:
            dist.fetch_build_eggs(dist.install_requires)
        if dist.tests_require:
            dist.fetch_build_eggs(dist.tests_require)
        # We import here to ensure that our setuptools parent has had a
        # chance to edit the Python system path.
        from tests.fork import client

        sys.argv[1:] = self.args.split()
        client.test_fork()
| 13,621
| 35.520107
| 115
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_gevent/__init__.py
|
# Copyright 2021 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 580
| 40.5
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_gevent/unit/close_channel_test.py
|
# Copyright 2021 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
import gevent
from gevent.pool import Group
import grpc
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
from tests_gevent.unit._test_server import start_test_server
# Full method path of the sleep-enabled unary call that the test server
# registers via its extra generic handler (not declared in the proto file).
_UNARY_CALL_METHOD_WITH_SLEEP = "/grpc.testing.TestService/UnaryCallWithSleep"
class CloseChannelTest(unittest.TestCase):
    """Verifies channel-close semantics when RPCs run inside gevent greenlets."""

    def setUp(self):
        self._server_target, self._server = start_test_server()
        self._channel = grpc.insecure_channel(self._server_target)
        self._unhandled_exception = False
        # Remember the interpreter's previous hook so tearDown can restore it;
        # the original code leaked the override into subsequent tests.
        self._original_excepthook = sys.excepthook
        sys.excepthook = self._global_exception_handler

    def tearDown(self):
        sys.excepthook = self._original_excepthook
        self._channel.close()
        self._server.stop(None)

    def test_graceful_close(self):
        stub = test_pb2_grpc.TestServiceStub(self._channel)
        # with_call returns (response, call); only the call object is needed.
        _, call = stub.UnaryCall.with_call(messages_pb2.SimpleRequest())
        self._channel.close()
        self.assertEqual(grpc.StatusCode.OK, call.code())

    def test_graceful_close_in_greenlet(self):
        group = Group()
        stub = test_pb2_grpc.TestServiceStub(self._channel)
        greenlet = group.spawn(self._run_client, stub.UnaryCall)
        # Release the loop so that the greenlet can take control.
        gevent.sleep()
        self._channel.close()
        group.killone(greenlet)
        self.assertFalse(self._unhandled_exception, "Unhandled GreenletExit")
        try:
            greenlet.get()
        except Exception as e:  # pylint: disable=broad-except
            self.fail(f"Unexpected exception in greenlet: {e}")

    def test_ungraceful_close_in_greenlet(self):
        group = Group()
        UnaryCallWithSleep = self._channel.unary_unary(
            _UNARY_CALL_METHOD_WITH_SLEEP,
            request_serializer=messages_pb2.SimpleRequest.SerializeToString,
            response_deserializer=messages_pb2.SimpleResponse.FromString,
        )
        greenlet = group.spawn(self._run_client, UnaryCallWithSleep)
        # Release the loop so that the greenlet can take control.
        gevent.sleep()
        group.killone(greenlet)
        self.assertFalse(self._unhandled_exception, "Unhandled GreenletExit")

    def test_kill_greenlet_with_generic_exception(self):
        group = Group()
        UnaryCallWithSleep = self._channel.unary_unary(
            _UNARY_CALL_METHOD_WITH_SLEEP,
            request_serializer=messages_pb2.SimpleRequest.SerializeToString,
            response_deserializer=messages_pb2.SimpleResponse.FromString,
        )
        greenlet = group.spawn(self._run_client, UnaryCallWithSleep)
        # Release the loop so that the greenlet can take control.
        gevent.sleep()
        group.killone(greenlet, exception=Exception)
        self.assertFalse(self._unhandled_exception, "Unhandled exception")
        self.assertRaises(Exception, greenlet.get)

    def _run_client(self, call):
        # CANCELLED is the expected outcome when the channel is closed or the
        # greenlet is killed mid-RPC; anything else is a real failure.
        try:
            call.with_call(messages_pb2.SimpleRequest())
        except grpc.RpcError as e:
            if e.code() != grpc.StatusCode.CANCELLED:
                raise

    def _global_exception_handler(self, exctype, value, tb):
        # Record an unhandled GreenletExit instead of printing it; the tests
        # assert that no such exception escapes a greenlet.
        if exctype == gevent.GreenletExit:
            self._unhandled_exception = True
            return
        sys.__excepthook__(exctype, value, tb)
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main(verbosity=2)
| 3,891
| 36.066667
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_gevent/unit/__init__.py
|
# Copyright 2021 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 580
| 40.5
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_gevent/unit/_test_server.py
|
# Copyright 2021 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from concurrent import futures
from typing import Any, Tuple
import gevent
import grpc
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
# Seconds that UnaryCallWithSleep sleeps (cooperatively) before responding.
LONG_UNARY_CALL_WITH_SLEEP_VALUE = 1
class TestServiceServicer(test_pb2_grpc.TestServiceServicer):
    """Minimal TestService implementation for the gevent unit tests."""

    def UnaryCall(self, request, context):
        """Immediately answer with an empty SimpleResponse."""
        del request, context  # unused
        return messages_pb2.SimpleResponse()

    def UnaryCallWithSleep(self, unused_request, unused_context):
        """Sleep cooperatively for the configured interval, then answer."""
        gevent.sleep(LONG_UNARY_CALL_WITH_SLEEP_VALUE)
        return messages_pb2.SimpleResponse()
def start_test_server(port: int = 0) -> Tuple[str, Any]:
    """Start an insecure test server.

    Args:
      port: Port to bind; 0 (the default) picks an ephemeral port.

    Returns:
      A ("localhost:<port>", server) tuple.
    """
    server = grpc.server(futures.ThreadPoolExecutor())
    servicer = TestServiceServicer()
    # Register the SAME servicer instance for both the proto-defined methods
    # and the extra generic handler. The original code constructed a second,
    # separate TestServiceServicer() here, so the two registrations served
    # from different instances.
    test_pb2_grpc.add_TestServiceServicer_to_server(servicer, server)
    server.add_generic_rpc_handlers((_create_extra_generic_handler(servicer),))
    port = server.add_insecure_port("[::]:%d" % port)
    server.start()
    return "localhost:%d" % port, server
def _create_extra_generic_handler(servicer: TestServiceServicer) -> Any:
    """Build a generic handler exposing methods the proto file does not
    declare; these are added programmatically and used only by the tests."""
    sleep_handler = grpc.unary_unary_rpc_method_handler(
        servicer.UnaryCallWithSleep,
        request_deserializer=messages_pb2.SimpleRequest.FromString,
        response_serializer=messages_pb2.SimpleResponse.SerializeToString,
    )
    return grpc.method_handlers_generic_handler(
        "grpc.testing.TestService", {"UnaryCallWithSleep": sleep_handler}
    )
| 2,164
| 33.919355
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_py3_only/__init__.py
|
# Copyright 2020 The gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from tests import _loader
from tests import _runner
# Re-export the shared test loader and runner under their public names.
Loader = _loader.Loader
Runner = _runner.Runner
| 723
| 31.909091
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_py3_only/unit/_simple_stubs_test.py
|
# Copyright 2020 The gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for Simple Stubs."""
# TODO(https://github.com/grpc/grpc/issues/21965): Run under setuptools.
import os
_MAXIMUM_CHANNELS = 10
_DEFAULT_TIMEOUT = 1.0
# NOTE(review): these environment variables must be set before `import grpc`
# below — the config appears to be read at import time (see the
# reset_grpc_config_vars call in _env); confirm before reordering imports.
os.environ["GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS"] = "2"
os.environ["GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM"] = str(_MAXIMUM_CHANNELS)
os.environ["GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS"] = str(_DEFAULT_TIMEOUT)
import contextlib
import datetime
import inspect
import logging
import sys
import threading
import time
from typing import Callable, Optional
import unittest
import grpc
import grpc.experimental
from tests.unit import resources
from tests.unit import test_common
from tests.unit.framework.common import get_socket
_REQUEST = b"0000"
# Measurement parameters for assert_cached.
_CACHE_EPOCHS = 8
_CACHE_TRIALS = 6
_SERVER_RESPONSE_COUNT = 10
_CLIENT_REQUEST_COUNT = _SERVER_RESPONSE_COUNT
# Enough iterations to cycle the channel cache several times over.
_STRESS_EPOCHS = _MAXIMUM_CHANNELS * 10
# Method paths served by _GenericHandler below.
_UNARY_UNARY = "/test/UnaryUnary"
_UNARY_STREAM = "/test/UnaryStream"
_STREAM_UNARY = "/test/StreamUnary"
_STREAM_STREAM = "/test/StreamStream"
_BLACK_HOLE = "/test/BlackHole"
@contextlib.contextmanager
def _env(key: str, value: str):
    """Temporarily set environment variable *key* to *value*.

    The previous value (or absence) is restored on exit, and gRPC's cached
    config vars are reset on both entry and exit. The original version did
    not use try/finally (so an exception in the body leaked the variable),
    clobbered any pre-existing value, and never re-reset the config cache.
    """
    previous = os.environ.get(key)
    os.environ[key] = value
    grpc._cython.cygrpc.reset_grpc_config_vars()
    try:
        yield
    finally:
        if previous is None:
            os.environ.pop(key, None)
        else:
            os.environ[key] = previous
        grpc._cython.cygrpc.reset_grpc_config_vars()
def _unary_unary_handler(request, context):
return request
def _unary_stream_handler(request, context):
    """Yield the request back _SERVER_RESPONSE_COUNT times."""
    del context  # unused
    remaining = _SERVER_RESPONSE_COUNT
    while remaining > 0:
        yield request
        remaining -= 1
def _stream_unary_handler(request_iterator, context):
request = None
for single_request in request_iterator:
request = single_request
return request
def _stream_stream_handler(request_iterator, context):
for request in request_iterator:
yield request
def _black_hole_handler(request, context):
event = threading.Event()
def _on_done():
event.set()
context.add_callback(_on_done)
while not event.is_set():
time.sleep(0.1)
class _GenericHandler(grpc.GenericRpcHandler):
    """Routes the test method paths to their handler implementations."""

    def service(self, handler_call_details):
        # (handler factory, handler function) per supported method path.
        route = {
            _UNARY_UNARY: (grpc.unary_unary_rpc_method_handler, _unary_unary_handler),
            _UNARY_STREAM: (grpc.unary_stream_rpc_method_handler, _unary_stream_handler),
            _STREAM_UNARY: (grpc.stream_unary_rpc_method_handler, _stream_unary_handler),
            _STREAM_STREAM: (grpc.stream_stream_rpc_method_handler, _stream_stream_handler),
            _BLACK_HOLE: (grpc.unary_unary_rpc_method_handler, _black_hole_handler),
        }
        entry = route.get(handler_call_details.method)
        if entry is None:
            raise NotImplementedError()
        factory, handler = entry
        return factory(handler)
def _time_invocation(to_time: Callable[[], None]) -> datetime.timedelta:
start = datetime.datetime.now()
to_time()
return datetime.datetime.now() - start
@contextlib.contextmanager
def _server(credentials: Optional[grpc.ServerCredentials]):
    """Run a test server for the duration of the with-block.

    Args:
      credentials: Server credentials, or None for an insecure port.

    Yields:
      The bound port number.
    """
    # Create the server BEFORE entering the try block: the original code
    # could raise NameError from `server.stop(None)` in the finally clause
    # (masking the real error) if test_server() itself failed.
    server = test_common.test_server()
    try:
        target = "[::]:0"
        if credentials is None:
            port = server.add_insecure_port(target)
        else:
            port = server.add_secure_port(target, credentials)
        server.add_generic_rpc_handlers((_GenericHandler(),))
        server.start()
        yield port
    finally:
        server.stop(None)
class SimpleStubsTest(unittest.TestCase):
    """Tests for the module-level "simple stubs" API
    (grpc.experimental.unary_unary and friends), including channel caching,
    eviction, and default timeout/wait_for_ready behavior."""

    def assert_cached(self, to_check: Callable[[str], None]) -> None:
        """Asserts that a function caches intermediate data/state.

        To be specific, given a function whose caching behavior is
        deterministic in the value of a supplied string, this function asserts
        that, on average, subsequent invocations of the function for a specific
        string are faster than first invocations with that same string.

        Args:
          to_check: A function returning nothing, that caches values based on
            an arbitrary supplied string.
        """
        initial_runs = []
        cached_runs = []
        for epoch in range(_CACHE_EPOCHS):
            runs = []
            text = str(epoch)
            for trial in range(_CACHE_TRIALS):
                runs.append(_time_invocation(lambda: to_check(text)))
            # The first run per epoch is "cold"; the rest should hit the cache.
            initial_runs.append(runs[0])
            cached_runs.extend(runs[1:])
        average_cold = sum(
            (run for run in initial_runs), datetime.timedelta()
        ) / len(initial_runs)
        average_warm = sum(
            (run for run in cached_runs), datetime.timedelta()
        ) / len(cached_runs)
        self.assertLess(average_warm, average_cold)

    def assert_eventually(
        self,
        predicate: Callable[[], bool],
        *,
        timeout: Optional[datetime.timedelta] = None,
        message: Optional[Callable[[], str]] = None,
    ) -> None:
        """Polls *predicate* every 0.5s until it is true or *timeout*
        (default 10s) elapses, then fails with *message*."""
        message = message or (lambda: "Proposition did not evaluate to true")
        timeout = timeout or datetime.timedelta(seconds=10)
        end = datetime.datetime.now() + timeout
        while datetime.datetime.now() < end:
            if predicate():
                break
            time.sleep(0.5)
        else:
            self.fail(message() + " after " + str(timeout))

    def test_unary_unary_insecure(self):
        with _server(None) as port:
            target = f"localhost:{port}"
            response = grpc.experimental.unary_unary(
                _REQUEST,
                target,
                _UNARY_UNARY,
                channel_credentials=grpc.experimental.insecure_channel_credentials(),
                timeout=None,
            )
            self.assertEqual(_REQUEST, response)

    def test_unary_unary_secure(self):
        with _server(grpc.local_server_credentials()) as port:
            target = f"localhost:{port}"
            response = grpc.experimental.unary_unary(
                _REQUEST,
                target,
                _UNARY_UNARY,
                channel_credentials=grpc.local_channel_credentials(),
                timeout=None,
            )
            self.assertEqual(_REQUEST, response)

    def test_channels_cached(self):
        with _server(grpc.local_server_credentials()) as port:
            target = f"localhost:{port}"
            test_name = inspect.stack()[0][3]
            args = (_REQUEST, target, _UNARY_UNARY)
            kwargs = {"channel_credentials": grpc.local_channel_credentials()}

            # Distinct channel options force a distinct cache entry per seed.
            def _invoke(seed: str):
                run_kwargs = dict(kwargs)
                run_kwargs["options"] = ((test_name + seed, ""),)
                grpc.experimental.unary_unary(*args, **run_kwargs)

            self.assert_cached(_invoke)

    def test_channels_evicted(self):
        with _server(grpc.local_server_credentials()) as port:
            target = f"localhost:{port}"
            response = grpc.experimental.unary_unary(
                _REQUEST,
                target,
                _UNARY_UNARY,
                channel_credentials=grpc.local_channel_credentials(),
            )
            # The eviction period is 2s (set via env var at module top), so
            # the cached channel should disappear shortly after the call.
            self.assert_eventually(
                lambda: grpc._simple_stubs.ChannelCache.get()._test_only_channel_count()
                == 0,
                message=lambda: f"{grpc._simple_stubs.ChannelCache.get()._test_only_channel_count()} remain",
            )

    def test_total_channels_enforced(self):
        with _server(grpc.local_server_credentials()) as port:
            target = f"localhost:{port}"
            for i in range(_STRESS_EPOCHS):
                # Ensure we get a new channel each time.
                options = (("foo", str(i)),)
                # Send messages at full blast.
                grpc.experimental.unary_unary(
                    _REQUEST,
                    target,
                    _UNARY_UNARY,
                    options=options,
                    channel_credentials=grpc.local_channel_credentials(),
                )
            self.assert_eventually(
                lambda: grpc._simple_stubs.ChannelCache.get()._test_only_channel_count()
                <= _MAXIMUM_CHANNELS + 1,
                message=lambda: f"{grpc._simple_stubs.ChannelCache.get()._test_only_channel_count()} channels remain",
            )

    def test_unary_stream(self):
        with _server(grpc.local_server_credentials()) as port:
            target = f"localhost:{port}"
            for response in grpc.experimental.unary_stream(
                _REQUEST,
                target,
                _UNARY_STREAM,
                channel_credentials=grpc.local_channel_credentials(),
            ):
                self.assertEqual(_REQUEST, response)

    def test_stream_unary(self):
        def request_iter():
            for _ in range(_CLIENT_REQUEST_COUNT):
                yield _REQUEST

        with _server(grpc.local_server_credentials()) as port:
            target = f"localhost:{port}"
            response = grpc.experimental.stream_unary(
                request_iter(),
                target,
                _STREAM_UNARY,
                channel_credentials=grpc.local_channel_credentials(),
            )
            self.assertEqual(_REQUEST, response)

    def test_stream_stream(self):
        def request_iter():
            for _ in range(_CLIENT_REQUEST_COUNT):
                yield _REQUEST

        with _server(grpc.local_server_credentials()) as port:
            target = f"localhost:{port}"
            for response in grpc.experimental.stream_stream(
                request_iter(),
                target,
                _STREAM_STREAM,
                channel_credentials=grpc.local_channel_credentials(),
            ):
                self.assertEqual(_REQUEST, response)

    def test_default_ssl(self):
        # Exercises the default SSL credentials path by pointing the root
        # cert env var at the test CA; no explicit channel_credentials given.
        _private_key = resources.private_key()
        _certificate_chain = resources.certificate_chain()
        _server_certs = ((_private_key, _certificate_chain),)
        _server_host_override = "foo.test.google.fr"
        _test_root_certificates = resources.test_root_certificates()
        _property_options = (
            (
                "grpc.ssl_target_name_override",
                _server_host_override,
            ),
        )
        cert_dir = os.path.join(
            os.path.dirname(resources.__file__), "credentials"
        )
        cert_file = os.path.join(cert_dir, "ca.pem")
        with _env("GRPC_DEFAULT_SSL_ROOTS_FILE_PATH", cert_file):
            server_creds = grpc.ssl_server_credentials(_server_certs)
            with _server(server_creds) as port:
                target = f"localhost:{port}"
                # Success of the call itself is the assertion here.
                response = grpc.experimental.unary_unary(
                    _REQUEST, target, _UNARY_UNARY, options=_property_options
                )

    def test_insecure_sugar(self):
        with _server(None) as port:
            target = f"localhost:{port}"
            response = grpc.experimental.unary_unary(
                _REQUEST, target, _UNARY_UNARY, insecure=True
            )
            self.assertEqual(_REQUEST, response)

    def test_insecure_sugar_mutually_exclusive(self):
        # insecure=True together with channel_credentials must be rejected.
        with _server(None) as port:
            target = f"localhost:{port}"
            with self.assertRaises(ValueError):
                response = grpc.experimental.unary_unary(
                    _REQUEST,
                    target,
                    _UNARY_UNARY,
                    insecure=True,
                    channel_credentials=grpc.local_channel_credentials(),
                )

    def test_default_wait_for_ready(self):
        # Starts the RPC against a port with no server; the server is only
        # started once the channel reports TRANSIENT_FAILURE, so the RPC can
        # succeed only if wait_for_ready defaults to True.
        addr, port, sock = get_socket()
        sock.close()
        target = f"{addr}:{port}"
        channel = grpc._simple_stubs.ChannelCache.get().get_channel(
            target, (), None, True, None
        )
        rpc_finished_event = threading.Event()
        rpc_failed_event = threading.Event()
        server = None

        def _on_connectivity_changed(connectivity):
            nonlocal server
            if connectivity is grpc.ChannelConnectivity.TRANSIENT_FAILURE:
                self.assertFalse(rpc_finished_event.is_set())
                self.assertFalse(rpc_failed_event.is_set())
                server = test_common.test_server()
                server.add_insecure_port(target)
                server.add_generic_rpc_handlers((_GenericHandler(),))
                server.start()
                channel.unsubscribe(_on_connectivity_changed)
            elif connectivity in (
                grpc.ChannelConnectivity.IDLE,
                grpc.ChannelConnectivity.CONNECTING,
            ):
                pass
            else:
                self.fail("Encountered unknown state.")

        channel.subscribe(_on_connectivity_changed)

        def _send_rpc():
            try:
                response = grpc.experimental.unary_unary(
                    _REQUEST, target, _UNARY_UNARY, timeout=None, insecure=True
                )
                rpc_finished_event.set()
            except Exception as e:
                rpc_failed_event.set()

        t = threading.Thread(target=_send_rpc)
        t.start()
        t.join()
        self.assertFalse(rpc_failed_event.is_set())
        self.assertTrue(rpc_finished_event.is_set())
        if server is not None:
            server.stop(None)

    def assert_times_out(self, invocation_args):
        """Asserts a black-hole RPC with *invocation_args* hits
        DEADLINE_EXCEEDED (i.e. some timeout applied)."""
        with _server(None) as port:
            target = f"localhost:{port}"
            with self.assertRaises(grpc.RpcError) as cm:
                response = grpc.experimental.unary_unary(
                    _REQUEST,
                    target,
                    _BLACK_HOLE,
                    insecure=True,
                    **invocation_args,
                )
            self.assertEqual(
                grpc.StatusCode.DEADLINE_EXCEEDED, cm.exception.code()
            )

    def test_default_timeout(self):
        # Every combination of explicit/implicit timeout and wait_for_ready
        # must still respect some deadline on a never-responding method.
        not_present = object()
        wait_for_ready_values = [True, not_present]
        timeout_values = [0.5, not_present]
        cases = []
        for wait_for_ready in wait_for_ready_values:
            for timeout in timeout_values:
                case = {}
                if timeout is not not_present:
                    case["timeout"] = timeout
                if wait_for_ready is not not_present:
                    case["wait_for_ready"] = wait_for_ready
                cases.append(case)
        for case in cases:
            with self.subTest(**case):
                self.assert_times_out(case)
if __name__ == "__main__":
    # Allow running this test module directly.
    logging.basicConfig(level=logging.INFO)
    unittest.main(verbosity=2)
| 15,134
| 34.197674
| 122
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_py3_only/unit/__init__.py
|
# Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 581
| 40.571429
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_py3_only/unit/_leak_test.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A smoke test for memory leaks on short-lived channels without close.
This test doesn't guarantee all resources are cleaned if `Channel.close` is not
explicitly invoked. The recommended way of using Channel object is using `with`
clause, and let context manager automatically close the channel.
"""
from concurrent.futures import ThreadPoolExecutor
import logging
import os
import resource
import sys
import unittest
import grpc
_TEST_METHOD = "/test/Test"
_REQUEST = b"\x23\x33"
# Enough RPCs that a per-call leak becomes visible in max RSS.
_LARGE_NUM_OF_ITERATIONS = 5000
# If MAX_RSS inflated more than this size, the test is failed.
_FAIL_THRESHOLD = 25 * 1024 * 1024  # 25 MiB
def _get_max_rss():
    # Peak resident set size of this process, as reported by getrusage.
    # NOTE(review): units are platform-dependent (kilobytes on Linux,
    # bytes on macOS); the threshold comparison assumes a consistent unit.
    return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
def _pretty_print_bytes(x):
if x > 1024 * 1024 * 1024:
return "%.2f GiB" % (x / 1024.0 / 1024 / 1024)
elif x > 1024 * 1024:
return "%.2f MiB" % (x / 1024.0 / 1024)
elif x > 1024:
return "%.2f KiB" % (x / 1024.0)
else:
return "%d B" % x
class _GenericHandler(grpc.GenericRpcHandler):
    """Serves _TEST_METHOD with an echo handler; anything else gets None
    (which gRPC reports as UNIMPLEMENTED)."""

    def service(self, handler_call_details):
        if handler_call_details.method != _TEST_METHOD:
            return None
        return grpc.unary_unary_rpc_method_handler(lambda x, _: x)
def _start_a_test_server():
    """Start a single-threaded insecure echo server on an ephemeral port.

    Returns:
      A ("localhost:<port>", server) tuple.
    """
    server = grpc.server(
        ThreadPoolExecutor(max_workers=1), options=(("grpc.so_reuseport", 0),)
    )
    server.add_generic_rpc_handlers((_GenericHandler(),))
    bound_port = server.add_insecure_port("localhost:0")
    server.start()
    return f"localhost:{bound_port}", server
def _perform_an_rpc(address):
    # Deliberately creates a channel WITHOUT closing it: this module tests
    # whether such short-lived, unclosed channels leak memory (see the
    # module docstring). Do not "fix" this by adding channel.close().
    channel = grpc.insecure_channel(address)
    multicallable = channel.unary_unary(_TEST_METHOD)
    response = multicallable(_REQUEST)
    assert _REQUEST == response
class TestLeak(unittest.TestCase):
    """Smoke-checks that repeated unclosed-channel RPCs do not inflate RSS."""

    def test_leak_with_single_shot_rpcs(self):
        address, server = _start_a_test_server()
        # Record memory before the experiment.
        baseline = _get_max_rss()
        # Amplify any per-call leak with many iterations.
        for _ in range(_LARGE_NUM_OF_ITERATIONS):
            _perform_an_rpc(address)
        # Fail if max RSS grew beyond the allowed threshold.
        growth = _get_max_rss() - baseline
        if growth > _FAIL_THRESHOLD:
            self.fail(
                "Max RSS inflated {} > {}".format(
                    _pretty_print_bytes(growth),
                    _pretty_print_bytes(_FAIL_THRESHOLD),
                )
            )
if __name__ == "__main__":
    # Allow running this test module directly.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 3,080
| 29.50495
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_py3_only/interop/xds_interop_client_test.py
|
# Copyright 2022 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import contextlib
import logging
import os
import subprocess
import sys
import tempfile
import time
from typing import Iterable, List, Mapping, Set, Tuple
import unittest
import grpc.experimental
import xds_interop_client
import xds_interop_server
from src.proto.grpc.testing import empty_pb2
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2
from src.proto.grpc.testing import test_pb2_grpc
import src.python.grpcio_tests.tests.unit.framework.common as framework_common
_CLIENT_PATH = os.path.abspath(os.path.realpath(xds_interop_client.__file__))
_SERVER_PATH = os.path.abspath(os.path.realpath(xds_interop_server.__file__))
# (ClientConfigureRequest enum value, accumulated-stats map key) pairs for
# the methods exercised by this test.
_METHODS = (
    (messages_pb2.ClientConfigureRequest.UNARY_CALL, "UNARY_CALL"),
    (messages_pb2.ClientConfigureRequest.EMPTY_CALL, "EMPTY_CALL"),
)
_QPS = 100
_NUM_CHANNELS = 20
_TEST_ITERATIONS = 10
_ITERATION_DURATION_SECONDS = 1
_SUBPROCESS_TIMEOUT_SECONDS = 2
def _set_union(a: Iterable, b: Iterable) -> Set:
c = set(a)
c.update(b)
return c
@contextlib.contextmanager
def _start_python_with_args(
file: str, args: List[str]
) -> Tuple[subprocess.Popen, tempfile.TemporaryFile, tempfile.TemporaryFile]:
with tempfile.TemporaryFile(mode="r") as stdout:
with tempfile.TemporaryFile(mode="r") as stderr:
proc = subprocess.Popen(
(sys.executable, file) + tuple(args),
stdout=stdout,
stderr=stderr,
)
yield proc, stdout, stderr
def _dump_stream(
process_name: str, stream_name: str, stream: tempfile.TemporaryFile
):
sys.stderr.write(f"{process_name} {stream_name}:\n")
stream.seek(0)
sys.stderr.write(stream.read())
def _dump_streams(
    process_name: str,
    stdout: tempfile.TemporaryFile,
    stderr: tempfile.TemporaryFile,
):
    """Replay both captured streams of a subprocess, then a terminator line."""
    for stream_name, stream in (("stdout", stdout), ("stderr", stderr)):
        _dump_stream(process_name, stream_name, stream)
    sys.stderr.write(f"End {process_name} output.\n")
def _index_accumulated_stats(
    response: messages_pb2.LoadBalancerAccumulatedStatsResponse,
) -> Mapping[str, Mapping[int, int]]:
    """Flatten the proto's per-method stats into a nested dict keyed by
    method name, then status code."""
    indexed = collections.defaultdict(lambda: collections.defaultdict(int))
    for _, method_str in _METHODS:
        result = response.stats_per_method[method_str].result
        for status, count in result.items():
            indexed[method_str][status] = count
    return indexed
def _subtract_indexed_stats(
    a: Mapping[str, Mapping[int, int]], b: Mapping[str, Mapping[int, int]]
):
    """Element-wise difference a - b over the union of methods and statuses."""
    difference = collections.defaultdict(lambda: collections.defaultdict(int))
    for method in _set_union(a.keys(), b.keys()):
        for status in _set_union(a[method].keys(), b[method].keys()):
            difference[method][status] = a[method][status] - b[method][status]
    return difference
def _collect_stats(
    stats_port: int, duration: int
) -> Mapping[str, Mapping[int, int]]:
    """Sample the client's accumulated stats twice, *duration* seconds
    apart, and return the per-method/per-status delta."""
    settings = {
        "target": f"localhost:{stats_port}",
        "insecure": True,
    }

    def _snapshot():
        response = (
            test_pb2_grpc.LoadBalancerStatsService.GetClientAccumulatedStats(
                messages_pb2.LoadBalancerAccumulatedStatsRequest(), **settings
            )
        )
        return _index_accumulated_stats(response)

    before = _snapshot()
    time.sleep(duration)
    after = _snapshot()
    return _subtract_indexed_stats(after, before)
class XdsInteropClientTest(unittest.TestCase):
    """End-to-end check that the xDS interop client honors Configure RPCs,
    run against real client/server subprocesses."""

    def _assert_client_consistent(
        self, server_port: int, stats_port: int, qps: int, num_channels: int
    ):
        """Repeatedly reconfigure the client's active method and verify that
        only the configured method records successful calls."""
        settings = {
            "target": f"localhost:{stats_port}",
            "insecure": True,
        }
        for i in range(_TEST_ITERATIONS):
            # Alternate between UNARY_CALL and EMPTY_CALL each iteration.
            target_method, target_method_str = _METHODS[i % len(_METHODS)]
            test_pb2_grpc.XdsUpdateClientConfigureService.Configure(
                messages_pb2.ClientConfigureRequest(types=[target_method]),
                **settings,
            )
            delta = _collect_stats(stats_port, _ITERATION_DURATION_SECONDS)
            logging.info("Delta: %s", delta)
            for _, method_str in _METHODS:
                for status in delta[method_str]:
                    # Only the currently configured method may record OK
                    # (status 0) calls; every other counter must stay flat.
                    if status == 0 and method_str == target_method_str:
                        self.assertGreater(delta[method_str][status], 0, delta)
                    else:
                        self.assertEqual(delta[method_str][status], 0, delta)

    def test_configure_consistency(self):
        # Reserve a port, start the server subprocess on it, then release
        # the placeholder socket once the server answers.
        _, server_port, socket = framework_common.get_socket()
        with _start_python_with_args(
            _SERVER_PATH,
            [f"--port={server_port}", f"--maintenance_port={server_port}"],
        ) as (server, server_stdout, server_stderr):
            # Send RPC to server to make sure it's running.
            logging.info("Sending RPC to server.")
            test_pb2_grpc.TestService.EmptyCall(
                empty_pb2.Empty(),
                f"localhost:{server_port}",
                insecure=True,
                wait_for_ready=True,
            )
            logging.info("Server successfully started.")
            socket.close()
            _, stats_port, stats_socket = framework_common.get_socket()
            with _start_python_with_args(
                _CLIENT_PATH,
                [
                    f"--server=localhost:{server_port}",
                    f"--stats_port={stats_port}",
                    f"--qps={_QPS}",
                    f"--num_channels={_NUM_CHANNELS}",
                ],
            ) as (client, client_stdout, client_stderr):
                stats_socket.close()
                try:
                    self._assert_client_consistent(
                        server_port, stats_port, _QPS, _NUM_CHANNELS
                    )
                except:
                    # Dump both subprocesses' output before re-raising so
                    # failures are debuggable from the test log.
                    _dump_streams("server", server_stdout, server_stderr)
                    _dump_streams("client", client_stdout, client_stderr)
                    raise
                finally:
                    server.kill()
                    client.kill()
                    server.wait(timeout=_SUBPROCESS_TIMEOUT_SECONDS)
                    client.wait(timeout=_SUBPROCESS_TIMEOUT_SECONDS)
if __name__ == "__main__":
    # Allow running this test module directly.
    logging.basicConfig()
    unittest.main(verbosity=2)
| 7,092
| 34.113861
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_py3_only/interop/xds_interop_server.py
|
# Copyright 2021 The gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import collections
from concurrent import futures
import logging
import signal
import socket
import sys
import threading
import time
from typing import DefaultDict, Dict, List, Mapping, Sequence, Set, Tuple
import grpc
from grpc_channelz.v1 import channelz
from grpc_channelz.v1 import channelz_pb2
from grpc_health.v1 import health as grpc_health
from grpc_health.v1 import health_pb2
from grpc_health.v1 import health_pb2_grpc
from grpc_reflection.v1alpha import reflection
from src.proto.grpc.testing import empty_pb2
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2
from src.proto.grpc.testing import test_pb2_grpc
# NOTE: This interop server is not fully compatible with all xDS interop tests.
# It currently only implements enough functionality to pass the xDS security
# tests.
# Bind on all interfaces.
_LISTEN_HOST = "0.0.0.0"
# Worker-thread count shared by the test and maintenance server pools.
_THREAD_POOL_SIZE = 256
# Root logger with a timestamped console handler; level is set in __main__.
logger = logging.getLogger()
console_handler = logging.StreamHandler()
formatter = logging.Formatter(fmt="%(asctime)s: %(levelname)-8s %(message)s")
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)
class TestService(test_pb2_grpc.TestServiceServicer):
    """TestService implementation that tags every response with this
    server's identity (hostname in initial metadata, id in the body)."""

    def __init__(self, server_id, hostname):
        self._server_id = server_id
        self._hostname = hostname

    def EmptyCall(
        self, _: empty_pb2.Empty, context: grpc.ServicerContext
    ) -> empty_pb2.Empty:
        """Answer with an Empty; the hostname travels in initial metadata."""
        context.send_initial_metadata((("hostname", self._hostname),))
        return empty_pb2.Empty()

    def UnaryCall(
        self, request: messages_pb2.SimpleRequest, context: grpc.ServicerContext
    ) -> messages_pb2.SimpleResponse:
        """Answer with a SimpleResponse carrying server_id and hostname."""
        context.send_initial_metadata((("hostname", self._hostname),))
        return messages_pb2.SimpleResponse(
            server_id=self._server_id,
            hostname=self._hostname,
        )
def _configure_maintenance_server(
    server: grpc.Server, maintenance_port: int
) -> None:
    """Attach the channelz, health, and reflection services to *server* and
    bind an insecure maintenance port."""
    channelz.add_channelz_servicer(server)
    server.add_insecure_port(f"{_LISTEN_HOST}:{maintenance_port}")
    health_servicer = grpc_health.HealthServicer(
        experimental_non_blocking=True,
        experimental_thread_pool=futures.ThreadPoolExecutor(
            max_workers=_THREAD_POOL_SIZE
        ),
    )
    health_pb2_grpc.add_HealthServicer_to_server(health_servicer, server)
    service_names = (
        test_pb2.DESCRIPTOR.services_by_name["TestService"].full_name,
        health_pb2.DESCRIPTOR.services_by_name["Health"].full_name,
        channelz_pb2.DESCRIPTOR.services_by_name["Channelz"].full_name,
        reflection.SERVICE_NAME,
    )
    # Mark every exposed service as SERVING so health checks pass.
    for service in service_names:
        health_servicer.set(service, health_pb2.HealthCheckResponse.SERVING)
    reflection.enable_server_reflection(service_names, server)
def _configure_test_server(
    server: grpc.Server, port: int, secure_mode: bool, server_id: str
) -> None:
    """Register the TestService and bind its port (xDS-secured if requested)."""
    test_pb2_grpc.add_TestServiceServicer_to_server(
        TestService(server_id, socket.gethostname()), server
    )
    listen_address = f"{_LISTEN_HOST}:{port}"
    if secure_mode:
        logger.info("Running with xDS Server credentials")
        fallback_creds = grpc.insecure_server_credentials()
        server.add_secure_port(
            listen_address, grpc.xds_server_credentials(fallback_creds)
        )
    else:
        server.add_insecure_port(listen_address)
def _run(
    port: int, maintenance_port: int, secure_mode: bool, server_id: str
) -> None:
    """Start the test and maintenance servers and block until termination.

    When both ports are equal, a single combined server hosts the test
    service and the maintenance services; otherwise two separate servers
    are started (the test server xDS-enabled when secure_mode is set).
    """
    if port == maintenance_port:
        combined_server = grpc.server(
            futures.ThreadPoolExecutor(max_workers=_THREAD_POOL_SIZE)
        )
        _configure_test_server(combined_server, port, secure_mode, server_id)
        _configure_maintenance_server(combined_server, maintenance_port)
        combined_server.start()
        logger.info("Test server listening on port %d", port)
        logger.info("Maintenance server listening on port %d", maintenance_port)
        combined_server.wait_for_termination()
    else:
        maintenance_server = grpc.server(
            futures.ThreadPoolExecutor(max_workers=_THREAD_POOL_SIZE)
        )
        _configure_maintenance_server(maintenance_server, maintenance_port)
        maintenance_server.start()
        logger.info("Maintenance server listening on port %d", maintenance_port)
        test_server = grpc.server(
            futures.ThreadPoolExecutor(max_workers=_THREAD_POOL_SIZE),
            xds=secure_mode,
        )
        _configure_test_server(test_server, port, secure_mode, server_id)
        test_server.start()
        logger.info("Test server listening on port %d", port)
        test_server.wait_for_termination()
        maintenance_server.wait_for_termination()
def bool_arg(arg: str) -> bool:
    """Parse a command-line string as a boolean flag value."""
    lowered = arg.lower()
    if lowered in ("true", "yes", "y"):
        return True
    if lowered in ("false", "no", "n"):
        return False
    raise argparse.ArgumentTypeError(f"Could not parse '{arg}' as a bool.")
if __name__ == "__main__":
    # Command-line entry point for the xDS interop test server.
    parser = argparse.ArgumentParser(
        description="Run Python xDS interop server."
    )
    parser.add_argument(
        "--port", type=int, default=8080, help="Port for test server."
    )
    parser.add_argument(
        "--maintenance_port",
        type=int,
        default=8080,
        help="Port for servers besides test server.",
    )
    # Note: the string default "False" is converted via type=bool_arg.
    parser.add_argument(
        "--secure_mode",
        type=bool_arg,
        default="False",
        help="If specified, uses xDS to retrieve server credentials.",
    )
    parser.add_argument(
        "--server_id",
        type=str,
        default="python_server",
        help="The server ID to return in responses..",
    )
    parser.add_argument(
        "--verbose",
        help="verbose log output",
        default=False,
        action="store_true",
    )
    args = parser.parse_args()
    if args.verbose:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)
    # In secure mode _run creates a separate xDS-enabled test server, so the
    # two ports must differ.
    if args.secure_mode and args.port == args.maintenance_port:
        raise ValueError(
            "--port and --maintenance_port must not be the same when"
            " --secure_mode is set."
        )
    _run(args.port, args.maintenance_port, args.secure_mode, args.server_id)
| 6,892
| 33.813131
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests_py3_only/interop/xds_interop_client.py
|
# Copyright 2020 The gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import collections
from concurrent import futures
import datetime
import logging
import signal
import sys
import threading
import time
from typing import DefaultDict, Dict, List, Mapping, Sequence, Set, Tuple
import grpc
import grpc_admin
from grpc_channelz.v1 import channelz
from src.proto.grpc.testing import empty_pb2
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2
from src.proto.grpc.testing import test_pb2_grpc
# Root logger with a stderr stream handler; verbosity is set in __main__.
logger = logging.getLogger()
console_handler = logging.StreamHandler()
formatter = logging.Formatter(fmt="%(asctime)s: %(levelname)-8s %(message)s")
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)
# The RPC methods this client knows how to drive.
_SUPPORTED_METHODS = (
    "UnaryCall",
    "EmptyCall",
)
# CamelCase method name -> CAPS_SNAKE name used in accumulated-stats keys.
_METHOD_CAMEL_TO_CAPS_SNAKE = {
    "UnaryCall": "UNARY_CALL",
    "EmptyCall": "EMPTY_CALL",
}
# CamelCase method name <-> ClientConfigureRequest RPC-type enum value.
_METHOD_STR_TO_ENUM = {
    "UnaryCall": messages_pb2.ClientConfigureRequest.UNARY_CALL,
    "EmptyCall": messages_pb2.ClientConfigureRequest.EMPTY_CALL,
}
_METHOD_ENUM_TO_STR = {v: k for k, v in _METHOD_STR_TO_ENUM.items()}
# Method name -> sequence of (key, value) metadata pairs.
PerMethodMetadataType = Mapping[str, Sequence[Tuple[str, str]]]
# How long an idle channel thread waits before re-checking its config.
_CONFIG_CHANGE_TIMEOUT = datetime.timedelta(milliseconds=500)
class _StatsWatcher:
    """Collects per-peer stats for RPCs whose ids fall in [start, end)."""
    _start: int  # First tracked request id (inclusive).
    _end: int  # End of the tracked request id range (exclusive).
    _rpcs_needed: int  # Tracked RPCs that have not yet completed.
    _rpcs_by_peer: DefaultDict[str, int]  # Completed-RPC count per peer.
    _rpcs_by_method: DefaultDict[str, DefaultDict[str, int]]  # method -> peer -> count.
    _no_remote_peer: int  # Completed RPCs that reported no peer hostname.
    _lock: threading.Lock  # NOTE(review): annotated but never assigned; _condition guards all state.
    _condition: threading.Condition
    def __init__(self, start: int, end: int):
        self._start = start
        self._end = end
        self._rpcs_needed = end - start
        self._rpcs_by_peer = collections.defaultdict(int)
        self._rpcs_by_method = collections.defaultdict(
            lambda: collections.defaultdict(int)
        )
        self._condition = threading.Condition()
        self._no_remote_peer = 0
    def on_rpc_complete(self, request_id: int, peer: str, method: str) -> None:
        """Records statistics for a single RPC."""
        # Ignore RPCs outside the id window this watcher was created for.
        if self._start <= request_id < self._end:
            with self._condition:
                if not peer:
                    self._no_remote_peer += 1
                else:
                    self._rpcs_by_peer[peer] += 1
                    self._rpcs_by_method[method][peer] += 1
                self._rpcs_needed -= 1
                # Wake await_rpc_stats_response once all RPCs are in.
                self._condition.notify()
    def await_rpc_stats_response(
        self, timeout_sec: int
    ) -> messages_pb2.LoadBalancerStatsResponse:
        """Blocks until a full response has been collected."""
        with self._condition:
            # May time out; any shortfall is reported via num_failures below.
            self._condition.wait_for(
                lambda: not self._rpcs_needed, timeout=float(timeout_sec)
            )
            response = messages_pb2.LoadBalancerStatsResponse()
            for peer, count in self._rpcs_by_peer.items():
                response.rpcs_by_peer[peer] = count
            for method, count_by_peer in self._rpcs_by_method.items():
                for peer, count in count_by_peer.items():
                    response.rpcs_by_method[method].rpcs_by_peer[peer] = count
            response.num_failures = self._no_remote_peer + self._rpcs_needed
            return response
# Guards all _global_* aggregates and the _watchers set below.
_global_lock = threading.Lock()
# Set by the SIGINT handler; channel loops exit when it is set.
_stop_event = threading.Event()
# Next request id to hand out; incremented for every RPC started.
_global_rpc_id: int = 0
# Active stats watchers; every completed RPC is reported to each of them.
_watchers: Set[_StatsWatcher] = set()
# The stats/configure gRPC server; populated by _run.
_global_server = None
_global_rpcs_started: Mapping[str, int] = collections.defaultdict(int)
_global_rpcs_succeeded: Mapping[str, int] = collections.defaultdict(int)
_global_rpcs_failed: Mapping[str, int] = collections.defaultdict(int)
# Mapping[method, Mapping[status_code, count]]
_global_rpc_statuses: Mapping[str, Mapping[int, int]] = collections.defaultdict(
    lambda: collections.defaultdict(int)
)
def _handle_sigint(sig, frame) -> None:
    """SIGINT handler: signals channel loops to exit and stops the server."""
    logger.warning("Received SIGINT")
    # Channel threads poll this event and wind down once it is set.
    _stop_event.set()
    _global_server.stop(None)
class _LoadBalancerStatsServicer(
    test_pb2_grpc.LoadBalancerStatsServiceServicer
):
    """Reports per-peer and cumulative RPC statistics for this client."""

    def __init__(self):
        # Bug fix: ``super(_LoadBalancerStatsServicer).__init__()`` created
        # an unbound super proxy and never invoked the base initializer.
        super().__init__()

    def GetClientStats(
        self,
        request: messages_pb2.LoadBalancerStatsRequest,
        context: grpc.ServicerContext,
    ) -> messages_pb2.LoadBalancerStatsResponse:
        """Waits for the next ``request.num_rpcs`` RPCs and reports them."""
        logger.info("Received stats request.")
        start = None
        end = None
        watcher = None
        with _global_lock:
            # Track only RPCs issued after this request arrived.
            start = _global_rpc_id + 1
            end = start + request.num_rpcs
            watcher = _StatsWatcher(start, end)
            _watchers.add(watcher)
        response = watcher.await_rpc_stats_response(request.timeout_sec)
        with _global_lock:
            _watchers.remove(watcher)
        logger.info("Returning stats response: %s", response)
        return response

    def GetClientAccumulatedStats(
        self,
        request: messages_pb2.LoadBalancerAccumulatedStatsRequest,
        context: grpc.ServicerContext,
    ) -> messages_pb2.LoadBalancerAccumulatedStatsResponse:
        """Reports lifetime started/succeeded/failed counts per method."""
        logger.info("Received cumulative stats request.")
        response = messages_pb2.LoadBalancerAccumulatedStatsResponse()
        with _global_lock:
            for method in _SUPPORTED_METHODS:
                caps_method = _METHOD_CAMEL_TO_CAPS_SNAKE[method]
                response.num_rpcs_started_by_method[
                    caps_method
                ] = _global_rpcs_started[method]
                response.num_rpcs_succeeded_by_method[
                    caps_method
                ] = _global_rpcs_succeeded[method]
                response.num_rpcs_failed_by_method[
                    caps_method
                ] = _global_rpcs_failed[method]
                response.stats_per_method[
                    caps_method
                ].rpcs_started = _global_rpcs_started[method]
                # Per-status-code result counts for this method.
                for code, count in _global_rpc_statuses[method].items():
                    response.stats_per_method[caps_method].result[code] = count
        logger.info("Returning cumulative stats response.")
        return response
def _start_rpc(
    method: str,
    metadata: Sequence[Tuple[str, str]],
    request_id: int,
    stub: test_pb2_grpc.TestServiceStub,
    timeout: float,
    futures: Mapping[int, Tuple[grpc.Future, str]],
) -> None:
    """Kick off one asynchronous RPC and record its future under its id."""
    logger.debug(f"Sending {method} request to backend: {request_id}")
    if method == "UnaryCall":
        request = messages_pb2.SimpleRequest()
        rpc = stub.UnaryCall
    elif method == "EmptyCall":
        request = empty_pb2.Empty()
        rpc = stub.EmptyCall
    else:
        raise ValueError(f"Unrecognized method '{method}'.")
    call_future = rpc.future(request, metadata=metadata, timeout=timeout)
    futures[request_id] = (call_future, method)
def _on_rpc_done(
    rpc_id: int, future: grpc.Future, method: str, print_response: bool
) -> None:
    """Records the outcome of a completed RPC and notifies stats watchers.

    Updates the global per-method status, success and failure counters and
    forwards the (rpc_id, hostname, method) triple to every active
    _StatsWatcher.
    """
    exception = future.exception()
    hostname = ""
    with _global_lock:
        _global_rpc_statuses[method][future.code().value[0]] += 1
    if exception is not None:
        with _global_lock:
            _global_rpcs_failed[method] += 1
        if exception.code() == grpc.StatusCode.DEADLINE_EXCEEDED:
            logger.error(f"RPC {rpc_id} timed out")
        else:
            logger.error(exception)
    else:
        response = future.result()
        hostname = None
        # Prefer the hostname from initial metadata; fall back to the
        # response body if the server did not send the header.
        for metadatum in future.initial_metadata():
            if metadatum[0] == "hostname":
                hostname = metadatum[1]
                break
        else:
            hostname = response.hostname
        if future.code() == grpc.StatusCode.OK:
            with _global_lock:
                _global_rpcs_succeeded[method] += 1
        else:
            with _global_lock:
                _global_rpcs_failed[method] += 1
        if print_response:
            if future.code() == grpc.StatusCode.OK:
                logger.debug("Successful response.")
            else:
                # Bug fix: this branch referenced an undefined name
                # ``call``, raising NameError whenever a failed RPC was
                # logged with --print_response.
                logger.debug(f"RPC failed: {rpc_id}")
    with _global_lock:
        for watcher in _watchers:
            watcher.on_rpc_complete(rpc_id, hostname, method)
def _remove_completed_rpcs(
    futures: Mapping[int, Tuple[grpc.Future, str]], print_response: bool
) -> None:
    """Reaps finished RPC futures, reporting each completed RPC.

    Args:
        futures: In-flight RPCs keyed by request id; completed entries are
            deleted in place. (Annotation fixed: values are
            (future, method) tuples, as stored by _start_rpc.)
        print_response: Whether completed responses should be logged.
    """
    logger.debug("Removing completed RPCs")
    done = []
    for future_id, (future, method) in futures.items():
        if future.done():
            # Bug fix: previously passed the global ``args.print_response``
            # instead of this function's ``print_response`` parameter.
            _on_rpc_done(future_id, future, method, print_response)
            done.append(future_id)
    for rpc_id in done:
        del futures[rpc_id]
def _cancel_all_rpcs(futures: Mapping[int, Tuple[grpc.Future, str]]) -> None:
    """Cancel every in-flight RPC still tracked in *futures*."""
    logger.info("Cancelling all remaining RPCs")
    for pending_call, _unused_method in futures.values():
        pending_call.cancel()
class _ChannelConfiguration:
    """Mutable, condition-guarded settings for one client channel.

    Instances are plain data holders: read and write the public attributes
    directly, holding ``condition`` while doing so. The condition is
    notified whenever the configuration changes.
    """

    def __init__(
        self,
        method: str,
        metadata: Sequence[Tuple[str, str]],
        qps: int,
        server: str,
        rpc_timeout_sec: int,
        print_response: bool,
        secure_mode: bool,
    ):
        self.method = method
        self.metadata = metadata
        self.qps = qps
        self.server = server
        self.rpc_timeout_sec = rpc_timeout_sec
        self.print_response = print_response
        self.secure_mode = secure_mode
        # Signalled whenever any of the fields above is modified.
        self.condition = threading.Condition()
def _run_single_channel(config: _ChannelConfiguration) -> None:
    """Drives RPCs over one channel until _stop_event is set.

    Opens a channel to ``config.server`` (with xDS credentials in secure
    mode), then issues ``config.qps`` RPCs per second, reaping completed
    futures between sends. A qps of 0 idles until the config changes.
    """
    global _global_rpc_id  # pylint: disable=global-statement
    with config.condition:
        server = config.server
    channel = None
    if config.secure_mode:
        fallback_creds = grpc.experimental.insecure_channel_credentials()
        channel_creds = grpc.xds_channel_credentials(fallback_creds)
        channel = grpc.secure_channel(server, channel_creds)
    else:
        channel = grpc.insecure_channel(server)
    with channel:
        stub = test_pb2_grpc.TestServiceStub(channel)
        futures: Dict[int, Tuple[grpc.Future, str]] = {}
        while not _stop_event.is_set():
            with config.condition:
                if config.qps == 0:
                    # Idle: wait for a config change (or timeout), then
                    # re-check qps.
                    config.condition.wait(
                        timeout=_CONFIG_CHANGE_TIMEOUT.total_seconds()
                    )
                    continue
                else:
                    duration_per_query = 1.0 / float(config.qps)
                request_id = None
                with _global_lock:
                    request_id = _global_rpc_id
                    _global_rpc_id += 1
                    _global_rpcs_started[config.method] += 1
                start = time.time()
                end = start + duration_per_query
                _start_rpc(
                    config.method,
                    config.metadata,
                    request_id,
                    stub,
                    float(config.rpc_timeout_sec),
                    futures,
                )
                print_response = config.print_response
            # Bug fix: use the snapshot taken while holding
            # config.condition instead of re-reading
            # config.print_response without the lock (the local was
            # previously assigned but never used).
            _remove_completed_rpcs(futures, print_response)
            logger.debug(f"Currently {len(futures)} in-flight RPCs")
            now = time.time()
            while now < end:
                time.sleep(end - now)
                now = time.time()
        _cancel_all_rpcs(futures)
class _XdsUpdateClientConfigureServicer(
    test_pb2_grpc.XdsUpdateClientConfigureServiceServicer
):
    """Applies runtime traffic-configuration changes pushed by the test."""

    def __init__(
        self, per_method_configs: Mapping[str, _ChannelConfiguration], qps: int
    ):
        # Bug fix: ``super(_XdsUpdateClientConfigureServicer).__init__()``
        # created an unbound super proxy without calling the base
        # initializer.
        super().__init__()
        self._per_method_configs = per_method_configs
        self._qps = qps

    def Configure(
        self,
        request: messages_pb2.ClientConfigureRequest,
        context: grpc.ServicerContext,
    ) -> messages_pb2.ClientConfigureResponse:
        """Enables the requested methods at the configured qps and disables
        every other supported method, updating metadata and timeouts."""
        logger.info("Received Configure RPC: %s", request)
        method_strs = [_METHOD_ENUM_TO_STR[t] for t in request.types]
        for method in _SUPPORTED_METHODS:
            method_enum = _METHOD_STR_TO_ENUM[method]
            channel_config = self._per_method_configs[method]
            if method in method_strs:
                qps = self._qps
                metadata = (
                    (md.key, md.value)
                    for md in request.metadata
                    if md.type == method_enum
                )
                # For backward compatibility, do not change timeout when we
                # receive a default value timeout.
                if request.timeout_sec == 0:
                    timeout_sec = channel_config.rpc_timeout_sec
                else:
                    timeout_sec = request.timeout_sec
            else:
                qps = 0
                metadata = ()
                # Leave timeout unchanged for backward compatibility.
                timeout_sec = channel_config.rpc_timeout_sec
            with channel_config.condition:
                channel_config.qps = qps
                channel_config.metadata = list(metadata)
                channel_config.rpc_timeout_sec = timeout_sec
                # Wake any channel thread idling on this condition.
                channel_config.condition.notify_all()
        return messages_pb2.ClientConfigureResponse()
class _MethodHandle:
    """Owns the worker threads that drive RPCs for one method."""

    _channel_threads: List[threading.Thread]

    def __init__(
        self, num_channels: int, channel_config: _ChannelConfiguration
    ):
        """Spawn and start one channel-driving thread per channel."""
        self._channel_threads = [
            threading.Thread(
                target=_run_single_channel, args=(channel_config,)
            )
            for _ in range(num_channels)
        ]
        for worker in self._channel_threads:
            worker.start()

    def stop(self) -> None:
        """Block until every owned thread has exited."""
        for worker in self._channel_threads:
            worker.join()
def _run(
    args: argparse.Namespace,
    methods: Sequence[str],
    per_method_metadata: PerMethodMetadataType,
) -> None:
    """Starts the channel worker threads and the stats/configure server.

    Args:
        args: Parsed command-line options.
        methods: Methods to drive traffic on; every other supported method
            is configured with qps 0.
        per_method_metadata: Metadata pairs to attach, keyed by method.
    """
    logger.info("Starting python xDS Interop Client.")
    global _global_server  # pylint: disable=global-statement
    method_handles = []
    channel_configs = {}
    for method in _SUPPORTED_METHODS:
        # Unselected methods idle at qps 0; they can be enabled later via
        # the XdsUpdateClientConfigureService (see Configure above).
        if method in methods:
            qps = args.qps
        else:
            qps = 0
        channel_config = _ChannelConfiguration(
            method,
            per_method_metadata.get(method, []),
            qps,
            args.server,
            args.rpc_timeout_sec,
            args.print_response,
            args.secure_mode,
        )
        channel_configs[method] = channel_config
        method_handles.append(_MethodHandle(args.num_channels, channel_config))
    # Serve the stats, configure, channelz and admin services insecurely.
    _global_server = grpc.server(futures.ThreadPoolExecutor())
    _global_server.add_insecure_port(f"0.0.0.0:{args.stats_port}")
    test_pb2_grpc.add_LoadBalancerStatsServiceServicer_to_server(
        _LoadBalancerStatsServicer(), _global_server
    )
    test_pb2_grpc.add_XdsUpdateClientConfigureServiceServicer_to_server(
        _XdsUpdateClientConfigureServicer(channel_configs, args.qps),
        _global_server,
    )
    channelz.add_channelz_servicer(_global_server)
    grpc_admin.add_admin_servicers(_global_server)
    _global_server.start()
    _global_server.wait_for_termination()
    # Join the channel threads after the server stops (e.g. on SIGINT).
    for method_handle in method_handles:
        method_handle.stop()
def parse_metadata_arg(metadata_arg: str) -> PerMethodMetadataType:
    """Parses the --metadata flag into per-method metadata pairs.

    Args:
        metadata_arg: Comma-delimited 'METHOD:KEY:VALUE' triples; may be
            empty.

    Returns:
        A mapping from method name to a list of (key, value) pairs.

    Raises:
        ValueError: If a triple is malformed or names an unsupported
            method.
    """
    # Bug fix: previously consulted the global ``args.metadata`` instead of
    # this function's own ``metadata_arg`` argument.
    metadata = metadata_arg.split(",") if metadata_arg else []
    per_method_metadata = collections.defaultdict(list)
    for metadatum in metadata:
        elems = metadatum.split(":")
        if len(elems) != 3:
            raise ValueError(
                f"'{metadatum}' was not in the form 'METHOD:KEY:VALUE'"
            )
        if elems[0] not in _SUPPORTED_METHODS:
            raise ValueError(f"Unrecognized method '{elems[0]}'")
        per_method_metadata[elems[0]].append((elems[1], elems[2]))
    return per_method_metadata
def parse_rpc_arg(rpc_arg: str) -> Sequence[str]:
    """Split the --rpc flag into method names, validating each one."""
    methods = rpc_arg.split(",")
    unsupported = set(methods) - set(_SUPPORTED_METHODS)
    if unsupported:
        raise ValueError(
            "--rpc supported methods: {}".format(", ".join(_SUPPORTED_METHODS))
        )
    return methods
def bool_arg(arg: str) -> bool:
    """Convert a textual truthy/falsy flag value into a bool."""
    normalized = arg.lower()
    if normalized in ("false", "no", "n"):
        return False
    if normalized in ("true", "yes", "y"):
        return True
    raise argparse.ArgumentTypeError(f"Could not parse '{arg}' as a bool.")
if __name__ == "__main__":
    # Command-line entry point for the xDS interop test client.
    parser = argparse.ArgumentParser(
        description="Run Python XDS interop client."
    )
    parser.add_argument(
        "--num_channels",
        default=1,
        type=int,
        help="The number of channels from which to send requests.",
    )
    # Note: string defaults ("False") are converted via type=bool_arg.
    parser.add_argument(
        "--print_response",
        default="False",
        type=bool_arg,
        help="Write RPC response to STDOUT.",
    )
    parser.add_argument(
        "--qps",
        default=1,
        type=int,
        help="The number of queries to send from each channel per second.",
    )
    parser.add_argument(
        "--rpc_timeout_sec",
        default=30,
        type=int,
        help="The per-RPC timeout in seconds.",
    )
    parser.add_argument(
        "--server", default="localhost:50051", help="The address of the server."
    )
    parser.add_argument(
        "--stats_port",
        default=50052,
        type=int,
        help="The port on which to expose the peer distribution stats service.",
    )
    parser.add_argument(
        "--secure_mode",
        default="False",
        type=bool_arg,
        help="If specified, uses xDS credentials to connect to the server.",
    )
    parser.add_argument(
        "--verbose",
        help="verbose log output",
        default=False,
        action="store_true",
    )
    parser.add_argument(
        "--log_file", default=None, type=str, help="A file to log to."
    )
    rpc_help = "A comma-delimited list of RPC methods to run. Must be one of "
    rpc_help += ", ".join(_SUPPORTED_METHODS)
    rpc_help += "."
    parser.add_argument("--rpc", default="UnaryCall", type=str, help=rpc_help)
    metadata_help = (
        "A comma-delimited list of 3-tuples of the form "
        + "METHOD:KEY:VALUE, e.g. "
        + "EmptyCall:key1:value1,UnaryCall:key2:value2,EmptyCall:k3:v3"
    )
    parser.add_argument("--metadata", default="", type=str, help=metadata_help)
    args = parser.parse_args()
    # Install the SIGINT handler before starting any worker threads so
    # Ctrl-C can stop the channel loops and the stats server.
    signal.signal(signal.SIGINT, _handle_sigint)
    if args.verbose:
        logger.setLevel(logging.DEBUG)
    if args.log_file:
        # Mirror log output to the requested file in append mode.
        file_handler = logging.FileHandler(args.log_file, mode="a")
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)
    _run(args, parse_rpc_arg(args.rpc), parse_metadata_arg(args.metadata))
| 19,654
| 33.97331
| 81
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/_result.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import collections
import io
import itertools
import traceback
import unittest
from xml.etree import ElementTree
import coverage
from tests import _loader
class CaseResult(
    collections.namedtuple(
        "CaseResult",
        ["id", "name", "kind", "stdout", "stderr", "skip_reason", "traceback"],
    )
):
    """A serializable result of a single test case.

    Attributes:
      id (object): Any serializable object used to denote the identity of
        this test case.
      name (str or None): A human-readable name of the test case.
      kind (CaseResult.Kind): The kind of test result.
      stdout (object or None): Captured stdout, or None if none was
        captured.
      stderr (object or None): Captured stderr, or None if none was
        captured.
      skip_reason (object or None): Why the test was skipped; required when
        kind is Kind.SKIP, else None.
      traceback (object or None): The test's traceback; required when kind
        is Kind.{ERROR, FAILURE, EXPECTED_FAILURE}, else None.
    """

    class Kind(object):
        UNTESTED = "untested"
        RUNNING = "running"
        ERROR = "error"
        FAILURE = "failure"
        SUCCESS = "success"
        SKIP = "skip"
        EXPECTED_FAILURE = "expected failure"
        UNEXPECTED_SUCCESS = "unexpected success"

    def __new__(
        cls,
        id=None,
        name=None,
        kind=None,
        stdout=None,
        stderr=None,
        skip_reason=None,
        traceback=None,
    ):
        """Keyword-friendly constructor that validates kind-specific fields.

        See this class' attributes for information on the arguments."""
        assert id is not None
        assert name is None or isinstance(name, str)
        # Kinds that must carry a traceback.
        traceback_kinds = (
            CaseResult.Kind.ERROR,
            CaseResult.Kind.FAILURE,
            CaseResult.Kind.EXPECTED_FAILURE,
        )
        # Kinds that carry no extra payload at all.
        plain_kinds = (
            CaseResult.Kind.UNTESTED,
            CaseResult.Kind.RUNNING,
            CaseResult.Kind.SUCCESS,
            CaseResult.Kind.UNEXPECTED_SUCCESS,
        )
        if any(kind is k for k in traceback_kinds):
            assert traceback is not None
        elif kind is CaseResult.Kind.SKIP:
            assert skip_reason is not None
        else:
            assert any(kind is k for k in plain_kinds)
        return super(cls, CaseResult).__new__(
            cls, id, name, kind, stdout, stderr, skip_reason, traceback
        )

    def updated(
        self,
        name=None,
        kind=None,
        stdout=None,
        stderr=None,
        skip_reason=None,
        traceback=None,
    ):
        """Return a new validated CaseResult with non-None fields replaced.

        See this class' attributes for information on the arguments."""
        overrides = {
            "name": name,
            "kind": kind,
            "stdout": stdout,
            "stderr": stderr,
            "skip_reason": skip_reason,
            "traceback": traceback,
        }
        merged = {
            field: getattr(self, field) if value is None else value
            for field, value in overrides.items()
        }
        return CaseResult(id=self.id, **merged)
class AugmentedResult(unittest.TestResult):
    """unittest.TestResult that additionally tracks per-case details.

    Each test case is mapped, via ``id_map``, to an identifier, and a
    CaseResult describing that case is kept in ``cases`` under the id,
    providing information (such as captured output) beyond what the
    standard unittest library records.

    Attributes:
      id_map (callable): A unary callable mapping unittest.TestCase objects
        to unique identifiers.
      cases (dict): Maps identifiers produced by id_map to CaseResult
        objects; None until startTestRun is called.
    """

    def __init__(self, id_map):
        """Initialize the object with an identifier mapping.

        Arguments:
          id_map (callable): Corresponds to the attribute `id_map`."""
        super().__init__()
        self.id_map = id_map
        self.cases = None

    def _update_case(self, test, **updates):
        # Merge the given field updates into the CaseResult for `test`.
        case_id = self.id_map(test)
        self.cases[case_id] = self.cases[case_id].updated(**updates)

    def startTestRun(self):
        """See unittest.TestResult.startTestRun."""
        super().startTestRun()
        self.cases = dict()

    def startTest(self, test):
        """See unittest.TestResult.startTest."""
        super().startTest(test)
        case_id = self.id_map(test)
        self.cases[case_id] = CaseResult(
            id=case_id, name=test.id(), kind=CaseResult.Kind.RUNNING
        )

    def addError(self, test, err):
        """See unittest.TestResult.addError."""
        super().addError(test, err)
        self._update_case(test, kind=CaseResult.Kind.ERROR, traceback=err)

    def addFailure(self, test, err):
        """See unittest.TestResult.addFailure."""
        super().addFailure(test, err)
        self._update_case(test, kind=CaseResult.Kind.FAILURE, traceback=err)

    def addSuccess(self, test):
        """See unittest.TestResult.addSuccess."""
        super().addSuccess(test)
        self._update_case(test, kind=CaseResult.Kind.SUCCESS)

    def addSkip(self, test, reason):
        """See unittest.TestResult.addSkip."""
        super().addSkip(test, reason)
        self._update_case(test, kind=CaseResult.Kind.SKIP, skip_reason=reason)

    def addExpectedFailure(self, test, err):
        """See unittest.TestResult.addExpectedFailure."""
        super().addExpectedFailure(test, err)
        self._update_case(
            test, kind=CaseResult.Kind.EXPECTED_FAILURE, traceback=err
        )

    def addUnexpectedSuccess(self, test):
        """See unittest.TestResult.addUnexpectedSuccess."""
        super().addUnexpectedSuccess(test)
        self._update_case(test, kind=CaseResult.Kind.UNEXPECTED_SUCCESS)

    def set_output(self, test, stdout, stderr):
        """Set the output attributes for the CaseResult of a test.

        Args:
          test (unittest.TestCase): The TestCase to set the outputs of.
          stdout (bytes): Captured stdout; decoded before storage.
          stderr (bytes): Captured stderr; decoded before storage.
        """
        self._update_case(
            test, stdout=stdout.decode(), stderr=stderr.decode()
        )

    def augmented_results(self, filter):
        """Convenience method to retrieve filtered case results.

        Args:
          filter (callable): A unary predicate over CaseResult objects.
        """
        return (
            self.cases[case_id]
            for case_id in self.cases
            if filter(self.cases[case_id])
        )
class CoverageResult(AugmentedResult):
    """AugmentedResult that records per-test coverage.py data.

    Attributes:
      coverage_context (coverage.Coverage): The active coverage.py session,
        or None outside of a running test.
    """

    def __init__(self, id_map):
        """See AugmentedResult.__init__."""
        super(CoverageResult, self).__init__(id_map=id_map)
        self.coverage_context = None

    def startTest(self, test):
        """See unittest.TestResult.startTest.

        Additionally begins a fresh coverage measurement context."""
        super(CoverageResult, self).startTest(test)
        self.coverage_context = coverage.Coverage(data_suffix=True)
        self.coverage_context.start()

    def stopTest(self, test):
        """See unittest.TestResult.stopTest.

        Additionally stops and saves the coverage measurement."""
        super(CoverageResult, self).stopTest(test)
        self.coverage_context.stop()
        self.coverage_context.save()
        self.coverage_context = None
class _Colors(object):
    """Namespaced constants for terminal color magic numbers."""
    # ANSI SGR escape sequences used to colorize live test output.
    HEADER = "\033[95m"
    INFO = "\033[94m"
    OK = "\033[92m"
    WARN = "\033[93m"
    FAIL = "\033[91m"
    BOLD = "\033[1m"
    UNDERLINE = "\033[4m"
    # Resets all attributes back to the terminal default.
    END = "\033[0m"
class TerminalResult(CoverageResult):
    """CoverageResult that live-reports each test outcome to a terminal."""

    def __init__(self, out, id_map):
        """Initialize the result object.

        Args:
          out (file-like): Destination for terminal-colored live results.
          id_map (callable): See AugmentedResult.__init__.
        """
        super(TerminalResult, self).__init__(id_map=id_map)
        self.out = out

    def _report(self, color, text):
        # Write one colorized status line and flush it immediately.
        self.out.write(color + text + _Colors.END)
        self.out.flush()

    def startTestRun(self):
        """See unittest.TestResult.startTestRun."""
        super(TerminalResult, self).startTestRun()
        self.out.write(
            _Colors.HEADER + "Testing gRPC Python...\n" + _Colors.END
        )

    def stopTestRun(self):
        """See unittest.TestResult.stopTestRun."""
        super(TerminalResult, self).stopTestRun()
        self.out.write(summary(self))
        self.out.flush()

    def addError(self, test, err):
        """See unittest.TestResult.addError."""
        super(TerminalResult, self).addError(test, err)
        self._report(_Colors.FAIL, "ERROR {}\n".format(test.id()))

    def addFailure(self, test, err):
        """See unittest.TestResult.addFailure."""
        super(TerminalResult, self).addFailure(test, err)
        self._report(_Colors.FAIL, "FAILURE {}\n".format(test.id()))

    def addSuccess(self, test):
        """See unittest.TestResult.addSuccess."""
        super(TerminalResult, self).addSuccess(test)
        self._report(_Colors.OK, "SUCCESS {}\n".format(test.id()))

    def addSkip(self, test, reason):
        """See unittest.TestResult.addSkip."""
        super(TerminalResult, self).addSkip(test, reason)
        self._report(_Colors.INFO, "SKIP {}\n".format(test.id()))

    def addExpectedFailure(self, test, err):
        """See unittest.TestResult.addExpectedFailure."""
        super(TerminalResult, self).addExpectedFailure(test, err)
        self._report(_Colors.INFO, "FAILURE_OK {}\n".format(test.id()))

    def addUnexpectedSuccess(self, test):
        """See unittest.TestResult.addUnexpectedSuccess."""
        super(TerminalResult, self).addUnexpectedSuccess(test)
        self._report(_Colors.INFO, "UNEXPECTED_OK {}\n".format(test.id()))
def _traceback_string(type, value, trace):
    """Generate a descriptive string for a Python exception.

    Args:
      type (class): The type of the exception.
      value (Exception): The value of the exception.
      trace (traceback): Traceback of the exception.

    Returns:
      str: Formatted exception descriptive string.
    """
    with io.StringIO() as rendered:
        traceback.print_exception(type, value, trace, file=rendered)
        return rendered.getvalue()
def summary(result):
    """Build a human-readable summary string of a result object.

    Args:
      result (AugmentedResult): The result object to get the summary of.

    Returns:
      str: The summary string.
    """
    assert isinstance(result, AugmentedResult)

    def of_kind(kind):
        # All recorded case results whose kind is exactly `kind`.
        return list(
            result.augmented_results(
                lambda case_result: case_result.kind is kind
            )
        )

    untested = of_kind(CaseResult.Kind.UNTESTED)
    running = of_kind(CaseResult.Kind.RUNNING)
    failures = of_kind(CaseResult.Kind.FAILURE)
    errors = of_kind(CaseResult.Kind.ERROR)
    successes = of_kind(CaseResult.Kind.SUCCESS)
    skips = of_kind(CaseResult.Kind.SKIP)
    expected_failures = of_kind(CaseResult.Kind.EXPECTED_FAILURE)
    unexpected_successes = of_kind(CaseResult.Kind.UNEXPECTED_SUCCESS)
    running_names = [case.name for case in running]
    finished_count = (
        len(failures)
        + len(errors)
        + len(successes)
        + len(expected_failures)
        + len(unexpected_successes)
    )
    statistics = (
        "{finished} tests finished:\n"
        "\t{successful} successful\n"
        "\t{unsuccessful} unsuccessful\n"
        "\t{skipped} skipped\n"
        "\t{expected_fail} expected failures\n"
        "\t{unexpected_successful} unexpected successes\n"
        "Interrupted Tests:\n"
        "\t{interrupted}\n".format(
            finished=finished_count,
            successful=len(successes),
            unsuccessful=(len(failures) + len(errors)),
            skipped=len(skips),
            expected_fail=len(expected_failures),
            unexpected_successful=len(unexpected_successes),
            interrupted=str(running_names),
        )
    )
    # One colorized detail entry per failed/errored case.
    detail_template = (
        _Colors.FAIL
        + "{test_name}"
        + _Colors.END
        + "\n"
        + _Colors.BOLD
        + "traceback:"
        + _Colors.END
        + "\n"
        + "{traceback}\n"
        + _Colors.BOLD
        + "stdout:"
        + _Colors.END
        + "\n"
        + "{stdout}\n"
        + _Colors.BOLD
        + "stderr:"
        + _Colors.END
        + "\n"
        + "{stderr}\n"
    )
    tracebacks = "\n\n".join(
        detail_template.format(
            test_name=case.name,
            traceback=_traceback_string(*case.traceback),
            stdout=case.stdout,
            stderr=case.stderr,
        )
        for case in itertools.chain(failures, errors)
    )
    notes = "Unexpected successes: {}\n".format(
        [case.name for case in unexpected_successes]
    )
    return statistics + "\nErrors/Failures: \n" + tracebacks + "\n" + notes
def jenkins_junit_xml(result):
    """An XML tree object that when written is recognizable by Jenkins.

    Args:
      result (AugmentedResult): The result object to get the junit xml
        output of.

    Returns:
      ElementTree.ElementTree: The XML tree.
    """
    assert isinstance(result, AugmentedResult)
    root = ElementTree.Element("testsuites")
    suite = ElementTree.SubElement(
        root,
        "testsuite",
        {
            "name": "Python gRPC tests",
        },
    )
    for case in result.cases.values():
        if case.kind is CaseResult.Kind.SUCCESS:
            ElementTree.SubElement(
                suite,
                "testcase",
                {
                    "name": case.name,
                },
            )
        elif case.kind in (CaseResult.Kind.ERROR, CaseResult.Kind.FAILURE):
            case_xml = ElementTree.SubElement(
                suite,
                "testcase",
                {
                    "name": case.name,
                },
            )
            error_xml = ElementTree.SubElement(case_xml, "error", {})
            # Bug fix: this used to be ``"".format(...)``, which always
            # produced an empty string and dropped all failure detail from
            # the report.
            error_xml.text = "{}\n{}".format(case.stderr, case.traceback)
    return ElementTree.ElementTree(element=root)
| 17,191
| 31.746667
| 82
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/bazel_namespace_package_hack.py
|
# Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import site
import sys
_GRPC_BAZEL_RUNTIME_ENV = "GRPC_BAZEL_RUNTIME"

# TODO(https://github.com/bazelbuild/bazel/issues/6844) Bazel failed to
# interpret namespace packages correctly. This monkey patch will force the
# Python process to parse the .pth file in the sys.path to resolve namespace
# package in the right place.
# Analysis in depth: https://github.com/bazelbuild/rules_python/issues/55
def sys_path_to_site_dir_hack():
    """Register existing sys.path entries as site dirs so .pth files load."""
    # Only run within our Bazel environment.
    if not os.environ.get(_GRPC_BAZEL_RUNTIME_ENV):
        return
    # The only difference between a sys.path entry and a site directory is
    # whether its .pth files get parsed: every site directory is on
    # sys.path, but not the other way around.
    existing_entries = [entry for entry in sys.path if os.path.exists(entry)]
    for entry in existing_entries:
        site.addsitedir(entry)
| 1,593
| 37.878049
| 76
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/_loader.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import importlib
import os
import pkgutil
import re
import sys
import unittest
import coverage
TEST_MODULE_REGEX = r"^.*_test$"
# Determines the path of a given path relative to the first matching
# path on sys.path. Useful for determining what a directory's module
# path will be.
def _relativize_to_sys_path(path):
    """Return `path` relative to the first sys.path entry that prefixes it.

    The result is "" for an exact match, otherwise a relative path with a
    trailing path separator (ready to be turned into a module prefix).

    Raises:
      AssertionError: If no sys.path entry is a prefix of `path`.
    """
    for base in sys.path:
        if not path.startswith(base):
            continue
        remainder = path[len(base) :]
        if not remainder:
            return ""
        # Strip a leading separator left over from the prefix cut.
        if remainder.startswith(os.path.sep):
            remainder = remainder[len(os.path.sep) :]
        # Normalize to always end with a separator.
        if not remainder.endswith(os.path.sep):
            remainder += os.path.sep
        return remainder
    raise AssertionError("Failed to relativize {} to sys.path.".format(path))
def _relative_path_to_module_prefix(path):
    """Convert a relative filesystem path into a dotted module prefix."""
    return ".".join(path.split(os.path.sep))
class Loader(object):
    """Test loader for setuptools test suite support.

    Attributes:
      suite (unittest.TestSuite): All tests collected by the loader.
      loader (unittest.TestLoader): Standard Python unittest loader to be ran per
        module discovered.
      module_matcher (re.RegexObject): A regular expression object to match
        against module names and determine whether or not the discovered module
        contributes to the test suite.
    """

    def __init__(self):
        self.suite = unittest.TestSuite()
        self.loader = unittest.TestLoader()
        self.module_matcher = re.compile(TEST_MODULE_REGEX)

    def loadTestsFromNames(self, names, module=None):
        """Function mirroring TestLoader::loadTestsFromNames, as expected by
        setuptools.setup argument `test_loader`."""
        # ensure that we capture decorators and definitions (else our coverage
        # measure unnecessarily suffers)
        coverage_context = coverage.Coverage(data_suffix=True)
        coverage_context.start()
        imported_modules = tuple(
            importlib.import_module(name) for name in names
        )
        for imported_module in imported_modules:
            self.visit_module(imported_module)
        for imported_module in imported_modules:
            try:
                package_paths = imported_module.__path__
            except AttributeError:
                # Plain (non-package) modules have no __path__; nothing to walk.
                continue
            self.walk_packages(package_paths)
        coverage_context.stop()
        coverage_context.save()
        return self.suite

    def walk_packages(self, package_paths):
        """Walks over the packages, dispatching `visit_module` calls.

        Args:
          package_paths (list): A list of paths over which to walk through modules
            along.
        """
        for path in package_paths:
            self._walk_package(path)

    def _walk_package(self, package_path):
        """Imports (if necessary) and visits every module under package_path."""
        # Local import: `importlib.util` is a submodule that is not guaranteed
        # to be exposed by a bare `import importlib`.
        import importlib.util

        prefix = _relative_path_to_module_prefix(
            _relativize_to_sys_path(package_path)
        )
        for importer, module_name, is_package in pkgutil.walk_packages(
            [package_path], prefix
        ):
            if module_name in sys.modules:
                module = sys.modules[module_name]
            else:
                # Bug fix: importer.find_module()/load_module() were
                # deprecated since Python 3.4 and removed in 3.12. Use the
                # spec-based loading protocol instead; registering the module
                # in sys.modules before exec mirrors load_module's behavior.
                spec = importer.find_spec(module_name)
                module = importlib.util.module_from_spec(spec)
                sys.modules[module_name] = module
                spec.loader.exec_module(module)
            self.visit_module(module)

    def visit_module(self, module):
        """Visits the module, adding discovered tests to the test suite.

        Args:
          module (module): Module to match against self.module_matcher; if matched
            it has its tests loaded via self.loader into self.suite.
        """
        if self.module_matcher.match(module.__name__):
            module_suite = self.loader.loadTestsFromModule(module)
            self.suite.addTest(module_suite)
def iterate_suite_cases(suite):
    """Generator over all unittest.TestCases in a unittest.TestSuite.

    Args:
      suite (unittest.TestSuite): Suite to iterate over in the generator.

    Returns:
      generator: A generator over all unittest.TestCases in `suite`.
    """
    for member in suite:
        if isinstance(member, unittest.TestSuite):
            # Suites nest arbitrarily deep; recurse into child suites.
            yield from iterate_suite_cases(member)
        elif isinstance(member, unittest.TestCase):
            yield member
        else:
            raise ValueError(
                "unexpected suite item of type {}".format(type(member))
            )
| 5,048
| 33.82069
| 82
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from tests import _loader
from tests import _runner
# Re-export the custom loader and runner so callers (e.g. setuptools'
# test_loader/test_runner arguments) can reference "tests.Loader" and
# "tests.Runner" directly.
Loader = _loader.Loader
Runner = _runner.Runner
| 719
| 31.727273
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/_runner.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import collections
import io
import os
import select
import signal
import sys
import tempfile
import threading
import time
import unittest
import uuid
from tests import _loader
from tests import _result
class CaptureFile(object):
    """A context-managed file to redirect output to a byte array.

    Use by invoking `start` (`__enter__`) and at some point invoking `stop`
    (`__exit__`). At any point after the initial call to `start` call `output` to
    get the current redirected output. Note that we don't currently use file
    locking, so calling `output` between calls to `start` and `stop` may muddle
    the result (you should only be doing this during a Python-handled interrupt as
    a last ditch effort to provide output to the user).

    Attributes:
      _redirected_fd (int): File descriptor of file to redirect writes from.
      _saved_fd (int): A copy of the original value of the redirected file
        descriptor.
      _into_file (TemporaryFile or None): File to which writes are redirected.
        Only non-None when self is started.
    """

    def __init__(self, fd):
        self._redirected_fd = fd
        # Duplicate the fd so the original stream can still be reached after
        # redirection begins.
        self._saved_fd = os.dup(self._redirected_fd)
        self._into_file = None

    def output(self):
        """Get all output from the redirected-to file if it exists."""
        if self._into_file:
            self._into_file.seek(0)
            return bytes(self._into_file.read())
        else:
            return bytes()

    def start(self):
        """Start redirection of writes to the file descriptor."""
        self._into_file = tempfile.TemporaryFile()
        os.dup2(self._into_file.fileno(), self._redirected_fd)

    def stop(self):
        """Stop redirection of writes to the file descriptor."""
        # n.b. this dup2 call auto-closes self._redirected_fd
        os.dup2(self._saved_fd, self._redirected_fd)

    def write_bypass(self, value):
        """Bypass the redirection and write directly to the original file.

        Arguments:
          value (str): What to write to the original file.
        """
        if not isinstance(value, bytes):
            value = value.encode("ascii")
        if self._saved_fd is None:
            # Bug fix: this previously read `self._redirect_fd`, a
            # nonexistent attribute (the field is `_redirected_fd`), and
            # would have raised AttributeError if this branch were reached.
            os.write(self._redirected_fd, value)
        else:
            os.write(self._saved_fd, value)

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, type, value, traceback):
        self.stop()

    def close(self):
        """Close any resources used by self not closed by stop()."""
        os.close(self._saved_fd)
class AugmentedCase(collections.namedtuple("AugmentedCase", ["case", "id"])):
    """A test case with a guaranteed unique externally specified identifier.

    Attributes:
      case (unittest.TestCase): TestCase we're decorating with an additional
        identifier.
      id (object): Any identifier that may be considered 'unique' for testing
        purposes.
    """

    def __new__(cls, case, id=None):
        if id is None:
            # Default to a random UUID so distinct instances never collide.
            id = uuid.uuid4()
        # Bug fix: this previously called `super(cls, AugmentedCase)`, i.e.
        # with the (class, subclass) arguments swapped. That happens to work
        # when cls is AugmentedCase itself but raises TypeError for any
        # subclass.
        return super(AugmentedCase, cls).__new__(cls, case, id)
# NOTE(lidiz) This complex wrapper is not triggering setUpClass nor
# tearDownClass. Do not use those methods, or fix this wrapper!
class Runner(object):
    """Runs a unittest suite with output capture, signal handling, and
    Jenkins-compatible XML reporting.

    Test output (stdout/stderr) is captured per-case via fd redirection so it
    can be attached to the result; SIGINT allows an early, reported stop and
    fault signals dump captured output before aborting.
    """

    def __init__(self, dedicated_threads=False):
        """Constructs the Runner object.

        Args:
          dedicated_threads: A bool indicates whether to spawn each unit test
            in separate thread or not.
        """
        self._skipped_tests = []
        self._dedicated_threads = dedicated_threads

    def skip_tests(self, tests):
        """Registers substrings of test ids to skip during run()."""
        self._skipped_tests = tests

    def run(self, suite):
        """See setuptools' test_runner setup argument for information."""
        # only run test cases with id starting with given prefix
        testcase_filter = os.getenv("GRPC_PYTHON_TESTRUNNER_FILTER")
        filtered_cases = []
        for case in _loader.iterate_suite_cases(suite):
            if not testcase_filter or case.id().startswith(testcase_filter):
                filtered_cases.append(case)

        # Ensure that every test case has no collision with any other test case in
        # the augmented results.
        augmented_cases = [
            AugmentedCase(case, uuid.uuid4()) for case in filtered_cases
        ]
        case_id_by_case = dict(
            (augmented_case.case, augmented_case.id)
            for augmented_case in augmented_cases
        )
        result_out = io.StringIO()
        result = _result.TerminalResult(
            result_out, id_map=lambda case: case_id_by_case[case]
        )
        # Redirect process-level stdout/stderr so per-case output can be
        # captured and attached to the result.
        stdout_pipe = CaptureFile(sys.stdout.fileno())
        stderr_pipe = CaptureFile(sys.stderr.fileno())
        # Single-element list so the nested handler can mutate it (closure
        # write access without `nonlocal`).
        kill_flag = [False]

        def sigint_handler(signal_number, frame):
            # First Ctrl-C requests a graceful stop; restore the default
            # handler so a second Ctrl-C kills the process immediately.
            if signal_number == signal.SIGINT:
                kill_flag[0] = True  # Python 2.7 not having 'local'... :-(
            signal.signal(signal_number, signal.SIG_DFL)

        def fault_handler(signal_number, frame):
            # Dump whatever output was captured so far, then hard-exit.
            stdout_pipe.write_bypass(
                "Received fault signal {}\nstdout:\n{}\n\nstderr:{}\n".format(
                    signal_number, stdout_pipe.output(), stderr_pipe.output()
                )
            )
            os._exit(1)

        def check_kill_self():
            # Honors a pending SIGINT: report partial results and exit.
            if kill_flag[0]:
                stdout_pipe.write_bypass("Stopping tests short...")
                result.stopTestRun()
                stdout_pipe.write_bypass(result_out.getvalue())
                stdout_pipe.write_bypass(
                    "\ninterrupted stdout:\n{}\n".format(
                        stdout_pipe.output().decode()
                    )
                )
                stderr_pipe.write_bypass(
                    "\ninterrupted stderr:\n{}\n".format(
                        stderr_pipe.output().decode()
                    )
                )
                os._exit(1)

        def try_set_handler(name, handler):
            # Not all signals exist on all platforms (e.g. SIGBUS on
            # Windows); ignore the missing ones.
            try:
                signal.signal(getattr(signal, name), handler)
            except AttributeError:
                pass

        try_set_handler("SIGINT", sigint_handler)
        try_set_handler("SIGBUS", fault_handler)
        try_set_handler("SIGABRT", fault_handler)
        try_set_handler("SIGFPE", fault_handler)
        try_set_handler("SIGILL", fault_handler)
        # Sometimes output will lag after a test has successfully finished; we
        # ignore such writes to our pipes.
        try_set_handler("SIGPIPE", signal.SIG_IGN)

        # Run the tests
        result.startTestRun()
        for augmented_case in augmented_cases:
            # for/else: the else branch runs only when no skip substring
            # matched this case's id.
            for skipped_test in self._skipped_tests:
                if skipped_test in augmented_case.case.id():
                    break
            else:
                sys.stdout.write(
                    "Running {}\n".format(augmented_case.case.id())
                )
                sys.stdout.flush()
                if self._dedicated_threads:
                    # (Deprecated) Spawns dedicated thread for each test case.
                    case_thread = threading.Thread(
                        target=augmented_case.case.run, args=(result,)
                    )
                    try:
                        with stdout_pipe, stderr_pipe:
                            case_thread.start()
                            # If the thread is exited unexpected, stop testing.
                            while case_thread.is_alive():
                                check_kill_self()
                                time.sleep(0)
                            case_thread.join()
                    except:  # pylint: disable=try-except-raise
                        # re-raise the exception after forcing the with-block to end
                        raise
                    # Records the result of the test case run.
                    result.set_output(
                        augmented_case.case,
                        stdout_pipe.output(),
                        stderr_pipe.output(),
                    )
                    sys.stdout.write(result_out.getvalue())
                    sys.stdout.flush()
                    result_out.truncate(0)
                    check_kill_self()
                else:
                    # Donates current thread to test case execution.
                    augmented_case.case.run(result)
        result.stopTestRun()
        stdout_pipe.close()
        stderr_pipe.close()

        # Report results
        sys.stdout.write(result_out.getvalue())
        sys.stdout.flush()
        signal.signal(signal.SIGINT, signal.SIG_DFL)
        with open("report.xml", "wb") as report_xml_file:
            _result.jenkins_junit_xml(result).write(report_xml_file)
        return result
| 9,380
| 36.079051
| 84
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/observability/_observability_test.py
|
# Copyright 2023 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from concurrent import futures
import json
import logging
import os
import random
from typing import Any, Dict, List
import unittest
import grpc
import grpc_observability
from grpc_observability import _cyobservability
from grpc_observability import _observability
logger = logging.getLogger(__name__)
_REQUEST = b"\x00\x00\x00"
_RESPONSE = b"\x00\x00\x00"
_UNARY_UNARY = "/test/UnaryUnary"
_UNARY_STREAM = "/test/UnaryStream"
_STREAM_UNARY = "/test/StreamUnary"
_STREAM_STREAM = "/test/StreamStream"
STREAM_LENGTH = 5
CONFIG_ENV_VAR_NAME = "GRPC_GCP_OBSERVABILITY_CONFIG"
CONFIG_FILE_ENV_VAR_NAME = "GRPC_GCP_OBSERVABILITY_CONFIG_FILE"
_VALID_CONFIG_TRACING_STATS = {
"project_id": "test-project",
"cloud_trace": {"sampling_rate": 1.00},
"cloud_monitoring": {},
}
_VALID_CONFIG_TRACING_ONLY = {
"project_id": "test-project",
"cloud_trace": {"sampling_rate": 1.00},
}
_VALID_CONFIG_STATS_ONLY = {
"project_id": "test-project",
"cloud_monitoring": {},
}
_VALID_CONFIG_STATS_ONLY_STR = """
{
'project_id': 'test-project',
'cloud_monitoring': {}
}
"""
# Depending on grpc_core::IsTransportSuppliesClientLatencyEnabled,
# the following metrics might not exist.
_SKIP_VEFIRY = [_cyobservability.MetricsName.CLIENT_TRANSPORT_LATENCY]
_SPAN_PREFIXS = ["Recv", "Sent", "Attempt"]
class TestExporter(_observability.Exporter):
    """Exporter that accumulates exported data into caller-owned lists.

    The test owns the `metrics` and `spans` lists, so it can assert on
    everything exported without polling the exporter itself.
    """

    def __init__(
        self,
        metrics: List[_observability.StatsData],
        spans: List[_observability.TracingData],
    ):
        self.span_collecter = spans
        self.metric_collecter = metrics
        self._server = None

    def export_stats_data(
        self, stats_data: List[_observability.StatsData]
    ) -> None:
        # Append rather than replace so data from repeated exports is kept.
        for stats_item in stats_data:
            self.metric_collecter.append(stats_item)

    def export_tracing_data(
        self, tracing_data: List[_observability.TracingData]
    ) -> None:
        for tracing_item in tracing_data:
            self.span_collecter.append(tracing_item)
def handle_unary_unary(request, servicer_context):
    """Unary-unary test handler: reply once with the canned payload."""
    return _RESPONSE
def handle_unary_stream(request, servicer_context):
    """Unary-stream test handler: yield STREAM_LENGTH canned payloads."""
    emitted = 0
    while emitted < STREAM_LENGTH:
        yield _RESPONSE
        emitted += 1
def handle_stream_unary(request_iterator, servicer_context):
    """Stream-unary test handler: ignore the requests, reply once."""
    return _RESPONSE
def handle_stream_stream(request_iterator, servicer_context):
    """Stream-stream test handler: one canned payload per request."""
    for _request in request_iterator:
        yield _RESPONSE
class _MethodHandler(grpc.RpcMethodHandler):
    """RpcMethodHandler that wires exactly one of the four handler kinds,
    chosen by the (request_streaming, response_streaming) pair."""

    def __init__(self, request_streaming, response_streaming):
        self.request_streaming = request_streaming
        self.response_streaming = response_streaming
        # No (de)serialization: requests/responses are raw bytes.
        self.request_deserializer = None
        self.response_serializer = None
        # Exactly one of the four handler slots is populated below.
        self.unary_unary = None
        self.unary_stream = None
        self.stream_unary = None
        self.stream_stream = None
        if request_streaming and response_streaming:
            self.stream_stream = handle_stream_stream
        elif request_streaming:
            self.stream_unary = handle_stream_unary
        elif response_streaming:
            self.unary_stream = handle_unary_stream
        else:
            self.unary_unary = handle_unary_unary
class _GenericHandler(grpc.GenericRpcHandler):
    """Routes the four test method paths to matching _MethodHandlers."""

    def service(self, handler_call_details):
        # Map method path -> (request_streaming, response_streaming).
        streaming_kinds = {
            _UNARY_UNARY: (False, False),
            _UNARY_STREAM: (False, True),
            _STREAM_UNARY: (True, False),
            _STREAM_STREAM: (True, True),
        }
        kind = streaming_kinds.get(handler_call_details.method)
        if kind is None:
            # Unknown method: decline to service (gRPC returns UNIMPLEMENTED).
            return None
        request_streaming, response_streaming = kind
        return _MethodHandler(request_streaming, response_streaming)
class ObservabilityTest(unittest.TestCase):
    """End-to-end tests for GCPOpenCensusObservability.

    Each test spins up a local gRPC server plus client calls under a given
    observability config and asserts on the metrics/spans delivered to a
    TestExporter.
    """

    def setUp(self):
        # Shared collectors the TestExporter appends exported data into.
        self.all_metric = []
        self.all_span = []
        self.test_exporter = TestExporter(self.all_metric, self.all_span)
        self._server = None
        self._port = None

    def tearDown(self):
        # Clear config env vars so one test's config cannot leak into the next.
        os.environ[CONFIG_ENV_VAR_NAME] = ""
        os.environ[CONFIG_FILE_ENV_VAR_NAME] = ""
        if self._server:
            self._server.stop(0)

    def testRecordUnaryUnary(self):
        self._set_config_file(_VALID_CONFIG_TRACING_STATS)
        with grpc_observability.GCPOpenCensusObservability(
            exporter=self.test_exporter
        ):
            self._start_server()
            self.unary_unary_call()

        self.assertGreater(len(self.all_metric), 0)
        self.assertGreater(len(self.all_span), 0)
        self._validate_metrics(self.all_metric)
        self._validate_spans(self.all_span)

    def testThrowErrorWithoutConfig(self):
        # Neither env var nor config file set: init must fail.
        with self.assertRaises(ValueError):
            with grpc_observability.GCPOpenCensusObservability(
                exporter=self.test_exporter
            ):
                pass

    def testThrowErrorWithInvalidConfig(self):
        _INVALID_CONFIG = "INVALID"
        self._set_config_file(_INVALID_CONFIG)
        with self.assertRaises(ValueError):
            with grpc_observability.GCPOpenCensusObservability(
                exporter=self.test_exporter
            ):
                pass

    def testNoErrorAndDataWithEmptyConfig(self):
        _EMPTY_CONFIG = {}
        self._set_config_file(_EMPTY_CONFIG)
        # Empty config still require project_id
        os.environ["GCP_PROJECT"] = "test-project"
        with grpc_observability.GCPOpenCensusObservability(
            exporter=self.test_exporter
        ):
            self._start_server()
            self.unary_unary_call()

        # Neither tracing nor monitoring enabled: nothing should be exported.
        self.assertEqual(len(self.all_metric), 0)
        self.assertEqual(len(self.all_span), 0)

    def testThrowErrorWhenCallingMultipleInit(self):
        self._set_config_file(_VALID_CONFIG_TRACING_STATS)
        with self.assertRaises(ValueError):
            with grpc_observability.GCPOpenCensusObservability(
                exporter=self.test_exporter
            ) as o11y:
                # Second init while the first is active must be rejected.
                grpc._observability.observability_init(o11y)

    def testRecordUnaryUnaryStatsOnly(self):
        self._set_config_file(_VALID_CONFIG_STATS_ONLY)
        with grpc_observability.GCPOpenCensusObservability(
            exporter=self.test_exporter
        ):
            self._start_server()
            self.unary_unary_call()

        self.assertEqual(len(self.all_span), 0)
        self.assertGreater(len(self.all_metric), 0)
        self._validate_metrics(self.all_metric)

    def testRecordUnaryUnaryTracingOnly(self):
        self._set_config_file(_VALID_CONFIG_TRACING_ONLY)
        with grpc_observability.GCPOpenCensusObservability(
            exporter=self.test_exporter
        ):
            self._start_server()
            self.unary_unary_call()

        self.assertEqual(len(self.all_metric), 0)
        self.assertGreater(len(self.all_span), 0)
        self._validate_spans(self.all_span)

    def testRecordUnaryStream(self):
        self._set_config_file(_VALID_CONFIG_TRACING_STATS)
        with grpc_observability.GCPOpenCensusObservability(
            exporter=self.test_exporter
        ):
            self._start_server()
            self.unary_stream_call()

        self.assertGreater(len(self.all_metric), 0)
        self.assertGreater(len(self.all_span), 0)
        self._validate_metrics(self.all_metric)
        self._validate_spans(self.all_span)

    def testRecordStreamUnary(self):
        self._set_config_file(_VALID_CONFIG_TRACING_STATS)
        with grpc_observability.GCPOpenCensusObservability(
            exporter=self.test_exporter
        ):
            self._start_server()
            self.stream_unary_call()

        self.assertTrue(len(self.all_metric) > 0)
        self.assertTrue(len(self.all_span) > 0)
        self._validate_metrics(self.all_metric)
        self._validate_spans(self.all_span)

    def testRecordStreamStream(self):
        self._set_config_file(_VALID_CONFIG_TRACING_STATS)
        with grpc_observability.GCPOpenCensusObservability(
            exporter=self.test_exporter
        ):
            self._start_server()
            self.stream_stream_call()

        self.assertGreater(len(self.all_metric), 0)
        self.assertGreater(len(self.all_span), 0)
        self._validate_metrics(self.all_metric)
        self._validate_spans(self.all_span)

    def testNoRecordBeforeInit(self):
        self._set_config_file(_VALID_CONFIG_TRACING_STATS)
        # Calls made before observability init must not be recorded.
        self._start_server()
        self.unary_unary_call()
        self.assertEqual(len(self.all_metric), 0)
        self.assertEqual(len(self.all_span), 0)
        self._server.stop(0)

        with grpc_observability.GCPOpenCensusObservability(
            exporter=self.test_exporter
        ):
            self._start_server()
            self.unary_unary_call()

        self.assertGreater(len(self.all_metric), 0)
        self.assertGreater(len(self.all_span), 0)
        self._validate_metrics(self.all_metric)
        self._validate_spans(self.all_span)

    def testNoRecordAfterExit(self):
        self._set_config_file(_VALID_CONFIG_TRACING_STATS)
        with grpc_observability.GCPOpenCensusObservability(
            exporter=self.test_exporter
        ):
            self._start_server()
            self.unary_unary_call()

        self.assertGreater(len(self.all_metric), 0)
        self.assertGreater(len(self.all_span), 0)
        current_metric_len = len(self.all_metric)
        current_spans_len = len(self.all_span)
        self._validate_metrics(self.all_metric)
        self._validate_spans(self.all_span)

        # Calls made after the observability context exits must not add data.
        self.unary_unary_call()
        self.assertEqual(len(self.all_metric), current_metric_len)
        self.assertEqual(len(self.all_span), current_spans_len)

    def testTraceSamplingRate(self):
        # Make 40 UnaryCall's
        # With 50% sampling rate, we should get 10-30 traces with >99.93% probability
        # Each trace will have three span (Send, Recv, Attempt)
        _CALLS = 40
        _LOWER_BOUND = 10 * 3
        _HIGHER_BOUND = 30 * 3
        _VALID_CONFIG_TRACING_ONLY_SAMPLE_HALF = {
            "project_id": "test-project",
            "cloud_trace": {"sampling_rate": 0.5},
        }
        self._set_config_file(_VALID_CONFIG_TRACING_ONLY_SAMPLE_HALF)
        with grpc_observability.GCPOpenCensusObservability(
            exporter=self.test_exporter
        ):
            self._start_server()
            for _ in range(_CALLS):
                self.unary_unary_call()

        self.assertEqual(len(self.all_metric), 0)
        self.assertGreaterEqual(len(self.all_span), _LOWER_BOUND)
        self.assertLessEqual(len(self.all_span), _HIGHER_BOUND)
        self._validate_spans(self.all_span)

    def testConfigFileOverEnvVar(self):
        # env var have only stats enabled
        os.environ[CONFIG_ENV_VAR_NAME] = _VALID_CONFIG_STATS_ONLY_STR
        # config_file have only tracing enabled
        self._set_config_file(_VALID_CONFIG_TRACING_ONLY)
        with grpc_observability.GCPOpenCensusObservability(
            exporter=self.test_exporter
        ):
            self._start_server()
            self.unary_unary_call()

        # Tracing-only (file) won over stats-only (env var).
        self.assertEqual(len(self.all_metric), 0)
        self.assertGreater(len(self.all_span), 0)
        self._validate_spans(self.all_span)

    def _set_config_file(self, config: Dict[str, Any]) -> None:
        # Using random name here so multiple tests can run with different config files.
        config_file_path = "/tmp/" + str(random.randint(0, 100000))
        with open(config_file_path, "w", encoding="utf-8") as f:
            f.write(json.dumps(config))
        os.environ[CONFIG_FILE_ENV_VAR_NAME] = config_file_path

    def unary_unary_call(self):
        # Issues a single unary-unary RPC against the test server.
        with grpc.insecure_channel(f"localhost:{self._port}") as channel:
            multi_callable = channel.unary_unary(_UNARY_UNARY)
            unused_response, call = multi_callable.with_call(_REQUEST)

    def unary_stream_call(self):
        # Issues a unary-stream RPC and drains the response stream.
        with grpc.insecure_channel(f"localhost:{self._port}") as channel:
            multi_callable = channel.unary_stream(_UNARY_STREAM)
            call = multi_callable(_REQUEST)
            for _ in call:
                pass

    def stream_unary_call(self):
        # Issues a stream-unary RPC with STREAM_LENGTH requests.
        with grpc.insecure_channel(f"localhost:{self._port}") as channel:
            multi_callable = channel.stream_unary(_STREAM_UNARY)
            unused_response, call = multi_callable.with_call(
                iter([_REQUEST] * STREAM_LENGTH)
            )

    def stream_stream_call(self):
        # Issues a stream-stream RPC and drains the response stream.
        with grpc.insecure_channel(f"localhost:{self._port}") as channel:
            multi_callable = channel.stream_stream(_STREAM_STREAM)
            call = multi_callable(iter([_REQUEST] * STREAM_LENGTH))
            for _ in call:
                pass

    def _start_server(self) -> None:
        # Starts a fresh server on an ephemeral port; _port records it for
        # the client helpers above.
        self._server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
        self._server.add_generic_rpc_handlers((_GenericHandler(),))
        self._port = self._server.add_insecure_port("[::]:0")
        self._server.start()

    def _validate_metrics(
        self, metrics: List[_observability.StatsData]
    ) -> None:
        # Every known metric name (except the conditionally-present ones in
        # _SKIP_VEFIRY) must appear in the exported data.
        metric_names = set(metric.name for metric in metrics)
        for name in _cyobservability.MetricsName:
            if name in _SKIP_VEFIRY:
                continue
            if name not in metric_names:
                logger.error(
                    "metric %s not found in exported metrics: %s!",
                    name,
                    metric_names,
                )
            self.assertTrue(name in metric_names)

    def _validate_spans(
        self, tracing_data: List[_observability.TracingData]
    ) -> None:
        # Each span-name prefix (Recv/Sent/Attempt) must appear at least once.
        span_names = set(data.name for data in tracing_data)
        for prefix in _SPAN_PREFIXS:
            prefix_exist = any(prefix in name for name in span_names)
            if not prefix_exist:
                logger.error(
                    "missing span with prefix %s in exported spans: %s!",
                    prefix,
                    span_names,
                )
            self.assertTrue(prefix_exist)
# Script entry point: configure logging and run the test suite.
if __name__ == "__main__":
    logging.basicConfig()
    unittest.main(verbosity=2)
| 14,737
| 34.174224
| 87
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/stress/unary_stream_benchmark.py
|
# Copyright 2019 The gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import datetime
import subprocess
import sys
import threading
import time
import grpc
import grpc.experimental
_PORT = 5741
_MESSAGE_SIZE = 4
_RESPONSE_COUNT = 32 * 1024
_SERVER_CODE = (
"""
import datetime
import threading
import grpc
from concurrent import futures
from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2
from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2_grpc
class Handler(unary_stream_benchmark_pb2_grpc.UnaryStreamBenchmarkServiceServicer):
def Benchmark(self, request, context):
payload = b'\\x00\\x01' * int(request.message_size / 2)
for _ in range(request.response_count):
yield unary_stream_benchmark_pb2.BenchmarkResponse(response=payload)
server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
server.add_insecure_port('[::]:%d')
unary_stream_benchmark_pb2_grpc.add_UnaryStreamBenchmarkServiceServicer_to_server(Handler(), server)
server.start()
server.wait_for_termination()
"""
% _PORT
)
# The whole benchmark is wrapped in try/except ImportError so that test
# discovery can safely import this module even when the generated proto
# modules are unavailable (see the NOTE at the bottom).
try:
    from src.python.grpcio_tests.tests.stress import (
        unary_stream_benchmark_pb2_grpc,
    )
    from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2

    _GRPC_CHANNEL_OPTIONS = [
        ("grpc.max_metadata_size", 16 * 1024 * 1024),
        ("grpc.max_receive_message_length", 64 * 1024 * 1024),
        (grpc.experimental.ChannelOptions.SingleThreadedUnaryStream, 1),
    ]

    @contextlib.contextmanager
    def _running_server():
        """Runs the benchmark server subprocess for the duration of the
        with-block; terminates it and dumps its output on exit."""
        server_process = subprocess.Popen(
            [sys.executable, "-c", _SERVER_CODE],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        try:
            yield
        finally:
            server_process.terminate()
            server_process.wait()
            sys.stdout.write("stdout: {}".format(server_process.stdout.read()))
            sys.stdout.flush()
            sys.stdout.write("stderr: {}".format(server_process.stderr.read()))
            sys.stdout.flush()

    def profile(message_size, response_count):
        """Times a single unary-stream call end-to-end.

        Returns:
          datetime.timedelta: Elapsed wall-clock time for the full stream.
        """
        request = unary_stream_benchmark_pb2.BenchmarkRequest(
            message_size=message_size, response_count=response_count
        )
        with grpc.insecure_channel(
            "[::]:{}".format(_PORT), options=_GRPC_CHANNEL_OPTIONS
        ) as channel:
            stub = (
                unary_stream_benchmark_pb2_grpc.UnaryStreamBenchmarkServiceStub(
                    channel
                )
            )
            start = datetime.datetime.now()
            # wait_for_ready=True blocks until the subprocess server is up.
            call = stub.Benchmark(request, wait_for_ready=True)
            for message in call:
                pass
            end = datetime.datetime.now()
            return end - start

    def main():
        # Prints one latency sample (in seconds) per line.
        with _running_server():
            for i in range(1000):
                latency = profile(_MESSAGE_SIZE, 1024)
                sys.stdout.write("{}\n".format(latency.total_seconds()))
                sys.stdout.flush()

    if __name__ == "__main__":
        main()

except ImportError:
    # NOTE(rbellevi): The test runner should not load this module.
    pass
| 3,693
| 30.844828
| 100
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/stress/client.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Entry point for running stress tests."""
import argparse
from concurrent import futures
import queue
import threading
import grpc
from src.proto.grpc.testing import metrics_pb2_grpc
from src.proto.grpc.testing import test_pb2_grpc
from tests.interop import methods
from tests.interop import resources
from tests.qps import histogram
from tests.stress import metrics_server
from tests.stress import test_runner
def _args():
    """Parse and return the stress-test client's command-line flags."""
    # (flag name, add_argument keyword arguments), in help-display order.
    flag_specs = (
        (
            "--server_addresses",
            dict(
                help="comma separated list of hostname:port to run servers on",
                default="localhost:8080",
                type=str,
            ),
        ),
        (
            "--test_cases",
            dict(
                help="comma separated list of testcase:weighting of tests to run",
                default="large_unary:100",
                type=str,
            ),
        ),
        (
            "--test_duration_secs",
            dict(
                help="number of seconds to run the stress test",
                default=-1,
                type=int,
            ),
        ),
        (
            "--num_channels_per_server",
            dict(help="number of channels per server", default=1, type=int),
        ),
        (
            "--num_stubs_per_channel",
            dict(
                help="number of stubs to create per channel",
                default=1,
                type=int,
            ),
        ),
        (
            "--metrics_port",
            dict(
                help="the port to listen for metrics requests on",
                default=8081,
                type=int,
            ),
        ),
        (
            "--use_test_ca",
            dict(
                help="Whether to use our fake CA. Requires --use_tls=true",
                default=False,
                type=bool,
            ),
        ),
        (
            "--use_tls",
            dict(help="Whether to use TLS", default=False, type=bool),
        ),
        (
            "--server_host_override",
            dict(
                help="the server host to which to claim to connect",
                type=str,
            ),
        ),
    )
    parser = argparse.ArgumentParser(
        description="gRPC Python stress test client"
    )
    for flag_name, flag_kwargs in flag_specs:
        parser.add_argument(flag_name, **flag_kwargs)
    return parser.parse_args()
def _test_case_from_arg(test_case_arg):
    """Map a test-case name from the command line to a methods.TestCase.

    Raises:
      ValueError: If no TestCase member has the given value.
    """
    for candidate in methods.TestCase:
        if candidate.value == test_case_arg:
            return candidate
    raise ValueError("No test case {}!".format(test_case_arg))
def _parse_weighted_test_cases(test_case_args):
    """Parse "name:weight,name:weight,..." into a {TestCase: int} mapping."""
    name_weight_pairs = (
        entry.split(":", 1) for entry in test_case_args.split(",")
    )
    return {
        _test_case_from_arg(name): int(weight)
        for name, weight in name_weight_pairs
    }
def _get_channel(target, args):
    """Create a connected channel to `target`, secure if --use_tls was set."""
    if not args.use_tls:
        channel = grpc.insecure_channel(target)
    else:
        if args.use_test_ca:
            root_certificates = resources.test_root_certificates()
        else:
            root_certificates = None  # will load default roots.
        channel_credentials = grpc.ssl_channel_credentials(
            root_certificates=root_certificates
        )
        channel_options = (
            (
                "grpc.ssl_target_name_override",
                args.server_host_override,
            ),
        )
        channel = grpc.secure_channel(
            target, channel_credentials, options=channel_options
        )

    # waits for the channel to be ready before we start sending messages
    grpc.channel_ready_future(channel).result()
    return channel
def run_test(args):
    """Run the stress client until a runner fails or the duration elapses.

    Starts a metrics-publishing gRPC server on args.metrics_port, then for
    every target in args.server_addresses creates num_channels_per_server
    channels and num_stubs_per_channel stubs per channel, with one
    TestRunner thread per stub.  Blocks on the shared exception queue;
    the first exception a runner reports is re-raised here.
    """
    test_cases = _parse_weighted_test_cases(args.test_cases)
    test_server_targets = args.server_addresses.split(",")
    # Propagate any client exceptions with a queue
    exception_queue = queue.Queue()
    stop_event = threading.Event()
    hist = histogram.Histogram(1, 1)
    runners = []
    # Serve QPS metrics (from the shared histogram) while the test runs.
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=25))
    metrics_pb2_grpc.add_MetricsServiceServicer_to_server(
        metrics_server.MetricsServer(hist), server
    )
    server.add_insecure_port("[::]:{}".format(args.metrics_port))
    server.start()
    # Fan out: one channel per (server, channel slot), one runner per stub.
    for test_server_target in test_server_targets:
        for _ in range(args.num_channels_per_server):
            channel = _get_channel(test_server_target, args)
            for _ in range(args.num_stubs_per_channel):
                stub = test_pb2_grpc.TestServiceStub(channel)
                runner = test_runner.TestRunner(
                    stub, test_cases, hist, exception_queue, stop_event
                )
                runners.append(runner)
    for runner in runners:
        runner.start()
    try:
        timeout_secs = args.test_duration_secs
        if timeout_secs < 0:
            # Negative duration means run until a failure occurs.
            timeout_secs = None
        # Blocks until a runner reports an exception (which is re-raised)
        # or the timeout expires (queue.Empty is raised instead).
        raise exception_queue.get(block=True, timeout=timeout_secs)
    except queue.Empty:
        # No exceptions thrown, success
        pass
    finally:
        stop_event.set()
        for runner in runners:
            runner.join()
        runner = None
        server.stop(None)
if __name__ == "__main__":
    # Script entry point: parse CLI flags and run the stress test.
    run_test(_args())
| 5,447
| 29.606742
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/stress/test_runner.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Thread that sends random weighted requests on a TestService stub."""
import random
import threading
import time
import traceback
def _weighted_test_case_generator(weighted_cases):
weight_sum = sum(weighted_cases.itervalues())
while True:
val = random.uniform(0, weight_sum)
partial_sum = 0
for case in weighted_cases:
partial_sum += weighted_cases[case]
if val <= partial_sum:
yield case
break
class TestRunner(threading.Thread):
    """Thread that runs randomly chosen weighted test cases against a stub.

    Loops until *stop_event* is set.  Each iteration picks a test case,
    runs its interoperability check, and records the latency (in
    nanoseconds) into *hist*.  Any exception is reported on
    *exception_queue* for the coordinating thread to observe.
    """

    def __init__(self, stub, test_cases, hist, exception_queue, stop_event):
        """Args:
        stub: the TestService stub to exercise.
        test_cases: dict mapping test case -> integer weight.
        hist: histogram collecting per-call latencies in nanoseconds.
        exception_queue: queue.Queue receiving wrapped test failures.
        stop_event: threading.Event that terminates the run loop when set.
        """
        super(TestRunner, self).__init__()
        self._exception_queue = exception_queue
        self._stop_event = stop_event
        self._stub = stub
        self._test_cases = _weighted_test_case_generator(test_cases)
        self._histogram = hist

    def run(self):
        while not self._stop_event.is_set():
            # Fix: pre-bind test_case so the except clause cannot hit a
            # NameError if next() itself raises before assignment.
            test_case = None
            try:
                test_case = next(self._test_cases)
                start_time = time.time()
                test_case.test_interoperability(self._stub, None)
                end_time = time.time()
                # Record latency in nanoseconds.
                self._histogram.add((end_time - start_time) * 1e9)
            except Exception as e:  # pylint: disable=broad-except
                traceback.print_exc()
                self._exception_queue.put(
                    Exception(
                        "An exception occurred during test {}".format(
                            test_case
                        ),
                        e,
                    )
                )
| 2,164
| 33.919355
| 76
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/stress/__init__.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/stress/metrics_server.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""MetricsService for publishing stress test qps data."""
import time
from src.proto.grpc.testing import metrics_pb2
from src.proto.grpc.testing import metrics_pb2_grpc
# Name of the single gauge this server publishes: overall client QPS.
GAUGE_NAME = "python_overall_qps"
class MetricsServer(metrics_pb2_grpc.MetricsServiceServicer):
    """Publishes the stress client's queries-per-second as a gRPC gauge."""

    def __init__(self, histogram):
        self._start_time = time.time()
        self._histogram = histogram

    def _get_qps(self):
        """Return QPS since the last call, then reset the measuring window."""
        snapshot_count = self._histogram.get_data().count
        elapsed = time.time() - self._start_time
        # Start a fresh window so each call reports the rate since the last.
        self._histogram.reset()
        self._start_time = time.time()
        return int(snapshot_count / elapsed)

    def GetAllGauges(self, request, context):
        """Return every gauge this server exposes (only the QPS gauge)."""
        qps = self._get_qps()
        return [metrics_pb2.GaugeResponse(name=GAUGE_NAME, long_value=qps)]

    def GetGauge(self, request, context):
        """Return the gauge named in *request*; only GAUGE_NAME exists."""
        if request.name != GAUGE_NAME:
            raise Exception("Gauge {} does not exist".format(request.name))
        qps = self._get_qps()
        return metrics_pb2.GaugeResponse(name=GAUGE_NAME, long_value=qps)
| 1,598
| 34.533333
| 75
|
py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.