repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1
value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1
value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
uber/tchannel-python | tchannel/tornado/peer.py | Peer.incoming_connections | def incoming_connections(self):
"""Returns a list of all incoming connections for this peer."""
# Incoming connections are on the left.
return list(
takewhile(lambda c: c.direction == INCOMING, self.connections)
) | python | def incoming_connections(self):
"""Returns a list of all incoming connections for this peer."""
# Incoming connections are on the left.
return list(
takewhile(lambda c: c.direction == INCOMING, self.connections)
) | [
"def",
"incoming_connections",
"(",
"self",
")",
":",
"# Incoming connections are on the left.",
"return",
"list",
"(",
"takewhile",
"(",
"lambda",
"c",
":",
"c",
".",
"direction",
"==",
"INCOMING",
",",
"self",
".",
"connections",
")",
")"
] | Returns a list of all incoming connections for this peer. | [
"Returns",
"a",
"list",
"of",
"all",
"incoming",
"connections",
"for",
"this",
"peer",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/peer.py#L224-L230 | train | 36,600 |
uber/tchannel-python | tchannel/tornado/peer.py | PeerClientOperation._get_peer_connection | def _get_peer_connection(self, blacklist=None):
"""Find a peer and connect to it.
Returns a ``(peer, connection)`` tuple.
Raises ``NoAvailablePeerError`` if no healthy peers are found.
:param blacklist:
If given, a set of hostports for peers that we must not try.
"""
blacklist = blacklist or set()
peer = None
connection = None
while connection is None:
peer = self._choose(blacklist)
if not peer:
raise NoAvailablePeerError(
"Can't find an available peer for '%s'" % self.service
)
try:
connection = yield peer.connect()
except NetworkError as e:
log.info(
'Failed to connect to %s. Trying a different host.',
peer.hostport,
exc_info=e,
)
connection = None
blacklist.add(peer.hostport)
raise gen.Return((peer, connection)) | python | def _get_peer_connection(self, blacklist=None):
"""Find a peer and connect to it.
Returns a ``(peer, connection)`` tuple.
Raises ``NoAvailablePeerError`` if no healthy peers are found.
:param blacklist:
If given, a set of hostports for peers that we must not try.
"""
blacklist = blacklist or set()
peer = None
connection = None
while connection is None:
peer = self._choose(blacklist)
if not peer:
raise NoAvailablePeerError(
"Can't find an available peer for '%s'" % self.service
)
try:
connection = yield peer.connect()
except NetworkError as e:
log.info(
'Failed to connect to %s. Trying a different host.',
peer.hostport,
exc_info=e,
)
connection = None
blacklist.add(peer.hostport)
raise gen.Return((peer, connection)) | [
"def",
"_get_peer_connection",
"(",
"self",
",",
"blacklist",
"=",
"None",
")",
":",
"blacklist",
"=",
"blacklist",
"or",
"set",
"(",
")",
"peer",
"=",
"None",
"connection",
"=",
"None",
"while",
"connection",
"is",
"None",
":",
"peer",
"=",
"self",
".",... | Find a peer and connect to it.
Returns a ``(peer, connection)`` tuple.
Raises ``NoAvailablePeerError`` if no healthy peers are found.
:param blacklist:
If given, a set of hostports for peers that we must not try. | [
"Find",
"a",
"peer",
"and",
"connect",
"to",
"it",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/peer.py#L314-L349 | train | 36,601 |
uber/tchannel-python | tchannel/tornado/peer.py | PeerClientOperation.send | def send(
self, arg1, arg2, arg3,
headers=None,
retry_limit=None,
ttl=None,
):
"""Make a request to the Peer.
:param arg1:
String or Stream containing the contents of arg1. If None, an empty
stream is used.
:param arg2:
String or Stream containing the contents of arg2. If None, an empty
stream is used.
:param arg3:
String or Stream containing the contents of arg3. If None, an empty
stream is used.
:param headers:
Headers will be put in the message as protocol header.
:param retry_limit:
Maximum number of retries will perform on the message. If the number
is 0, it means no retry.
:param ttl:
Timeout for each request (second).
:return:
Future that contains the response from the peer.
"""
# find a peer connection
# If we can't find available peer at the first time, we throw
# NoAvailablePeerError. Later during retry, if we can't find available
# peer, we throw exceptions from retry not NoAvailablePeerError.
peer, connection = yield self._get_peer_connection()
arg1, arg2, arg3 = (
maybe_stream(arg1), maybe_stream(arg2), maybe_stream(arg3)
)
if retry_limit is None:
retry_limit = DEFAULT_RETRY_LIMIT
ttl = ttl or DEFAULT_TIMEOUT
# hack to get endpoint from arg_1 for trace name
arg1.close()
endpoint = yield read_full(arg1)
# set default transport headers
headers = headers or {}
for k, v in self.headers.iteritems():
headers.setdefault(k, v)
if self.tracing_span is None:
tracer = ClientTracer(channel=self.tchannel)
self.tracing_span, _ = tracer.start_span(
service=self.service, endpoint=endpoint,
hostport=self._hostport, encoding=self.headers.get('as')
)
request = Request(
service=self.service,
argstreams=[InMemStream(endpoint), arg2, arg3],
id=connection.writer.next_message_id(),
headers=headers,
endpoint=endpoint,
ttl=ttl,
tracing=tracing.span_to_tracing_field(self.tracing_span)
)
# only retry on non-stream request
if request.is_streaming_request or self._hostport:
retry_limit = 0
if request.is_streaming_request:
request.ttl = 0
try:
with self.tracing_span: # to ensure span is finished
response = yield self.send_with_retry(
request, peer, retry_limit, connection
)
except Exception as e:
# event: on_exception
exc_info = sys.exc_info()
yield self.tchannel.event_emitter.fire(
EventType.on_exception, request, e,
)
six.reraise(*exc_info)
log.debug("Got response %s", response)
raise gen.Return(response) | python | def send(
self, arg1, arg2, arg3,
headers=None,
retry_limit=None,
ttl=None,
):
"""Make a request to the Peer.
:param arg1:
String or Stream containing the contents of arg1. If None, an empty
stream is used.
:param arg2:
String or Stream containing the contents of arg2. If None, an empty
stream is used.
:param arg3:
String or Stream containing the contents of arg3. If None, an empty
stream is used.
:param headers:
Headers will be put in the message as protocol header.
:param retry_limit:
Maximum number of retries will perform on the message. If the number
is 0, it means no retry.
:param ttl:
Timeout for each request (second).
:return:
Future that contains the response from the peer.
"""
# find a peer connection
# If we can't find available peer at the first time, we throw
# NoAvailablePeerError. Later during retry, if we can't find available
# peer, we throw exceptions from retry not NoAvailablePeerError.
peer, connection = yield self._get_peer_connection()
arg1, arg2, arg3 = (
maybe_stream(arg1), maybe_stream(arg2), maybe_stream(arg3)
)
if retry_limit is None:
retry_limit = DEFAULT_RETRY_LIMIT
ttl = ttl or DEFAULT_TIMEOUT
# hack to get endpoint from arg_1 for trace name
arg1.close()
endpoint = yield read_full(arg1)
# set default transport headers
headers = headers or {}
for k, v in self.headers.iteritems():
headers.setdefault(k, v)
if self.tracing_span is None:
tracer = ClientTracer(channel=self.tchannel)
self.tracing_span, _ = tracer.start_span(
service=self.service, endpoint=endpoint,
hostport=self._hostport, encoding=self.headers.get('as')
)
request = Request(
service=self.service,
argstreams=[InMemStream(endpoint), arg2, arg3],
id=connection.writer.next_message_id(),
headers=headers,
endpoint=endpoint,
ttl=ttl,
tracing=tracing.span_to_tracing_field(self.tracing_span)
)
# only retry on non-stream request
if request.is_streaming_request or self._hostport:
retry_limit = 0
if request.is_streaming_request:
request.ttl = 0
try:
with self.tracing_span: # to ensure span is finished
response = yield self.send_with_retry(
request, peer, retry_limit, connection
)
except Exception as e:
# event: on_exception
exc_info = sys.exc_info()
yield self.tchannel.event_emitter.fire(
EventType.on_exception, request, e,
)
six.reraise(*exc_info)
log.debug("Got response %s", response)
raise gen.Return(response) | [
"def",
"send",
"(",
"self",
",",
"arg1",
",",
"arg2",
",",
"arg3",
",",
"headers",
"=",
"None",
",",
"retry_limit",
"=",
"None",
",",
"ttl",
"=",
"None",
",",
")",
":",
"# find a peer connection",
"# If we can't find available peer at the first time, we throw",
... | Make a request to the Peer.
:param arg1:
String or Stream containing the contents of arg1. If None, an empty
stream is used.
:param arg2:
String or Stream containing the contents of arg2. If None, an empty
stream is used.
:param arg3:
String or Stream containing the contents of arg3. If None, an empty
stream is used.
:param headers:
Headers will be put in the message as protocol header.
:param retry_limit:
Maximum number of retries will perform on the message. If the number
is 0, it means no retry.
:param ttl:
Timeout for each request (second).
:return:
Future that contains the response from the peer. | [
"Make",
"a",
"request",
"to",
"the",
"Peer",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/peer.py#L352-L442 | train | 36,602 |
uber/tchannel-python | tchannel/tornado/peer.py | PeerGroup.clear | def clear(self):
"""Reset this PeerGroup.
This closes all connections to all known peers and forgets about
these peers.
:returns:
A Future that resolves with a value of None when the operation
has finished
"""
try:
for peer in self._peers.values():
peer.close()
finally:
self._peers = {}
self._resetting = False | python | def clear(self):
"""Reset this PeerGroup.
This closes all connections to all known peers and forgets about
these peers.
:returns:
A Future that resolves with a value of None when the operation
has finished
"""
try:
for peer in self._peers.values():
peer.close()
finally:
self._peers = {}
self._resetting = False | [
"def",
"clear",
"(",
"self",
")",
":",
"try",
":",
"for",
"peer",
"in",
"self",
".",
"_peers",
".",
"values",
"(",
")",
":",
"peer",
".",
"close",
"(",
")",
"finally",
":",
"self",
".",
"_peers",
"=",
"{",
"}",
"self",
".",
"_resetting",
"=",
"... | Reset this PeerGroup.
This closes all connections to all known peers and forgets about
these peers.
:returns:
A Future that resolves with a value of None when the operation
has finished | [
"Reset",
"this",
"PeerGroup",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/peer.py#L592-L607 | train | 36,603 |
uber/tchannel-python | tchannel/tornado/peer.py | PeerGroup.remove | def remove(self, hostport):
"""Delete the Peer for the given host port.
Does nothing if a matching Peer does not exist.
:returns: The removed Peer
"""
assert hostport, "hostport is required"
peer = self._peers.pop(hostport, None)
peer_in_heap = peer and peer.index != -1
if peer_in_heap:
self.peer_heap.remove_peer(peer)
return peer | python | def remove(self, hostport):
"""Delete the Peer for the given host port.
Does nothing if a matching Peer does not exist.
:returns: The removed Peer
"""
assert hostport, "hostport is required"
peer = self._peers.pop(hostport, None)
peer_in_heap = peer and peer.index != -1
if peer_in_heap:
self.peer_heap.remove_peer(peer)
return peer | [
"def",
"remove",
"(",
"self",
",",
"hostport",
")",
":",
"assert",
"hostport",
",",
"\"hostport is required\"",
"peer",
"=",
"self",
".",
"_peers",
".",
"pop",
"(",
"hostport",
",",
"None",
")",
"peer_in_heap",
"=",
"peer",
"and",
"peer",
".",
"index",
"... | Delete the Peer for the given host port.
Does nothing if a matching Peer does not exist.
:returns: The removed Peer | [
"Delete",
"the",
"Peer",
"for",
"the",
"given",
"host",
"port",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/peer.py#L617-L629 | train | 36,604 |
uber/tchannel-python | tchannel/tornado/peer.py | PeerGroup.get | def get(self, hostport):
"""Get a Peer for the given destination.
A new Peer is added to the peer heap and returned if one does
not already exist for the given host-port. Otherwise, the
existing Peer is returned.
"""
assert hostport, "hostport is required"
assert isinstance(hostport, basestring), "hostport must be a string"
if hostport not in self._peers:
self._add(hostport)
return self._peers[hostport] | python | def get(self, hostport):
"""Get a Peer for the given destination.
A new Peer is added to the peer heap and returned if one does
not already exist for the given host-port. Otherwise, the
existing Peer is returned.
"""
assert hostport, "hostport is required"
assert isinstance(hostport, basestring), "hostport must be a string"
if hostport not in self._peers:
self._add(hostport)
return self._peers[hostport] | [
"def",
"get",
"(",
"self",
",",
"hostport",
")",
":",
"assert",
"hostport",
",",
"\"hostport is required\"",
"assert",
"isinstance",
"(",
"hostport",
",",
"basestring",
")",
",",
"\"hostport must be a string\"",
"if",
"hostport",
"not",
"in",
"self",
".",
"_peer... | Get a Peer for the given destination.
A new Peer is added to the peer heap and returned if one does
not already exist for the given host-port. Otherwise, the
existing Peer is returned. | [
"Get",
"a",
"Peer",
"for",
"the",
"given",
"destination",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/peer.py#L631-L644 | train | 36,605 |
uber/tchannel-python | tchannel/tornado/peer.py | PeerGroup._add | def _add(self, hostport):
"""Creates a peer from the hostport and adds it to the peer heap"""
peer = self.peer_class(
tchannel=self.tchannel,
hostport=hostport,
on_conn_change=self._update_heap,
)
peer.rank = self.rank_calculator.get_rank(peer)
self._peers[peer.hostport] = peer
self.peer_heap.add_and_shuffle(peer) | python | def _add(self, hostport):
"""Creates a peer from the hostport and adds it to the peer heap"""
peer = self.peer_class(
tchannel=self.tchannel,
hostport=hostport,
on_conn_change=self._update_heap,
)
peer.rank = self.rank_calculator.get_rank(peer)
self._peers[peer.hostport] = peer
self.peer_heap.add_and_shuffle(peer) | [
"def",
"_add",
"(",
"self",
",",
"hostport",
")",
":",
"peer",
"=",
"self",
".",
"peer_class",
"(",
"tchannel",
"=",
"self",
".",
"tchannel",
",",
"hostport",
"=",
"hostport",
",",
"on_conn_change",
"=",
"self",
".",
"_update_heap",
",",
")",
"peer",
"... | Creates a peer from the hostport and adds it to the peer heap | [
"Creates",
"a",
"peer",
"from",
"the",
"hostport",
"and",
"adds",
"it",
"to",
"the",
"peer",
"heap"
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/peer.py#L646-L656 | train | 36,606 |
uber/tchannel-python | tchannel/tornado/peer.py | PeerGroup._update_heap | def _update_heap(self, peer):
"""Recalculate the peer's rank and update itself in the peer heap."""
rank = self.rank_calculator.get_rank(peer)
if rank == peer.rank:
return
peer.rank = rank
self.peer_heap.update_peer(peer) | python | def _update_heap(self, peer):
"""Recalculate the peer's rank and update itself in the peer heap."""
rank = self.rank_calculator.get_rank(peer)
if rank == peer.rank:
return
peer.rank = rank
self.peer_heap.update_peer(peer) | [
"def",
"_update_heap",
"(",
"self",
",",
"peer",
")",
":",
"rank",
"=",
"self",
".",
"rank_calculator",
".",
"get_rank",
"(",
"peer",
")",
"if",
"rank",
"==",
"peer",
".",
"rank",
":",
"return",
"peer",
".",
"rank",
"=",
"rank",
"self",
".",
"peer_he... | Recalculate the peer's rank and update itself in the peer heap. | [
"Recalculate",
"the",
"peer",
"s",
"rank",
"and",
"update",
"itself",
"in",
"the",
"peer",
"heap",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/peer.py#L658-L665 | train | 36,607 |
uber/tchannel-python | tchannel/tornado/peer.py | PeerGroup._get_isolated | def _get_isolated(self, hostport):
"""Get a Peer for the given destination for a request.
A new Peer is added and returned if one does not already exist for the
given host-port. Otherwise, the existing Peer is returned.
**NOTE** new peers will not be added to the peer heap.
"""
assert hostport, "hostport is required"
if hostport not in self._peers:
# Add a peer directly from a hostport, do NOT add it to the peer
# heap
peer = self.peer_class(
tchannel=self.tchannel,
hostport=hostport,
)
self._peers[peer.hostport] = peer
return self._peers[hostport] | python | def _get_isolated(self, hostport):
"""Get a Peer for the given destination for a request.
A new Peer is added and returned if one does not already exist for the
given host-port. Otherwise, the existing Peer is returned.
**NOTE** new peers will not be added to the peer heap.
"""
assert hostport, "hostport is required"
if hostport not in self._peers:
# Add a peer directly from a hostport, do NOT add it to the peer
# heap
peer = self.peer_class(
tchannel=self.tchannel,
hostport=hostport,
)
self._peers[peer.hostport] = peer
return self._peers[hostport] | [
"def",
"_get_isolated",
"(",
"self",
",",
"hostport",
")",
":",
"assert",
"hostport",
",",
"\"hostport is required\"",
"if",
"hostport",
"not",
"in",
"self",
".",
"_peers",
":",
"# Add a peer directly from a hostport, do NOT add it to the peer",
"# heap",
"peer",
"=",
... | Get a Peer for the given destination for a request.
A new Peer is added and returned if one does not already exist for the
given host-port. Otherwise, the existing Peer is returned.
**NOTE** new peers will not be added to the peer heap. | [
"Get",
"a",
"Peer",
"for",
"the",
"given",
"destination",
"for",
"a",
"request",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/peer.py#L667-L685 | train | 36,608 |
uber/tchannel-python | tchannel/tornado/peer.py | PeerGroup.request | def request(self, service, hostport=None, **kwargs):
"""Initiate a new request through this PeerGroup.
:param hostport:
If specified, requests will be sent to the specific host.
Otherwise, a known peer will be picked at random.
:param service:
Name of the service being called. Defaults to an empty string.
"""
return PeerClientOperation(
peer_group=self,
service=service,
hostport=hostport,
**kwargs) | python | def request(self, service, hostport=None, **kwargs):
"""Initiate a new request through this PeerGroup.
:param hostport:
If specified, requests will be sent to the specific host.
Otherwise, a known peer will be picked at random.
:param service:
Name of the service being called. Defaults to an empty string.
"""
return PeerClientOperation(
peer_group=self,
service=service,
hostport=hostport,
**kwargs) | [
"def",
"request",
"(",
"self",
",",
"service",
",",
"hostport",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"PeerClientOperation",
"(",
"peer_group",
"=",
"self",
",",
"service",
"=",
"service",
",",
"hostport",
"=",
"hostport",
",",
"*",
... | Initiate a new request through this PeerGroup.
:param hostport:
If specified, requests will be sent to the specific host.
Otherwise, a known peer will be picked at random.
:param service:
Name of the service being called. Defaults to an empty string. | [
"Initiate",
"a",
"new",
"request",
"through",
"this",
"PeerGroup",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/peer.py#L697-L710 | train | 36,609 |
uber/tchannel-python | tchannel/tornado/peer.py | PeerGroup.choose | def choose(self, hostport=None, blacklist=None):
"""Choose a Peer that matches the given criteria.
:param hostport:
Specifies that the returned Peer must be for the given host-port.
Without this, all peers managed by this PeerGroup are
candidates.
:param blacklist:
Peers on the blacklist won't be chosen.
:returns:
A Peer that matches all the requested criteria or None if no such
Peer was found.
"""
blacklist = blacklist or set()
if hostport:
return self._get_isolated(hostport)
return self.peer_heap.smallest_peer(
(lambda p: p.hostport not in blacklist and not p.is_ephemeral),
) | python | def choose(self, hostport=None, blacklist=None):
"""Choose a Peer that matches the given criteria.
:param hostport:
Specifies that the returned Peer must be for the given host-port.
Without this, all peers managed by this PeerGroup are
candidates.
:param blacklist:
Peers on the blacklist won't be chosen.
:returns:
A Peer that matches all the requested criteria or None if no such
Peer was found.
"""
blacklist = blacklist or set()
if hostport:
return self._get_isolated(hostport)
return self.peer_heap.smallest_peer(
(lambda p: p.hostport not in blacklist and not p.is_ephemeral),
) | [
"def",
"choose",
"(",
"self",
",",
"hostport",
"=",
"None",
",",
"blacklist",
"=",
"None",
")",
":",
"blacklist",
"=",
"blacklist",
"or",
"set",
"(",
")",
"if",
"hostport",
":",
"return",
"self",
".",
"_get_isolated",
"(",
"hostport",
")",
"return",
"s... | Choose a Peer that matches the given criteria.
:param hostport:
Specifies that the returned Peer must be for the given host-port.
Without this, all peers managed by this PeerGroup are
candidates.
:param blacklist:
Peers on the blacklist won't be chosen.
:returns:
A Peer that matches all the requested criteria or None if no such
Peer was found. | [
"Choose",
"a",
"Peer",
"that",
"matches",
"the",
"given",
"criteria",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/peer.py#L712-L732 | train | 36,610 |
uber/tchannel-python | tchannel/_future.py | fail_to | def fail_to(future):
"""A decorator for function callbacks to catch uncaught non-async
exceptions and forward them to the given future.
The primary use for this is to catch exceptions in async callbacks and
propagate them to futures. For example, consider,
.. code-block:: python
answer = Future()
def on_done(future):
foo = bar()
answer.set_result(foo)
some_async_operation().add_done_callback(on_done)
If ``bar()`` fails, ``answer`` will never get filled with an exception or
a result. Now if we change ``on_done`` to,
.. code-block:: python
@fail_to(answer)
def on_done(future):
foo = bar()
answer.set_result(foo)
Uncaught exceptions in ``on_done`` will be caught and propagated to
``answer``. Note that ``on_done`` will return None if an exception was
caught.
:param answer:
Future to which the result will be written.
"""
assert is_future(future), 'you forgot to pass a future'
def decorator(f):
@wraps(f)
def new_f(*args, **kwargs):
try:
return f(*args, **kwargs)
except Exception:
future.set_exc_info(sys.exc_info())
return new_f
return decorator | python | def fail_to(future):
"""A decorator for function callbacks to catch uncaught non-async
exceptions and forward them to the given future.
The primary use for this is to catch exceptions in async callbacks and
propagate them to futures. For example, consider,
.. code-block:: python
answer = Future()
def on_done(future):
foo = bar()
answer.set_result(foo)
some_async_operation().add_done_callback(on_done)
If ``bar()`` fails, ``answer`` will never get filled with an exception or
a result. Now if we change ``on_done`` to,
.. code-block:: python
@fail_to(answer)
def on_done(future):
foo = bar()
answer.set_result(foo)
Uncaught exceptions in ``on_done`` will be caught and propagated to
``answer``. Note that ``on_done`` will return None if an exception was
caught.
:param answer:
Future to which the result will be written.
"""
assert is_future(future), 'you forgot to pass a future'
def decorator(f):
@wraps(f)
def new_f(*args, **kwargs):
try:
return f(*args, **kwargs)
except Exception:
future.set_exc_info(sys.exc_info())
return new_f
return decorator | [
"def",
"fail_to",
"(",
"future",
")",
":",
"assert",
"is_future",
"(",
"future",
")",
",",
"'you forgot to pass a future'",
"def",
"decorator",
"(",
"f",
")",
":",
"@",
"wraps",
"(",
"f",
")",
"def",
"new_f",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
... | A decorator for function callbacks to catch uncaught non-async
exceptions and forward them to the given future.
The primary use for this is to catch exceptions in async callbacks and
propagate them to futures. For example, consider,
.. code-block:: python
answer = Future()
def on_done(future):
foo = bar()
answer.set_result(foo)
some_async_operation().add_done_callback(on_done)
If ``bar()`` fails, ``answer`` will never get filled with an exception or
a result. Now if we change ``on_done`` to,
.. code-block:: python
@fail_to(answer)
def on_done(future):
foo = bar()
answer.set_result(foo)
Uncaught exceptions in ``on_done`` will be caught and propagated to
``answer``. Note that ``on_done`` will return None if an exception was
caught.
:param answer:
Future to which the result will be written. | [
"A",
"decorator",
"for",
"function",
"callbacks",
"to",
"catch",
"uncaught",
"non",
"-",
"async",
"exceptions",
"and",
"forward",
"them",
"to",
"the",
"given",
"future",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/_future.py#L31-L78 | train | 36,611 |
uber/tchannel-python | tchannel/tornado/util.py | get_arg | def get_arg(context, index):
"""get value from arg stream in async way"""
if index < len(context.argstreams):
arg = ""
chunk = yield context.argstreams[index].read()
while chunk:
arg += chunk
chunk = yield context.argstreams[index].read()
raise tornado.gen.Return(arg)
else:
raise TChannelError() | python | def get_arg(context, index):
"""get value from arg stream in async way"""
if index < len(context.argstreams):
arg = ""
chunk = yield context.argstreams[index].read()
while chunk:
arg += chunk
chunk = yield context.argstreams[index].read()
raise tornado.gen.Return(arg)
else:
raise TChannelError() | [
"def",
"get_arg",
"(",
"context",
",",
"index",
")",
":",
"if",
"index",
"<",
"len",
"(",
"context",
".",
"argstreams",
")",
":",
"arg",
"=",
"\"\"",
"chunk",
"=",
"yield",
"context",
".",
"argstreams",
"[",
"index",
"]",
".",
"read",
"(",
")",
"wh... | get value from arg stream in async way | [
"get",
"value",
"from",
"arg",
"stream",
"in",
"async",
"way"
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/util.py#L30-L41 | train | 36,612 |
uber/tchannel-python | tchannel/_queue.py | Queue.put | def put(self, value):
"""Puts an item into the queue.
Returns a Future that resolves to None once the value has been
accepted by the queue.
"""
io_loop = IOLoop.current()
new_hole = Future()
new_put = Future()
new_put.set_result(new_hole)
with self._lock:
self._put, put = new_put, self._put
answer = Future()
def _on_put(future):
if future.exception(): # pragma: no cover (never happens)
return answer.set_exc_info(future.exc_info())
old_hole = put.result()
old_hole.set_result(Node(value, new_hole))
answer.set_result(None)
io_loop.add_future(put, _on_put)
return answer | python | def put(self, value):
"""Puts an item into the queue.
Returns a Future that resolves to None once the value has been
accepted by the queue.
"""
io_loop = IOLoop.current()
new_hole = Future()
new_put = Future()
new_put.set_result(new_hole)
with self._lock:
self._put, put = new_put, self._put
answer = Future()
def _on_put(future):
if future.exception(): # pragma: no cover (never happens)
return answer.set_exc_info(future.exc_info())
old_hole = put.result()
old_hole.set_result(Node(value, new_hole))
answer.set_result(None)
io_loop.add_future(put, _on_put)
return answer | [
"def",
"put",
"(",
"self",
",",
"value",
")",
":",
"io_loop",
"=",
"IOLoop",
".",
"current",
"(",
")",
"new_hole",
"=",
"Future",
"(",
")",
"new_put",
"=",
"Future",
"(",
")",
"new_put",
".",
"set_result",
"(",
"new_hole",
")",
"with",
"self",
".",
... | Puts an item into the queue.
Returns a Future that resolves to None once the value has been
accepted by the queue. | [
"Puts",
"an",
"item",
"into",
"the",
"queue",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/_queue.py#L107-L133 | train | 36,613 |
uber/tchannel-python | tchannel/_queue.py | Queue.get_nowait | def get_nowait(self):
"""Returns a value from the queue without waiting.
Raises ``QueueEmpty`` if no values are available right now.
"""
new_get = Future()
with self._lock:
if not self._get.done():
raise QueueEmpty
get, self._get = self._get, new_get
hole = get.result()
if not hole.done():
# Restore the unfinished hole.
new_get.set_result(hole)
raise QueueEmpty
node = hole.result()
value = node.value
new_hole, node.next = node.next, None
new_get.set_result(new_hole)
return value | python | def get_nowait(self):
"""Returns a value from the queue without waiting.
Raises ``QueueEmpty`` if no values are available right now.
"""
new_get = Future()
with self._lock:
if not self._get.done():
raise QueueEmpty
get, self._get = self._get, new_get
hole = get.result()
if not hole.done():
# Restore the unfinished hole.
new_get.set_result(hole)
raise QueueEmpty
node = hole.result()
value = node.value
new_hole, node.next = node.next, None
new_get.set_result(new_hole)
return value | [
"def",
"get_nowait",
"(",
"self",
")",
":",
"new_get",
"=",
"Future",
"(",
")",
"with",
"self",
".",
"_lock",
":",
"if",
"not",
"self",
".",
"_get",
".",
"done",
"(",
")",
":",
"raise",
"QueueEmpty",
"get",
",",
"self",
".",
"_get",
"=",
"self",
... | Returns a value from the queue without waiting.
Raises ``QueueEmpty`` if no values are available right now. | [
"Returns",
"a",
"value",
"from",
"the",
"queue",
"without",
"waiting",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/_queue.py#L135-L157 | train | 36,614 |
uber/tchannel-python | tchannel/_queue.py | Queue.get | def get(self):
"""Gets the next item from the queue.
Returns a Future that resolves to the next item once it is available.
"""
io_loop = IOLoop.current()
new_get = Future()
with self._lock:
get, self._get = self._get, new_get
answer = Future()
def _on_node(future):
if future.exception(): # pragma: no cover (never happens)
return answer.set_exc_info(future.exc_info())
node = future.result()
value = node.value
new_hole, node.next = node.next, None
new_get.set_result(new_hole)
answer.set_result(value)
def _on_get(future):
if future.exception(): # pragma: no cover (never happens)
return answer.set_exc_info(future.exc_info())
hole = future.result()
io_loop.add_future(hole, _on_node)
io_loop.add_future(get, _on_get)
return answer | python | def get(self):
"""Gets the next item from the queue.
Returns a Future that resolves to the next item once it is available.
"""
io_loop = IOLoop.current()
new_get = Future()
with self._lock:
get, self._get = self._get, new_get
answer = Future()
def _on_node(future):
if future.exception(): # pragma: no cover (never happens)
return answer.set_exc_info(future.exc_info())
node = future.result()
value = node.value
new_hole, node.next = node.next, None
new_get.set_result(new_hole)
answer.set_result(value)
def _on_get(future):
if future.exception(): # pragma: no cover (never happens)
return answer.set_exc_info(future.exc_info())
hole = future.result()
io_loop.add_future(hole, _on_node)
io_loop.add_future(get, _on_get)
return answer | [
"def",
"get",
"(",
"self",
")",
":",
"io_loop",
"=",
"IOLoop",
".",
"current",
"(",
")",
"new_get",
"=",
"Future",
"(",
")",
"with",
"self",
".",
"_lock",
":",
"get",
",",
"self",
".",
"_get",
"=",
"self",
".",
"_get",
",",
"new_get",
"answer",
"... | Gets the next item from the queue.
Returns a Future that resolves to the next item once it is available. | [
"Gets",
"the",
"next",
"item",
"from",
"the",
"queue",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/_queue.py#L159-L190 | train | 36,615 |
uber/tchannel-python | tchannel/messages/call_continue.py | CallContinueMessage.fragment | def fragment(self, space_left, fragment_msg):
"""Streaming Message got fragmented based on
payload size. All the data within space_left
will be kept. All the rest will be shifted to
next fragment message.
:param space_left:
space left for current frame
:param fragment_msg:
the type is either CallRequestMessage or
CallResponseMessage
:return: None if there is space left
or next fragment message
"""
new_args = []
key_length = 2 # 2bytes for size
for i, arg in enumerate(self.args):
if space_left >= key_length:
space_left -= key_length
if arg is not None:
arg_length = len(arg)
if space_left < arg_length:
fragment_msg.args.append(arg[space_left:])
new_args.append(arg[:space_left])
space_left = 0
else:
new_args.append(arg)
space_left -= arg_length
if space_left <= key_length:
# boundary for arg
fragment_msg.args.append("")
else:
new_args.append("")
else:
for l in range(i, len(self.args)):
fragment_msg.args.append(self.args[l])
break
self.args = new_args
if space_left >= 0 and len(fragment_msg.args) == 0:
# don't need to fragment any more
return None
else:
self.flags = FlagsType.fragment
fragment_msg.id = self.id
return fragment_msg | python | def fragment(self, space_left, fragment_msg):
"""Streaming Message got fragmented based on
payload size. All the data within space_left
will be kept. All the rest will be shifted to
next fragment message.
:param space_left:
space left for current frame
:param fragment_msg:
the type is either CallRequestMessage or
CallResponseMessage
:return: None if there is space left
or next fragment message
"""
new_args = []
key_length = 2 # 2bytes for size
for i, arg in enumerate(self.args):
if space_left >= key_length:
space_left -= key_length
if arg is not None:
arg_length = len(arg)
if space_left < arg_length:
fragment_msg.args.append(arg[space_left:])
new_args.append(arg[:space_left])
space_left = 0
else:
new_args.append(arg)
space_left -= arg_length
if space_left <= key_length:
# boundary for arg
fragment_msg.args.append("")
else:
new_args.append("")
else:
for l in range(i, len(self.args)):
fragment_msg.args.append(self.args[l])
break
self.args = new_args
if space_left >= 0 and len(fragment_msg.args) == 0:
# don't need to fragment any more
return None
else:
self.flags = FlagsType.fragment
fragment_msg.id = self.id
return fragment_msg | [
"def",
"fragment",
"(",
"self",
",",
"space_left",
",",
"fragment_msg",
")",
":",
"new_args",
"=",
"[",
"]",
"key_length",
"=",
"2",
"# 2bytes for size",
"for",
"i",
",",
"arg",
"in",
"enumerate",
"(",
"self",
".",
"args",
")",
":",
"if",
"space_left",
... | Streaming Message got fragmented based on
payload size. All the data within space_left
will be kept. All the rest will be shifted to
next fragment message.
:param space_left:
space left for current frame
:param fragment_msg:
the type is either CallRequestMessage or
CallResponseMessage
:return: None if there is space left
or next fragment message | [
"Streaming",
"Message",
"got",
"fragmented",
"based",
"on",
"payload",
"size",
".",
"All",
"the",
"data",
"within",
"space_left",
"will",
"be",
"kept",
".",
"All",
"the",
"rest",
"will",
"be",
"shifted",
"to",
"next",
"fragment",
"message",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/messages/call_continue.py#L55-L101 | train | 36,616 |
uber/tchannel-python | tchannel/response.py | response_from_mixed | def response_from_mixed(mixed):
"""Create Response from mixed input."""
# if none then give empty Response
if mixed is None:
return Response()
# if not Response, then treat like body
if not isinstance(mixed, Response):
return Response(mixed)
# it's already a Response
return mixed | python | def response_from_mixed(mixed):
"""Create Response from mixed input."""
# if none then give empty Response
if mixed is None:
return Response()
# if not Response, then treat like body
if not isinstance(mixed, Response):
return Response(mixed)
# it's already a Response
return mixed | [
"def",
"response_from_mixed",
"(",
"mixed",
")",
":",
"# if none then give empty Response",
"if",
"mixed",
"is",
"None",
":",
"return",
"Response",
"(",
")",
"# if not Response, then treat like body",
"if",
"not",
"isinstance",
"(",
"mixed",
",",
"Response",
")",
":... | Create Response from mixed input. | [
"Create",
"Response",
"from",
"mixed",
"input",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/response.py#L104-L116 | train | 36,617 |
uber/tchannel-python | tchannel/event.py | EventEmitter.register_hook | def register_hook(self, hook, event_type=None):
"""
If ``event_type`` is provided, then ``hook`` will be called whenever
that event is fired.
If no ``event_type`` is specifid, but ``hook`` implements any methods
with names matching an event hook, then those will be registered with
their corresponding events. This allows for more stateful, class-based
event handlers.
"""
if event_type is not None:
assert type(event_type) is int, "register hooks with int values"
return self.hooks[event_type].append(hook)
for event_type in EventType._fields:
func = getattr(hook, event_type, None)
if callable(func):
event_value = getattr(EventType, event_type)
self.register_hook(func, event_value) | python | def register_hook(self, hook, event_type=None):
"""
If ``event_type`` is provided, then ``hook`` will be called whenever
that event is fired.
If no ``event_type`` is specifid, but ``hook`` implements any methods
with names matching an event hook, then those will be registered with
their corresponding events. This allows for more stateful, class-based
event handlers.
"""
if event_type is not None:
assert type(event_type) is int, "register hooks with int values"
return self.hooks[event_type].append(hook)
for event_type in EventType._fields:
func = getattr(hook, event_type, None)
if callable(func):
event_value = getattr(EventType, event_type)
self.register_hook(func, event_value) | [
"def",
"register_hook",
"(",
"self",
",",
"hook",
",",
"event_type",
"=",
"None",
")",
":",
"if",
"event_type",
"is",
"not",
"None",
":",
"assert",
"type",
"(",
"event_type",
")",
"is",
"int",
",",
"\"register hooks with int values\"",
"return",
"self",
".",... | If ``event_type`` is provided, then ``hook`` will be called whenever
that event is fired.
If no ``event_type`` is specifid, but ``hook`` implements any methods
with names matching an event hook, then those will be registered with
their corresponding events. This allows for more stateful, class-based
event handlers. | [
"If",
"event_type",
"is",
"provided",
"then",
"hook",
"will",
"be",
"called",
"whenever",
"that",
"event",
"is",
"fired",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/event.py#L133-L151 | train | 36,618 |
uber/tchannel-python | tchannel/container/heap.py | init | def init(h):
"""Initialize existing object into the heap."""
# heapify
n = h.size()
for i in six.moves.range(int(math.floor(n/2)) - 1, -1, -1):
down(h, i, n) | python | def init(h):
"""Initialize existing object into the heap."""
# heapify
n = h.size()
for i in six.moves.range(int(math.floor(n/2)) - 1, -1, -1):
down(h, i, n) | [
"def",
"init",
"(",
"h",
")",
":",
"# heapify",
"n",
"=",
"h",
".",
"size",
"(",
")",
"for",
"i",
"in",
"six",
".",
"moves",
".",
"range",
"(",
"int",
"(",
"math",
".",
"floor",
"(",
"n",
"/",
"2",
")",
")",
"-",
"1",
",",
"-",
"1",
",",
... | Initialize existing object into the heap. | [
"Initialize",
"existing",
"object",
"into",
"the",
"heap",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/container/heap.py#L74-L79 | train | 36,619 |
uber/tchannel-python | tchannel/container/heap.py | push | def push(h, x):
"""Push a new value into heap."""
h.push(x)
up(h, h.size()-1) | python | def push(h, x):
"""Push a new value into heap."""
h.push(x)
up(h, h.size()-1) | [
"def",
"push",
"(",
"h",
",",
"x",
")",
":",
"h",
".",
"push",
"(",
"x",
")",
"up",
"(",
"h",
",",
"h",
".",
"size",
"(",
")",
"-",
"1",
")"
] | Push a new value into heap. | [
"Push",
"a",
"new",
"value",
"into",
"heap",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/container/heap.py#L82-L85 | train | 36,620 |
uber/tchannel-python | tchannel/container/heap.py | pop | def pop(h):
"""Pop the heap value from the heap."""
n = h.size() - 1
h.swap(0, n)
down(h, 0, n)
return h.pop() | python | def pop(h):
"""Pop the heap value from the heap."""
n = h.size() - 1
h.swap(0, n)
down(h, 0, n)
return h.pop() | [
"def",
"pop",
"(",
"h",
")",
":",
"n",
"=",
"h",
".",
"size",
"(",
")",
"-",
"1",
"h",
".",
"swap",
"(",
"0",
",",
"n",
")",
"down",
"(",
"h",
",",
"0",
",",
"n",
")",
"return",
"h",
".",
"pop",
"(",
")"
] | Pop the heap value from the heap. | [
"Pop",
"the",
"heap",
"value",
"from",
"the",
"heap",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/container/heap.py#L88-L93 | train | 36,621 |
uber/tchannel-python | tchannel/container/heap.py | remove | def remove(h, i):
"""Remove the item at position i of the heap."""
n = h.size() - 1
if n != i:
h.swap(i, n)
down(h, i, n)
up(h, i)
return h.pop() | python | def remove(h, i):
"""Remove the item at position i of the heap."""
n = h.size() - 1
if n != i:
h.swap(i, n)
down(h, i, n)
up(h, i)
return h.pop() | [
"def",
"remove",
"(",
"h",
",",
"i",
")",
":",
"n",
"=",
"h",
".",
"size",
"(",
")",
"-",
"1",
"if",
"n",
"!=",
"i",
":",
"h",
".",
"swap",
"(",
"i",
",",
"n",
")",
"down",
"(",
"h",
",",
"i",
",",
"n",
")",
"up",
"(",
"h",
",",
"i"... | Remove the item at position i of the heap. | [
"Remove",
"the",
"item",
"at",
"position",
"i",
"of",
"the",
"heap",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/container/heap.py#L96-L104 | train | 36,622 |
uber/tchannel-python | tchannel/container/heap.py | fix | def fix(h, i):
"""Rearrange the heap after the item at position i got updated."""
down(h, i, h.size())
up(h, i) | python | def fix(h, i):
"""Rearrange the heap after the item at position i got updated."""
down(h, i, h.size())
up(h, i) | [
"def",
"fix",
"(",
"h",
",",
"i",
")",
":",
"down",
"(",
"h",
",",
"i",
",",
"h",
".",
"size",
"(",
")",
")",
"up",
"(",
"h",
",",
"i",
")"
] | Rearrange the heap after the item at position i got updated. | [
"Rearrange",
"the",
"heap",
"after",
"the",
"item",
"at",
"position",
"i",
"got",
"updated",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/container/heap.py#L107-L110 | train | 36,623 |
uber/tchannel-python | tchannel/container/heap.py | smallest | def smallest(heap, predicate):
"""Finds the index of the smallest item in the heap that matches the given
predicate.
:param heap:
Heap on which this search is being performed.
:param predicate:
Function that accepts an item from the heap and returns true or false.
:returns:
Index of the first item for which ``predicate`` returned true.
:raises NoMatchError:
If no matching items were found.
"""
n = heap.size()
# items contains indexes of items yet to be checked.
items = deque([0])
while items:
current = items.popleft()
if current >= n:
continue
if predicate(heap.peek(current)):
return current
child1 = 2 * current + 1
child2 = child1 + 1
if child1 < n and child2 < n and heap.lt(child2, child1):
# make sure we check the smaller child first.
child1, child2 = child2, child1
if child1 < n:
items.append(child1)
if child2 < n:
items.append(child2)
raise NoMatchError() | python | def smallest(heap, predicate):
"""Finds the index of the smallest item in the heap that matches the given
predicate.
:param heap:
Heap on which this search is being performed.
:param predicate:
Function that accepts an item from the heap and returns true or false.
:returns:
Index of the first item for which ``predicate`` returned true.
:raises NoMatchError:
If no matching items were found.
"""
n = heap.size()
# items contains indexes of items yet to be checked.
items = deque([0])
while items:
current = items.popleft()
if current >= n:
continue
if predicate(heap.peek(current)):
return current
child1 = 2 * current + 1
child2 = child1 + 1
if child1 < n and child2 < n and heap.lt(child2, child1):
# make sure we check the smaller child first.
child1, child2 = child2, child1
if child1 < n:
items.append(child1)
if child2 < n:
items.append(child2)
raise NoMatchError() | [
"def",
"smallest",
"(",
"heap",
",",
"predicate",
")",
":",
"n",
"=",
"heap",
".",
"size",
"(",
")",
"# items contains indexes of items yet to be checked.",
"items",
"=",
"deque",
"(",
"[",
"0",
"]",
")",
"while",
"items",
":",
"current",
"=",
"items",
"."... | Finds the index of the smallest item in the heap that matches the given
predicate.
:param heap:
Heap on which this search is being performed.
:param predicate:
Function that accepts an item from the heap and returns true or false.
:returns:
Index of the first item for which ``predicate`` returned true.
:raises NoMatchError:
If no matching items were found. | [
"Finds",
"the",
"index",
"of",
"the",
"smallest",
"item",
"in",
"the",
"heap",
"that",
"matches",
"the",
"given",
"predicate",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/container/heap.py#L141-L179 | train | 36,624 |
uber/tchannel-python | tchannel/tracing.py | ServerTracer.start_basic_span | def start_basic_span(self, request):
"""
Start tracing span from the protocol's `tracing` fields.
This will only work if the `tracer` supports Zipkin-style span context.
:param request: inbound request
:type request: tchannel.tornado.request.Request
"""
# noinspection PyBroadException
try:
# Currently Java does not populate Tracing field, so do not
# mistaken it for a real trace ID.
if request.tracing.trace_id:
context = self.tracer.extract(
format=ZIPKIN_SPAN_FORMAT,
carrier=request.tracing)
self.span = self.tracer.start_span(
operation_name=request.endpoint,
child_of=context,
tags={tags.SPAN_KIND: tags.SPAN_KIND_RPC_SERVER},
)
except opentracing.UnsupportedFormatException:
pass # tracer might not support Zipkin format
except:
log.exception('Cannot extract tracing span from Trace field') | python | def start_basic_span(self, request):
"""
Start tracing span from the protocol's `tracing` fields.
This will only work if the `tracer` supports Zipkin-style span context.
:param request: inbound request
:type request: tchannel.tornado.request.Request
"""
# noinspection PyBroadException
try:
# Currently Java does not populate Tracing field, so do not
# mistaken it for a real trace ID.
if request.tracing.trace_id:
context = self.tracer.extract(
format=ZIPKIN_SPAN_FORMAT,
carrier=request.tracing)
self.span = self.tracer.start_span(
operation_name=request.endpoint,
child_of=context,
tags={tags.SPAN_KIND: tags.SPAN_KIND_RPC_SERVER},
)
except opentracing.UnsupportedFormatException:
pass # tracer might not support Zipkin format
except:
log.exception('Cannot extract tracing span from Trace field') | [
"def",
"start_basic_span",
"(",
"self",
",",
"request",
")",
":",
"# noinspection PyBroadException",
"try",
":",
"# Currently Java does not populate Tracing field, so do not",
"# mistaken it for a real trace ID.",
"if",
"request",
".",
"tracing",
".",
"trace_id",
":",
"contex... | Start tracing span from the protocol's `tracing` fields.
This will only work if the `tracer` supports Zipkin-style span context.
:param request: inbound request
:type request: tchannel.tornado.request.Request | [
"Start",
"tracing",
"span",
"from",
"the",
"protocol",
"s",
"tracing",
"fields",
".",
"This",
"will",
"only",
"work",
"if",
"the",
"tracer",
"supports",
"Zipkin",
"-",
"style",
"span",
"context",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tracing.py#L111-L135 | train | 36,625 |
uber/tchannel-python | tchannel/tracing.py | ServerTracer.start_span | def start_span(self, request, headers, peer_host, peer_port):
"""
Start a new server-side span. If the span has already been started
by `start_basic_span`, this method only adds baggage from the headers.
:param request: inbound tchannel.tornado.request.Request
:param headers: dictionary containing parsed application headers
:return:
"""
parent_context = None
# noinspection PyBroadException
try:
if headers and hasattr(headers, 'iteritems'):
tracing_headers = {
k[len(TRACING_KEY_PREFIX):]: v
for k, v in headers.iteritems()
if k.startswith(TRACING_KEY_PREFIX)
}
parent_context = self.tracer.extract(
format=opentracing.Format.TEXT_MAP,
carrier=tracing_headers
)
if self.span and parent_context:
# we already started a span from Tracing fields,
# so only copy baggage from the headers.
for k, v in parent_context.baggage.iteritems():
self.span.set_baggage_item(k, v)
except:
log.exception('Cannot extract tracing span from headers')
if self.span is None:
self.span = self.tracer.start_span(
operation_name=request.endpoint,
child_of=parent_context,
tags={tags.SPAN_KIND: tags.SPAN_KIND_RPC_SERVER},
)
if 'cn' in request.headers:
self.span.set_tag(tags.PEER_SERVICE, request.headers['cn'])
if peer_host:
self.span.set_tag(tags.PEER_HOST_IPV4, peer_host)
if peer_port:
self.span.set_tag(tags.PEER_PORT, peer_port)
if 'as' in request.headers:
self.span.set_tag('as', request.headers['as'])
return self.span | python | def start_span(self, request, headers, peer_host, peer_port):
"""
Start a new server-side span. If the span has already been started
by `start_basic_span`, this method only adds baggage from the headers.
:param request: inbound tchannel.tornado.request.Request
:param headers: dictionary containing parsed application headers
:return:
"""
parent_context = None
# noinspection PyBroadException
try:
if headers and hasattr(headers, 'iteritems'):
tracing_headers = {
k[len(TRACING_KEY_PREFIX):]: v
for k, v in headers.iteritems()
if k.startswith(TRACING_KEY_PREFIX)
}
parent_context = self.tracer.extract(
format=opentracing.Format.TEXT_MAP,
carrier=tracing_headers
)
if self.span and parent_context:
# we already started a span from Tracing fields,
# so only copy baggage from the headers.
for k, v in parent_context.baggage.iteritems():
self.span.set_baggage_item(k, v)
except:
log.exception('Cannot extract tracing span from headers')
if self.span is None:
self.span = self.tracer.start_span(
operation_name=request.endpoint,
child_of=parent_context,
tags={tags.SPAN_KIND: tags.SPAN_KIND_RPC_SERVER},
)
if 'cn' in request.headers:
self.span.set_tag(tags.PEER_SERVICE, request.headers['cn'])
if peer_host:
self.span.set_tag(tags.PEER_HOST_IPV4, peer_host)
if peer_port:
self.span.set_tag(tags.PEER_PORT, peer_port)
if 'as' in request.headers:
self.span.set_tag('as', request.headers['as'])
return self.span | [
"def",
"start_span",
"(",
"self",
",",
"request",
",",
"headers",
",",
"peer_host",
",",
"peer_port",
")",
":",
"parent_context",
"=",
"None",
"# noinspection PyBroadException",
"try",
":",
"if",
"headers",
"and",
"hasattr",
"(",
"headers",
",",
"'iteritems'",
... | Start a new server-side span. If the span has already been started
by `start_basic_span`, this method only adds baggage from the headers.
:param request: inbound tchannel.tornado.request.Request
:param headers: dictionary containing parsed application headers
:return: | [
"Start",
"a",
"new",
"server",
"-",
"side",
"span",
".",
"If",
"the",
"span",
"has",
"already",
"been",
"started",
"by",
"start_basic_span",
"this",
"method",
"only",
"adds",
"baggage",
"from",
"the",
"headers",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tracing.py#L137-L180 | train | 36,626 |
uber/tchannel-python | tchannel/errors.py | TChannelError.from_code | def from_code(cls, code, **kw):
"""Construct a ``TChannelError`` instance from an error code.
This will return the appropriate class type for the given code.
"""
return {
TIMEOUT: TimeoutError,
CANCELED: CanceledError,
BUSY: BusyError,
DECLINED: DeclinedError,
UNEXPECTED_ERROR: UnexpectedError,
BAD_REQUEST: BadRequestError,
NETWORK_ERROR: NetworkError,
UNHEALTHY: UnhealthyError,
FATAL: FatalProtocolError,
}[code](**kw) | python | def from_code(cls, code, **kw):
"""Construct a ``TChannelError`` instance from an error code.
This will return the appropriate class type for the given code.
"""
return {
TIMEOUT: TimeoutError,
CANCELED: CanceledError,
BUSY: BusyError,
DECLINED: DeclinedError,
UNEXPECTED_ERROR: UnexpectedError,
BAD_REQUEST: BadRequestError,
NETWORK_ERROR: NetworkError,
UNHEALTHY: UnhealthyError,
FATAL: FatalProtocolError,
}[code](**kw) | [
"def",
"from_code",
"(",
"cls",
",",
"code",
",",
"*",
"*",
"kw",
")",
":",
"return",
"{",
"TIMEOUT",
":",
"TimeoutError",
",",
"CANCELED",
":",
"CanceledError",
",",
"BUSY",
":",
"BusyError",
",",
"DECLINED",
":",
"DeclinedError",
",",
"UNEXPECTED_ERROR",... | Construct a ``TChannelError`` instance from an error code.
This will return the appropriate class type for the given code. | [
"Construct",
"a",
"TChannelError",
"instance",
"from",
"an",
"error",
"code",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/errors.py#L84-L99 | train | 36,627 |
uber/tchannel-python | tchannel/thrift/server.py | register | def register(dispatcher, service_module, handler, method=None, service=None):
"""Registers a Thrift service method with the given RequestDispatcher.
.. code-block:: python
# For,
#
# service HelloWorld { string hello(1: string name); }
import tchannel.thrift
import HelloWorld
def hello(request, response):
name = request.args.name
response.write_result("Hello, %s" % name)
dispatcher = RequestDispatcher()
tchannel.thrift.register(dispatcher, HelloWorld, hello)
:param dispatcher:
TChannel dispatcher with which the Thrift service will be registered.
:param service_module:
The service module generated by Thrift. This module contains the
service ``Iface``, ``Client``, ``Processor``, etc. classes.
:param handler:
A function implementing the request handler. The function must accept
a ``request``, a ``response``, and a ``tchannel``.
:param service:
Thrift service name. This is the `service` name specified in the
Thrift IDL. If omitted, it is automatically determined based on the
name of ``service_module``.
:param method:
Name of the method. Defaults to the name of the ``handler`` function.
"""
if not service:
service = service_module.__name__.rsplit('.', 1)[-1]
if not method:
method = handler.__name__
assert service, 'A service name could not be determined'
assert method, 'A method name could not be determined'
assert hasattr(service_module.Iface, method), (
"Service %s doesn't define method %s" % (service, method)
)
assert hasattr(service_module, method + '_result'), (
"oneway methods are not yet supported"
)
endpoint = '%s::%s' % (service, method)
args_type = getattr(service_module, method + '_args')
result_type = getattr(service_module, method + '_result')
# if the dispatcher is set to deal with handlers that
# return responses, then use new api, else use deprecated
if dispatcher._handler_returns_response:
new_handler = build_handler(result_type, handler)
else:
new_handler = deprecated_build_handler(result_type, handler)
dispatcher.register(
endpoint,
new_handler,
ThriftSerializer(args_type),
ThriftSerializer(result_type)
)
return handler | python | def register(dispatcher, service_module, handler, method=None, service=None):
"""Registers a Thrift service method with the given RequestDispatcher.
.. code-block:: python
# For,
#
# service HelloWorld { string hello(1: string name); }
import tchannel.thrift
import HelloWorld
def hello(request, response):
name = request.args.name
response.write_result("Hello, %s" % name)
dispatcher = RequestDispatcher()
tchannel.thrift.register(dispatcher, HelloWorld, hello)
:param dispatcher:
TChannel dispatcher with which the Thrift service will be registered.
:param service_module:
The service module generated by Thrift. This module contains the
service ``Iface``, ``Client``, ``Processor``, etc. classes.
:param handler:
A function implementing the request handler. The function must accept
a ``request``, a ``response``, and a ``tchannel``.
:param service:
Thrift service name. This is the `service` name specified in the
Thrift IDL. If omitted, it is automatically determined based on the
name of ``service_module``.
:param method:
Name of the method. Defaults to the name of the ``handler`` function.
"""
if not service:
service = service_module.__name__.rsplit('.', 1)[-1]
if not method:
method = handler.__name__
assert service, 'A service name could not be determined'
assert method, 'A method name could not be determined'
assert hasattr(service_module.Iface, method), (
"Service %s doesn't define method %s" % (service, method)
)
assert hasattr(service_module, method + '_result'), (
"oneway methods are not yet supported"
)
endpoint = '%s::%s' % (service, method)
args_type = getattr(service_module, method + '_args')
result_type = getattr(service_module, method + '_result')
# if the dispatcher is set to deal with handlers that
# return responses, then use new api, else use deprecated
if dispatcher._handler_returns_response:
new_handler = build_handler(result_type, handler)
else:
new_handler = deprecated_build_handler(result_type, handler)
dispatcher.register(
endpoint,
new_handler,
ThriftSerializer(args_type),
ThriftSerializer(result_type)
)
return handler | [
"def",
"register",
"(",
"dispatcher",
",",
"service_module",
",",
"handler",
",",
"method",
"=",
"None",
",",
"service",
"=",
"None",
")",
":",
"if",
"not",
"service",
":",
"service",
"=",
"service_module",
".",
"__name__",
".",
"rsplit",
"(",
"'.'",
","... | Registers a Thrift service method with the given RequestDispatcher.
.. code-block:: python
# For,
#
# service HelloWorld { string hello(1: string name); }
import tchannel.thrift
import HelloWorld
def hello(request, response):
name = request.args.name
response.write_result("Hello, %s" % name)
dispatcher = RequestDispatcher()
tchannel.thrift.register(dispatcher, HelloWorld, hello)
:param dispatcher:
TChannel dispatcher with which the Thrift service will be registered.
:param service_module:
The service module generated by Thrift. This module contains the
service ``Iface``, ``Client``, ``Processor``, etc. classes.
:param handler:
A function implementing the request handler. The function must accept
a ``request``, a ``response``, and a ``tchannel``.
:param service:
Thrift service name. This is the `service` name specified in the
Thrift IDL. If omitted, it is automatically determined based on the
name of ``service_module``.
:param method:
Name of the method. Defaults to the name of the ``handler`` function. | [
"Registers",
"a",
"Thrift",
"service",
"method",
"with",
"the",
"given",
"RequestDispatcher",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/thrift/server.py#L35-L100 | train | 36,628 |
uber/tchannel-python | tchannel/thrift/server.py | ThriftResponse.write_result | def write_result(self, result):
"""Send back the result of this call.
Only one of this and `write_exc_info` may be called.
:param result:
Return value of the call
"""
assert not self.finished, "Already sent a response"
if not self.result.thrift_spec:
self.finished = True
return
spec = self.result.thrift_spec[0]
if result is not None:
assert spec, "Tried to return a result for a void method."
setattr(self.result, spec[2], result)
self.finished = True | python | def write_result(self, result):
"""Send back the result of this call.
Only one of this and `write_exc_info` may be called.
:param result:
Return value of the call
"""
assert not self.finished, "Already sent a response"
if not self.result.thrift_spec:
self.finished = True
return
spec = self.result.thrift_spec[0]
if result is not None:
assert spec, "Tried to return a result for a void method."
setattr(self.result, spec[2], result)
self.finished = True | [
"def",
"write_result",
"(",
"self",
",",
"result",
")",
":",
"assert",
"not",
"self",
".",
"finished",
",",
"\"Already sent a response\"",
"if",
"not",
"self",
".",
"result",
".",
"thrift_spec",
":",
"self",
".",
"finished",
"=",
"True",
"return",
"spec",
... | Send back the result of this call.
Only one of this and `write_exc_info` may be called.
:param result:
Return value of the call | [
"Send",
"back",
"the",
"result",
"of",
"this",
"call",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/thrift/server.py#L206-L225 | train | 36,629 |
uber/tchannel-python | tchannel/thrift/server.py | ThriftResponse.write_exc_info | def write_exc_info(self, exc_info=None):
"""Write exception information to the response.
Only one of this and ``write_result`` may be called.
:param exc_info:
3-tuple of exception information. If omitted, the last exception
will be retrieved using ``sys.exc_info()``.
"""
exc_info = exc_info or sys.exc_info()
exc = exc_info[1]
self.code = StatusCode.error
for spec in self.result.thrift_spec[1:]:
if spec and isinstance(exc, spec[3][0]):
assert not self.finished, "Already sent a response"
setattr(self.result, spec[2], exc)
self.finished = True
return
# Re-raise the exception (with the same traceback) if it didn't match.
raise exc_info[0], exc_info[1], exc_info[2] | python | def write_exc_info(self, exc_info=None):
"""Write exception information to the response.
Only one of this and ``write_result`` may be called.
:param exc_info:
3-tuple of exception information. If omitted, the last exception
will be retrieved using ``sys.exc_info()``.
"""
exc_info = exc_info or sys.exc_info()
exc = exc_info[1]
self.code = StatusCode.error
for spec in self.result.thrift_spec[1:]:
if spec and isinstance(exc, spec[3][0]):
assert not self.finished, "Already sent a response"
setattr(self.result, spec[2], exc)
self.finished = True
return
# Re-raise the exception (with the same traceback) if it didn't match.
raise exc_info[0], exc_info[1], exc_info[2] | [
"def",
"write_exc_info",
"(",
"self",
",",
"exc_info",
"=",
"None",
")",
":",
"exc_info",
"=",
"exc_info",
"or",
"sys",
".",
"exc_info",
"(",
")",
"exc",
"=",
"exc_info",
"[",
"1",
"]",
"self",
".",
"code",
"=",
"StatusCode",
".",
"error",
"for",
"sp... | Write exception information to the response.
Only one of this and ``write_result`` may be called.
:param exc_info:
3-tuple of exception information. If omitted, the last exception
will be retrieved using ``sys.exc_info()``. | [
"Write",
"exception",
"information",
"to",
"the",
"response",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/thrift/server.py#L227-L248 | train | 36,630 |
uber/tchannel-python | tchannel/tchannel.py | TChannel.call | def call(
self,
scheme,
service,
arg1,
arg2=None,
arg3=None,
timeout=None,
retry_on=None,
retry_limit=None,
routing_delegate=None,
hostport=None,
shard_key=None,
tracing_span=None,
trace=None, # to trace or not, defaults to self._dep_tchannel.trace
caller_name=None,
):
"""Make low-level requests to TChannel services.
**Note:** Usually you would interact with a higher-level arg scheme
like :py:class:`tchannel.schemes.JsonArgScheme` or
:py:class:`tchannel.schemes.ThriftArgScheme`.
"""
# TODO - don't use asserts for public API
assert format, "format is required"
assert service, "service is required"
assert arg1, "arg1 is required"
# default args
if arg2 is None:
arg2 = ""
if arg3 is None:
arg3 = ""
if timeout is None:
timeout = DEFAULT_TIMEOUT
if retry_on is None:
retry_on = retry.DEFAULT
if retry_limit is None:
retry_limit = retry.DEFAULT_RETRY_LIMIT
# TODO - allow filters/steps for serialization, tracing, etc...
tracing.apply_trace_flag(tracing_span, trace, self._dep_tchannel.trace)
# calls tchannel.tornado.peer.PeerClientOperation.__init__
operation = self._dep_tchannel.request(
service=service,
hostport=hostport,
arg_scheme=scheme,
retry=retry_on,
tracing_span=tracing_span
)
# fire operation
transport_headers = {
transport.SCHEME: scheme,
transport.CALLER_NAME: caller_name or self.name,
}
if shard_key:
transport_headers[transport.SHARD_KEY] = shard_key
if routing_delegate:
transport_headers[transport.ROUTING_DELEGATE] = routing_delegate
response = yield operation.send(
arg1=arg1,
arg2=arg2,
arg3=arg3,
headers=transport_headers,
retry_limit=retry_limit,
ttl=timeout,
)
# unwrap response
body = yield response.get_body()
headers = yield response.get_header()
t = TransportHeaders.from_dict(response.headers)
result = Response(
body=body,
headers=headers,
transport=t,
status=response.code,
)
raise gen.Return(result) | python | def call(
self,
scheme,
service,
arg1,
arg2=None,
arg3=None,
timeout=None,
retry_on=None,
retry_limit=None,
routing_delegate=None,
hostport=None,
shard_key=None,
tracing_span=None,
trace=None, # to trace or not, defaults to self._dep_tchannel.trace
caller_name=None,
):
"""Make low-level requests to TChannel services.
**Note:** Usually you would interact with a higher-level arg scheme
like :py:class:`tchannel.schemes.JsonArgScheme` or
:py:class:`tchannel.schemes.ThriftArgScheme`.
"""
# TODO - don't use asserts for public API
assert format, "format is required"
assert service, "service is required"
assert arg1, "arg1 is required"
# default args
if arg2 is None:
arg2 = ""
if arg3 is None:
arg3 = ""
if timeout is None:
timeout = DEFAULT_TIMEOUT
if retry_on is None:
retry_on = retry.DEFAULT
if retry_limit is None:
retry_limit = retry.DEFAULT_RETRY_LIMIT
# TODO - allow filters/steps for serialization, tracing, etc...
tracing.apply_trace_flag(tracing_span, trace, self._dep_tchannel.trace)
# calls tchannel.tornado.peer.PeerClientOperation.__init__
operation = self._dep_tchannel.request(
service=service,
hostport=hostport,
arg_scheme=scheme,
retry=retry_on,
tracing_span=tracing_span
)
# fire operation
transport_headers = {
transport.SCHEME: scheme,
transport.CALLER_NAME: caller_name or self.name,
}
if shard_key:
transport_headers[transport.SHARD_KEY] = shard_key
if routing_delegate:
transport_headers[transport.ROUTING_DELEGATE] = routing_delegate
response = yield operation.send(
arg1=arg1,
arg2=arg2,
arg3=arg3,
headers=transport_headers,
retry_limit=retry_limit,
ttl=timeout,
)
# unwrap response
body = yield response.get_body()
headers = yield response.get_header()
t = TransportHeaders.from_dict(response.headers)
result = Response(
body=body,
headers=headers,
transport=t,
status=response.code,
)
raise gen.Return(result) | [
"def",
"call",
"(",
"self",
",",
"scheme",
",",
"service",
",",
"arg1",
",",
"arg2",
"=",
"None",
",",
"arg3",
"=",
"None",
",",
"timeout",
"=",
"None",
",",
"retry_on",
"=",
"None",
",",
"retry_limit",
"=",
"None",
",",
"routing_delegate",
"=",
"Non... | Make low-level requests to TChannel services.
**Note:** Usually you would interact with a higher-level arg scheme
like :py:class:`tchannel.schemes.JsonArgScheme` or
:py:class:`tchannel.schemes.ThriftArgScheme`. | [
"Make",
"low",
"-",
"level",
"requests",
"to",
"TChannel",
"services",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tchannel.py#L151-L235 | train | 36,631 |
uber/tchannel-python | tchannel/thrift/module.py | thrift_request_builder | def thrift_request_builder(service, thrift_module, hostport=None,
thrift_class_name=None):
"""Provide TChannel compatibility with Thrift-generated modules.
The service this creates is meant to be used with TChannel like so:
.. code-block:: python
from tchannel import TChannel, thrift_request_builder
from some_other_service_thrift import some_other_service
tchannel = TChannel('my-service')
some_service = thrift_request_builder(
service='some-other-service',
thrift_module=some_other_service
)
resp = tchannel.thrift(
some_service.fetchPotatoes()
)
.. deprecated:: 0.18.0
Please switch to :py:func:`tchannel.thrift.load`.
.. warning::
This API is deprecated and will be removed in a future version.
:param string service:
Name of Thrift service to call. This is used internally for
grouping and stats, but also to route requests over Hyperbahn.
:param thrift_module:
The top-level module of the Apache Thrift generated code for
the service that will be called.
:param string hostport:
When calling the Thrift service directly, and not over Hyperbahn,
this 'host:port' value should be provided.
:param string thrift_class_name:
When the Apache Thrift generated Iface class name does not match
thrift_module, then this should be provided.
"""
# start with a request maker instance
maker = ThriftRequestMaker(
service=service,
thrift_module=thrift_module,
hostport=hostport,
thrift_class_name=thrift_class_name
)
# create methods that mirror thrift client
# and each return ThriftRequest
methods = _create_methods(thrift_module)
# then attach to instane
for name, method in methods.iteritems():
method = types.MethodType(method, maker, ThriftRequestMaker)
setattr(maker, name, method)
return maker | python | def thrift_request_builder(service, thrift_module, hostport=None,
thrift_class_name=None):
"""Provide TChannel compatibility with Thrift-generated modules.
The service this creates is meant to be used with TChannel like so:
.. code-block:: python
from tchannel import TChannel, thrift_request_builder
from some_other_service_thrift import some_other_service
tchannel = TChannel('my-service')
some_service = thrift_request_builder(
service='some-other-service',
thrift_module=some_other_service
)
resp = tchannel.thrift(
some_service.fetchPotatoes()
)
.. deprecated:: 0.18.0
Please switch to :py:func:`tchannel.thrift.load`.
.. warning::
This API is deprecated and will be removed in a future version.
:param string service:
Name of Thrift service to call. This is used internally for
grouping and stats, but also to route requests over Hyperbahn.
:param thrift_module:
The top-level module of the Apache Thrift generated code for
the service that will be called.
:param string hostport:
When calling the Thrift service directly, and not over Hyperbahn,
this 'host:port' value should be provided.
:param string thrift_class_name:
When the Apache Thrift generated Iface class name does not match
thrift_module, then this should be provided.
"""
# start with a request maker instance
maker = ThriftRequestMaker(
service=service,
thrift_module=thrift_module,
hostport=hostport,
thrift_class_name=thrift_class_name
)
# create methods that mirror thrift client
# and each return ThriftRequest
methods = _create_methods(thrift_module)
# then attach to instane
for name, method in methods.iteritems():
method = types.MethodType(method, maker, ThriftRequestMaker)
setattr(maker, name, method)
return maker | [
"def",
"thrift_request_builder",
"(",
"service",
",",
"thrift_module",
",",
"hostport",
"=",
"None",
",",
"thrift_class_name",
"=",
"None",
")",
":",
"# start with a request maker instance",
"maker",
"=",
"ThriftRequestMaker",
"(",
"service",
"=",
"service",
",",
"t... | Provide TChannel compatibility with Thrift-generated modules.
The service this creates is meant to be used with TChannel like so:
.. code-block:: python
from tchannel import TChannel, thrift_request_builder
from some_other_service_thrift import some_other_service
tchannel = TChannel('my-service')
some_service = thrift_request_builder(
service='some-other-service',
thrift_module=some_other_service
)
resp = tchannel.thrift(
some_service.fetchPotatoes()
)
.. deprecated:: 0.18.0
Please switch to :py:func:`tchannel.thrift.load`.
.. warning::
This API is deprecated and will be removed in a future version.
:param string service:
Name of Thrift service to call. This is used internally for
grouping and stats, but also to route requests over Hyperbahn.
:param thrift_module:
The top-level module of the Apache Thrift generated code for
the service that will be called.
:param string hostport:
When calling the Thrift service directly, and not over Hyperbahn,
this 'host:port' value should be provided.
:param string thrift_class_name:
When the Apache Thrift generated Iface class name does not match
thrift_module, then this should be provided. | [
"Provide",
"TChannel",
"compatibility",
"with",
"Thrift",
"-",
"generated",
"modules",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/thrift/module.py#L40-L104 | train | 36,632 |
uber/tchannel-python | tchannel/thrift/module.py | ThriftRequest.read_body | def read_body(self, body):
"""Handles the response body for this request.
If the response body includes a result, returns the result unwrapped
from the response union. If the response contains an exception, raises
that exception.
"""
result_spec = self.result_type.thrift_spec
# raise application exception, if present
for exc_spec in result_spec[1:]:
exc = getattr(body, exc_spec[2])
if exc is not None:
raise exc
# success - non-void
if len(result_spec) >= 1 and result_spec[0] is not None:
# value expected, but got none
# TODO - server side should use this same logic
if body.success is None:
raise ValueExpectedError(
'Expected a value to be returned for %s, '
'but recieved None - only void procedures can '
'return None.' % self.endpoint
)
return body.success
# success - void
else:
return None | python | def read_body(self, body):
"""Handles the response body for this request.
If the response body includes a result, returns the result unwrapped
from the response union. If the response contains an exception, raises
that exception.
"""
result_spec = self.result_type.thrift_spec
# raise application exception, if present
for exc_spec in result_spec[1:]:
exc = getattr(body, exc_spec[2])
if exc is not None:
raise exc
# success - non-void
if len(result_spec) >= 1 and result_spec[0] is not None:
# value expected, but got none
# TODO - server side should use this same logic
if body.success is None:
raise ValueExpectedError(
'Expected a value to be returned for %s, '
'but recieved None - only void procedures can '
'return None.' % self.endpoint
)
return body.success
# success - void
else:
return None | [
"def",
"read_body",
"(",
"self",
",",
"body",
")",
":",
"result_spec",
"=",
"self",
".",
"result_type",
".",
"thrift_spec",
"# raise application exception, if present",
"for",
"exc_spec",
"in",
"result_spec",
"[",
"1",
":",
"]",
":",
"exc",
"=",
"getattr",
"("... | Handles the response body for this request.
If the response body includes a result, returns the result unwrapped
from the response union. If the response contains an exception, raises
that exception. | [
"Handles",
"the",
"response",
"body",
"for",
"this",
"request",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/thrift/module.py#L209-L240 | train | 36,633 |
uber/tchannel-python | tchannel/peer_strategy.py | PreferIncomingCalculator.get_rank | def get_rank(self, peer):
"""Calculate the peer rank based on connections.
If the peer has no incoming connections, it will have largest rank.
In our peer selection strategy, the largest number has least priority
in the heap.
If the peer has incoming connections, we will return number of outbound
pending requests and responses.
:param peer: instance of `tchannel.tornado.peer.Peer`
:return: rank of the peer
"""
if not peer.connections:
return self.TIERS[0]
if not peer.has_incoming_connections:
return self.TIERS[1] + peer.total_outbound_pendings
return self.TIERS[2] + peer.total_outbound_pendings | python | def get_rank(self, peer):
"""Calculate the peer rank based on connections.
If the peer has no incoming connections, it will have largest rank.
In our peer selection strategy, the largest number has least priority
in the heap.
If the peer has incoming connections, we will return number of outbound
pending requests and responses.
:param peer: instance of `tchannel.tornado.peer.Peer`
:return: rank of the peer
"""
if not peer.connections:
return self.TIERS[0]
if not peer.has_incoming_connections:
return self.TIERS[1] + peer.total_outbound_pendings
return self.TIERS[2] + peer.total_outbound_pendings | [
"def",
"get_rank",
"(",
"self",
",",
"peer",
")",
":",
"if",
"not",
"peer",
".",
"connections",
":",
"return",
"self",
".",
"TIERS",
"[",
"0",
"]",
"if",
"not",
"peer",
".",
"has_incoming_connections",
":",
"return",
"self",
".",
"TIERS",
"[",
"1",
"... | Calculate the peer rank based on connections.
If the peer has no incoming connections, it will have largest rank.
In our peer selection strategy, the largest number has least priority
in the heap.
If the peer has incoming connections, we will return number of outbound
pending requests and responses.
:param peer: instance of `tchannel.tornado.peer.Peer`
:return: rank of the peer | [
"Calculate",
"the",
"peer",
"rank",
"based",
"on",
"connections",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/peer_strategy.py#L41-L60 | train | 36,634 |
uber/tchannel-python | tchannel/singleton.py | TChannel.prepare | def prepare(cls, *args, **kwargs):
"""Set arguments to be used when instantiating a TChannel instance.
Arguments are the same as :py:meth:`tchannel.TChannel.__init__`.
"""
cls.args = args
cls.kwargs = kwargs
cls.prepared = True | python | def prepare(cls, *args, **kwargs):
"""Set arguments to be used when instantiating a TChannel instance.
Arguments are the same as :py:meth:`tchannel.TChannel.__init__`.
"""
cls.args = args
cls.kwargs = kwargs
cls.prepared = True | [
"def",
"prepare",
"(",
"cls",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"cls",
".",
"args",
"=",
"args",
"cls",
".",
"kwargs",
"=",
"kwargs",
"cls",
".",
"prepared",
"=",
"True"
] | Set arguments to be used when instantiating a TChannel instance.
Arguments are the same as :py:meth:`tchannel.TChannel.__init__`. | [
"Set",
"arguments",
"to",
"be",
"used",
"when",
"instantiating",
"a",
"TChannel",
"instance",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/singleton.py#L44-L51 | train | 36,635 |
uber/tchannel-python | tchannel/singleton.py | TChannel.reset | def reset(cls, *args, **kwargs):
"""Undo call to prepare, useful for testing."""
cls.local.tchannel = None
cls.args = None
cls.kwargs = None
cls.prepared = False | python | def reset(cls, *args, **kwargs):
"""Undo call to prepare, useful for testing."""
cls.local.tchannel = None
cls.args = None
cls.kwargs = None
cls.prepared = False | [
"def",
"reset",
"(",
"cls",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"cls",
".",
"local",
".",
"tchannel",
"=",
"None",
"cls",
".",
"args",
"=",
"None",
"cls",
".",
"kwargs",
"=",
"None",
"cls",
".",
"prepared",
"=",
"False"
] | Undo call to prepare, useful for testing. | [
"Undo",
"call",
"to",
"prepare",
"useful",
"for",
"testing",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/singleton.py#L54-L59 | train | 36,636 |
uber/tchannel-python | tchannel/singleton.py | TChannel.get_instance | def get_instance(cls):
"""Get a configured, thread-safe, singleton TChannel instance.
:returns tchannel.TChannel:
"""
if not cls.prepared:
raise SingletonNotPreparedError(
"prepare must be called before get_instance"
)
if hasattr(cls.local, 'tchannel') and cls.local.tchannel is not None:
return cls.local.tchannel
cls.local.tchannel = cls.tchannel_cls(*cls.args, **cls.kwargs)
return cls.local.tchannel | python | def get_instance(cls):
"""Get a configured, thread-safe, singleton TChannel instance.
:returns tchannel.TChannel:
"""
if not cls.prepared:
raise SingletonNotPreparedError(
"prepare must be called before get_instance"
)
if hasattr(cls.local, 'tchannel') and cls.local.tchannel is not None:
return cls.local.tchannel
cls.local.tchannel = cls.tchannel_cls(*cls.args, **cls.kwargs)
return cls.local.tchannel | [
"def",
"get_instance",
"(",
"cls",
")",
":",
"if",
"not",
"cls",
".",
"prepared",
":",
"raise",
"SingletonNotPreparedError",
"(",
"\"prepare must be called before get_instance\"",
")",
"if",
"hasattr",
"(",
"cls",
".",
"local",
",",
"'tchannel'",
")",
"and",
"cl... | Get a configured, thread-safe, singleton TChannel instance.
:returns tchannel.TChannel: | [
"Get",
"a",
"configured",
"thread",
"-",
"safe",
"singleton",
"TChannel",
"instance",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/singleton.py#L62-L77 | train | 36,637 |
uber/tchannel-python | tchannel/tornado/dispatch.py | RequestDispatcher.handle_pre_call | def handle_pre_call(self, message, connection):
"""Handle incoming request message including CallRequestMessage and
CallRequestContinueMessage
This method will build the User friendly request object based on the
incoming messages.
It passes all the messages into the message_factory to build the init
request object. Only when it get a CallRequestMessage and a completed
arg_1=argstream[0], the message_factory will return a request object.
Then it will trigger the async handle_call method.
:param message: CallRequestMessage or CallRequestContinueMessage
:param connection: tornado connection
"""
req = None
try:
req = connection.request_message_factory.build(message)
# message_factory will create Request only when it receives
# CallRequestMessage. It will return None, if it receives
# CallRequestContinueMessage.
if req:
self.handle_call(req, connection)
except TChannelError as e:
log.warn('Received a bad request.', exc_info=True)
if req:
e.tracing = req.tracing
connection.send_error(e) | python | def handle_pre_call(self, message, connection):
"""Handle incoming request message including CallRequestMessage and
CallRequestContinueMessage
This method will build the User friendly request object based on the
incoming messages.
It passes all the messages into the message_factory to build the init
request object. Only when it get a CallRequestMessage and a completed
arg_1=argstream[0], the message_factory will return a request object.
Then it will trigger the async handle_call method.
:param message: CallRequestMessage or CallRequestContinueMessage
:param connection: tornado connection
"""
req = None
try:
req = connection.request_message_factory.build(message)
# message_factory will create Request only when it receives
# CallRequestMessage. It will return None, if it receives
# CallRequestContinueMessage.
if req:
self.handle_call(req, connection)
except TChannelError as e:
log.warn('Received a bad request.', exc_info=True)
if req:
e.tracing = req.tracing
connection.send_error(e) | [
"def",
"handle_pre_call",
"(",
"self",
",",
"message",
",",
"connection",
")",
":",
"req",
"=",
"None",
"try",
":",
"req",
"=",
"connection",
".",
"request_message_factory",
".",
"build",
"(",
"message",
")",
"# message_factory will create Request only when it recei... | Handle incoming request message including CallRequestMessage and
CallRequestContinueMessage
This method will build the User friendly request object based on the
incoming messages.
It passes all the messages into the message_factory to build the init
request object. Only when it get a CallRequestMessage and a completed
arg_1=argstream[0], the message_factory will return a request object.
Then it will trigger the async handle_call method.
:param message: CallRequestMessage or CallRequestContinueMessage
:param connection: tornado connection | [
"Handle",
"incoming",
"request",
"message",
"including",
"CallRequestMessage",
"and",
"CallRequestContinueMessage"
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/dispatch.py#L89-L117 | train | 36,638 |
uber/tchannel-python | tchannel/tornado/dispatch.py | RequestDispatcher.register | def register(
self,
rule,
handler,
req_serializer=None,
resp_serializer=None
):
"""Register a new endpoint with the given name.
.. code-block:: python
@dispatcher.register('is_healthy')
def check_health(request, response):
# ...
:param rule:
Name of the endpoint. Incoming Call Requests must have this as
``arg1`` to dispatch to this handler.
If ``RequestHandler.FALLBACK`` is specified as a rule, the given
handler will be used as the 'fallback' handler when requests don't
match any registered rules.
:param handler:
A function that gets called with ``Request`` and ``Response``.
:param req_serializer:
Arg scheme serializer of this endpoint. It should be
``RawSerializer``, ``JsonSerializer``, and ``ThriftSerializer``.
:param resp_serializer:
Arg scheme serializer of this endpoint. It should be
``RawSerializer``, ``JsonSerializer``, and ``ThriftSerializer``.
"""
assert handler, "handler must not be None"
req_serializer = req_serializer or RawSerializer()
resp_serializer = resp_serializer or RawSerializer()
self.handlers[rule] = Handler(handler, req_serializer, resp_serializer) | python | def register(
self,
rule,
handler,
req_serializer=None,
resp_serializer=None
):
"""Register a new endpoint with the given name.
.. code-block:: python
@dispatcher.register('is_healthy')
def check_health(request, response):
# ...
:param rule:
Name of the endpoint. Incoming Call Requests must have this as
``arg1`` to dispatch to this handler.
If ``RequestHandler.FALLBACK`` is specified as a rule, the given
handler will be used as the 'fallback' handler when requests don't
match any registered rules.
:param handler:
A function that gets called with ``Request`` and ``Response``.
:param req_serializer:
Arg scheme serializer of this endpoint. It should be
``RawSerializer``, ``JsonSerializer``, and ``ThriftSerializer``.
:param resp_serializer:
Arg scheme serializer of this endpoint. It should be
``RawSerializer``, ``JsonSerializer``, and ``ThriftSerializer``.
"""
assert handler, "handler must not be None"
req_serializer = req_serializer or RawSerializer()
resp_serializer = resp_serializer or RawSerializer()
self.handlers[rule] = Handler(handler, req_serializer, resp_serializer) | [
"def",
"register",
"(",
"self",
",",
"rule",
",",
"handler",
",",
"req_serializer",
"=",
"None",
",",
"resp_serializer",
"=",
"None",
")",
":",
"assert",
"handler",
",",
"\"handler must not be None\"",
"req_serializer",
"=",
"req_serializer",
"or",
"RawSerializer"... | Register a new endpoint with the given name.
.. code-block:: python
@dispatcher.register('is_healthy')
def check_health(request, response):
# ...
:param rule:
Name of the endpoint. Incoming Call Requests must have this as
``arg1`` to dispatch to this handler.
If ``RequestHandler.FALLBACK`` is specified as a rule, the given
handler will be used as the 'fallback' handler when requests don't
match any registered rules.
:param handler:
A function that gets called with ``Request`` and ``Response``.
:param req_serializer:
Arg scheme serializer of this endpoint. It should be
``RawSerializer``, ``JsonSerializer``, and ``ThriftSerializer``.
:param resp_serializer:
Arg scheme serializer of this endpoint. It should be
``RawSerializer``, ``JsonSerializer``, and ``ThriftSerializer``. | [
"Register",
"a",
"new",
"endpoint",
"with",
"the",
"given",
"name",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/dispatch.py#L291-L329 | train | 36,639 |
uber/tchannel-python | tchannel/messages/common.py | generate_checksum | def generate_checksum(message, previous_csum=0):
"""Generate checksum for messages with
CALL_REQ, CALL_REQ_CONTINUE,
CALL_RES,CALL_RES_CONTINUE types.
:param message: outgoing message
:param previous_csum: accumulated checksum value
"""
if message.message_type in CHECKSUM_MSG_TYPES:
csum = compute_checksum(
message.checksum[0],
message.args,
previous_csum,
)
message.checksum = (message.checksum[0], csum) | python | def generate_checksum(message, previous_csum=0):
"""Generate checksum for messages with
CALL_REQ, CALL_REQ_CONTINUE,
CALL_RES,CALL_RES_CONTINUE types.
:param message: outgoing message
:param previous_csum: accumulated checksum value
"""
if message.message_type in CHECKSUM_MSG_TYPES:
csum = compute_checksum(
message.checksum[0],
message.args,
previous_csum,
)
message.checksum = (message.checksum[0], csum) | [
"def",
"generate_checksum",
"(",
"message",
",",
"previous_csum",
"=",
"0",
")",
":",
"if",
"message",
".",
"message_type",
"in",
"CHECKSUM_MSG_TYPES",
":",
"csum",
"=",
"compute_checksum",
"(",
"message",
".",
"checksum",
"[",
"0",
"]",
",",
"message",
".",... | Generate checksum for messages with
CALL_REQ, CALL_REQ_CONTINUE,
CALL_RES,CALL_RES_CONTINUE types.
:param message: outgoing message
:param previous_csum: accumulated checksum value | [
"Generate",
"checksum",
"for",
"messages",
"with",
"CALL_REQ",
"CALL_REQ_CONTINUE",
"CALL_RES",
"CALL_RES_CONTINUE",
"types",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/messages/common.py#L138-L153 | train | 36,640 |
uber/tchannel-python | tchannel/messages/common.py | verify_checksum | def verify_checksum(message, previous_csum=0):
"""Verify checksum for incoming message.
:param message: incoming message
:param previous_csum: accumulated checksum value
:return return True if message checksum type is None
or checksum is correct
"""
if message.message_type in CHECKSUM_MSG_TYPES:
csum = compute_checksum(
message.checksum[0],
message.args,
previous_csum,
)
if csum == message.checksum[1]:
return True
else:
return False
else:
return True | python | def verify_checksum(message, previous_csum=0):
"""Verify checksum for incoming message.
:param message: incoming message
:param previous_csum: accumulated checksum value
:return return True if message checksum type is None
or checksum is correct
"""
if message.message_type in CHECKSUM_MSG_TYPES:
csum = compute_checksum(
message.checksum[0],
message.args,
previous_csum,
)
if csum == message.checksum[1]:
return True
else:
return False
else:
return True | [
"def",
"verify_checksum",
"(",
"message",
",",
"previous_csum",
"=",
"0",
")",
":",
"if",
"message",
".",
"message_type",
"in",
"CHECKSUM_MSG_TYPES",
":",
"csum",
"=",
"compute_checksum",
"(",
"message",
".",
"checksum",
"[",
"0",
"]",
",",
"message",
".",
... | Verify checksum for incoming message.
:param message: incoming message
:param previous_csum: accumulated checksum value
:return return True if message checksum type is None
or checksum is correct | [
"Verify",
"checksum",
"for",
"incoming",
"message",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/messages/common.py#L156-L177 | train | 36,641 |
uber/tchannel-python | tchannel/tornado/hyperbahn.py | Advertiser.start | def start(self):
"""Starts the advertise loop.
Returns the result of the first ad request.
"""
if self.running:
raise Exception('Advertiser is already running')
if self.io_loop is None:
self.io_loop = tornado.ioloop.IOLoop.current()
self.running = True
answer = tornado.gen.Future()
self._schedule_ad(0, answer)
return answer | python | def start(self):
"""Starts the advertise loop.
Returns the result of the first ad request.
"""
if self.running:
raise Exception('Advertiser is already running')
if self.io_loop is None:
self.io_loop = tornado.ioloop.IOLoop.current()
self.running = True
answer = tornado.gen.Future()
self._schedule_ad(0, answer)
return answer | [
"def",
"start",
"(",
"self",
")",
":",
"if",
"self",
".",
"running",
":",
"raise",
"Exception",
"(",
"'Advertiser is already running'",
")",
"if",
"self",
".",
"io_loop",
"is",
"None",
":",
"self",
".",
"io_loop",
"=",
"tornado",
".",
"ioloop",
".",
"IOL... | Starts the advertise loop.
Returns the result of the first ad request. | [
"Starts",
"the",
"advertise",
"loop",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/hyperbahn.py#L81-L94 | train | 36,642 |
uber/tchannel-python | tchannel/tornado/hyperbahn.py | Advertiser._schedule_ad | def _schedule_ad(self, delay=None, response_future=None):
"""Schedules an ``ad`` request.
:param delay:
Time in seconds to wait before making the ``ad`` request. Defaults
to self.interval_secs. Regardless of value, a jitter of
self.interval_max_jitter_secs is applied to this.
:param response_future:
If non-None, the result of the advertise request is filled into
this future.
"""
if not self.running:
return
if delay is None:
delay = self.interval_secs
delay += random.uniform(0, self.interval_max_jitter_secs)
self._next_ad = self.io_loop.call_later(delay, self._ad,
response_future) | python | def _schedule_ad(self, delay=None, response_future=None):
"""Schedules an ``ad`` request.
:param delay:
Time in seconds to wait before making the ``ad`` request. Defaults
to self.interval_secs. Regardless of value, a jitter of
self.interval_max_jitter_secs is applied to this.
:param response_future:
If non-None, the result of the advertise request is filled into
this future.
"""
if not self.running:
return
if delay is None:
delay = self.interval_secs
delay += random.uniform(0, self.interval_max_jitter_secs)
self._next_ad = self.io_loop.call_later(delay, self._ad,
response_future) | [
"def",
"_schedule_ad",
"(",
"self",
",",
"delay",
"=",
"None",
",",
"response_future",
"=",
"None",
")",
":",
"if",
"not",
"self",
".",
"running",
":",
"return",
"if",
"delay",
"is",
"None",
":",
"delay",
"=",
"self",
".",
"interval_secs",
"delay",
"+=... | Schedules an ``ad`` request.
:param delay:
Time in seconds to wait before making the ``ad`` request. Defaults
to self.interval_secs. Regardless of value, a jitter of
self.interval_max_jitter_secs is applied to this.
:param response_future:
If non-None, the result of the advertise request is filled into
this future. | [
"Schedules",
"an",
"ad",
"request",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/hyperbahn.py#L102-L121 | train | 36,643 |
uber/tchannel-python | tchannel/tornado/tombstone.py | Cemetery.add | def add(self, id, ttl_secs):
"""Adds a new request to the Cemetery that is known to have timed out.
The request will be forgotten after ``ttl_secs + ttl_offset_secs``
seconds.
:param id:
ID of the request
:param ttl_secs:
TTL of the request (in seconds)
"""
ttl_secs = min(ttl_secs + self.ttl_offset_secs, self.max_ttl_secs)
self._tombstones[id] = IOLoop.current().call_later(
ttl_secs, self.forget, id,
) | python | def add(self, id, ttl_secs):
"""Adds a new request to the Cemetery that is known to have timed out.
The request will be forgotten after ``ttl_secs + ttl_offset_secs``
seconds.
:param id:
ID of the request
:param ttl_secs:
TTL of the request (in seconds)
"""
ttl_secs = min(ttl_secs + self.ttl_offset_secs, self.max_ttl_secs)
self._tombstones[id] = IOLoop.current().call_later(
ttl_secs, self.forget, id,
) | [
"def",
"add",
"(",
"self",
",",
"id",
",",
"ttl_secs",
")",
":",
"ttl_secs",
"=",
"min",
"(",
"ttl_secs",
"+",
"self",
".",
"ttl_offset_secs",
",",
"self",
".",
"max_ttl_secs",
")",
"self",
".",
"_tombstones",
"[",
"id",
"]",
"=",
"IOLoop",
".",
"cur... | Adds a new request to the Cemetery that is known to have timed out.
The request will be forgotten after ``ttl_secs + ttl_offset_secs``
seconds.
:param id:
ID of the request
:param ttl_secs:
TTL of the request (in seconds) | [
"Adds",
"a",
"new",
"request",
"to",
"the",
"Cemetery",
"that",
"is",
"known",
"to",
"have",
"timed",
"out",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/tombstone.py#L83-L97 | train | 36,644 |
uber/tchannel-python | tchannel/tornado/tombstone.py | Cemetery.clear | def clear(self):
"""Forget about all requests."""
io_loop = IOLoop.current()
while self._tombstones:
_, req_timeout = self._tombstones.popitem()
io_loop.remove_timeout(req_timeout) | python | def clear(self):
"""Forget about all requests."""
io_loop = IOLoop.current()
while self._tombstones:
_, req_timeout = self._tombstones.popitem()
io_loop.remove_timeout(req_timeout) | [
"def",
"clear",
"(",
"self",
")",
":",
"io_loop",
"=",
"IOLoop",
".",
"current",
"(",
")",
"while",
"self",
".",
"_tombstones",
":",
"_",
",",
"req_timeout",
"=",
"self",
".",
"_tombstones",
".",
"popitem",
"(",
")",
"io_loop",
".",
"remove_timeout",
"... | Forget about all requests. | [
"Forget",
"about",
"all",
"requests",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/tombstone.py#L103-L108 | train | 36,645 |
uber/tchannel-python | tchannel/tornado/response.py | Response.get_header | def get_header(self):
"""Get the header value from the response.
:return: a future contains the deserialized value of header
"""
raw_header = yield get_arg(self, 1)
if not self.serializer:
raise tornado.gen.Return(raw_header)
else:
header = self.serializer.deserialize_header(raw_header)
raise tornado.gen.Return(header) | python | def get_header(self):
"""Get the header value from the response.
:return: a future contains the deserialized value of header
"""
raw_header = yield get_arg(self, 1)
if not self.serializer:
raise tornado.gen.Return(raw_header)
else:
header = self.serializer.deserialize_header(raw_header)
raise tornado.gen.Return(header) | [
"def",
"get_header",
"(",
"self",
")",
":",
"raw_header",
"=",
"yield",
"get_arg",
"(",
"self",
",",
"1",
")",
"if",
"not",
"self",
".",
"serializer",
":",
"raise",
"tornado",
".",
"gen",
".",
"Return",
"(",
"raw_header",
")",
"else",
":",
"header",
... | Get the header value from the response.
:return: a future contains the deserialized value of header | [
"Get",
"the",
"header",
"value",
"from",
"the",
"response",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/response.py#L109-L119 | train | 36,646 |
uber/tchannel-python | tchannel/tornado/response.py | Response.get_body | def get_body(self):
"""Get the body value from the response.
:return: a future contains the deserialized value of body
"""
raw_body = yield get_arg(self, 2)
if not self.serializer:
raise tornado.gen.Return(raw_body)
else:
body = self.serializer.deserialize_body(raw_body)
raise tornado.gen.Return(body) | python | def get_body(self):
"""Get the body value from the response.
:return: a future contains the deserialized value of body
"""
raw_body = yield get_arg(self, 2)
if not self.serializer:
raise tornado.gen.Return(raw_body)
else:
body = self.serializer.deserialize_body(raw_body)
raise tornado.gen.Return(body) | [
"def",
"get_body",
"(",
"self",
")",
":",
"raw_body",
"=",
"yield",
"get_arg",
"(",
"self",
",",
"2",
")",
"if",
"not",
"self",
".",
"serializer",
":",
"raise",
"tornado",
".",
"gen",
".",
"Return",
"(",
"raw_body",
")",
"else",
":",
"body",
"=",
"... | Get the body value from the response.
:return: a future contains the deserialized value of body | [
"Get",
"the",
"body",
"value",
"from",
"the",
"response",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/response.py#L122-L133 | train | 36,647 |
uber/tchannel-python | tchannel/tornado/response.py | Response.set_body_s | def set_body_s(self, stream):
"""Set customized body stream.
Note: the body stream can only be changed before the stream
is consumed.
:param stream: InMemStream/PipeStream for body
:except TChannelError:
Raise TChannelError if the stream is being sent when you try
to change the stream.
"""
if self.argstreams[2].state == StreamState.init:
self.argstreams[2] = stream
else:
raise TChannelError(
"Unable to change the body since the streaming has started") | python | def set_body_s(self, stream):
"""Set customized body stream.
Note: the body stream can only be changed before the stream
is consumed.
:param stream: InMemStream/PipeStream for body
:except TChannelError:
Raise TChannelError if the stream is being sent when you try
to change the stream.
"""
if self.argstreams[2].state == StreamState.init:
self.argstreams[2] = stream
else:
raise TChannelError(
"Unable to change the body since the streaming has started") | [
"def",
"set_body_s",
"(",
"self",
",",
"stream",
")",
":",
"if",
"self",
".",
"argstreams",
"[",
"2",
"]",
".",
"state",
"==",
"StreamState",
".",
"init",
":",
"self",
".",
"argstreams",
"[",
"2",
"]",
"=",
"stream",
"else",
":",
"raise",
"TChannelEr... | Set customized body stream.
Note: the body stream can only be changed before the stream
is consumed.
:param stream: InMemStream/PipeStream for body
:except TChannelError:
Raise TChannelError if the stream is being sent when you try
to change the stream. | [
"Set",
"customized",
"body",
"stream",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/response.py#L135-L151 | train | 36,648 |
uber/tchannel-python | tchannel/tornado/response.py | Response.set_header_s | def set_header_s(self, stream):
"""Set customized header stream.
Note: the header stream can only be changed before the stream
is consumed.
:param stream: InMemStream/PipeStream for header
:except TChannelError:
Raise TChannelError if the stream is being sent when you try
to change the stream.
"""
if self.argstreams[1].state == StreamState.init:
self.argstreams[1] = stream
else:
raise TChannelError(
"Unable to change the header since the streaming has started") | python | def set_header_s(self, stream):
"""Set customized header stream.
Note: the header stream can only be changed before the stream
is consumed.
:param stream: InMemStream/PipeStream for header
:except TChannelError:
Raise TChannelError if the stream is being sent when you try
to change the stream.
"""
if self.argstreams[1].state == StreamState.init:
self.argstreams[1] = stream
else:
raise TChannelError(
"Unable to change the header since the streaming has started") | [
"def",
"set_header_s",
"(",
"self",
",",
"stream",
")",
":",
"if",
"self",
".",
"argstreams",
"[",
"1",
"]",
".",
"state",
"==",
"StreamState",
".",
"init",
":",
"self",
".",
"argstreams",
"[",
"1",
"]",
"=",
"stream",
"else",
":",
"raise",
"TChannel... | Set customized header stream.
Note: the header stream can only be changed before the stream
is consumed.
:param stream: InMemStream/PipeStream for header
:except TChannelError:
Raise TChannelError if the stream is being sent when you try
to change the stream. | [
"Set",
"customized",
"header",
"stream",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/response.py#L153-L170 | train | 36,649 |
uber/tchannel-python | tchannel/tornado/connection.py | read_message | def read_message(stream):
"""Reads a message from the given IOStream.
:param IOStream stream:
IOStream to read from.
"""
answer = tornado.gen.Future()
io_loop = IOLoop.current()
def on_error(future):
log.info('Failed to read data: %s', future.exception())
return answer.set_exc_info(future.exc_info())
@fail_to(answer)
def on_body(size, future):
if future.exception():
return on_error(future)
body = future.result()
f = frame.frame_rw.read(BytesIO(body), size=size)
message_type = f.header.message_type
message_rw = messages.RW.get(message_type)
if not message_rw:
exc = errors.FatalProtocolError(
'Unknown message type %s', str(message_type)
)
return answer.set_exception(exc)
message = message_rw.read(BytesIO(f.payload))
message.id = f.header.message_id
answer.set_result(message)
@fail_to(answer)
def on_read_size(future):
if future.exception():
return answer.set_exc_info(future.exc_info())
size_bytes = future.result()
size = frame.frame_rw.size_rw.read(BytesIO(size_bytes))
io_loop.add_future(
stream.read_bytes(size - FRAME_SIZE_WIDTH),
lambda f: on_body(size, f)
)
try:
# read_bytes may fail if the stream has already been closed
read_size_future = stream.read_bytes(FRAME_SIZE_WIDTH)
except Exception:
answer.set_exc_info(sys.exc_info())
else:
read_size_future.add_done_callback(on_read_size)
return answer | python | def read_message(stream):
"""Reads a message from the given IOStream.
:param IOStream stream:
IOStream to read from.
"""
answer = tornado.gen.Future()
io_loop = IOLoop.current()
def on_error(future):
log.info('Failed to read data: %s', future.exception())
return answer.set_exc_info(future.exc_info())
@fail_to(answer)
def on_body(size, future):
if future.exception():
return on_error(future)
body = future.result()
f = frame.frame_rw.read(BytesIO(body), size=size)
message_type = f.header.message_type
message_rw = messages.RW.get(message_type)
if not message_rw:
exc = errors.FatalProtocolError(
'Unknown message type %s', str(message_type)
)
return answer.set_exception(exc)
message = message_rw.read(BytesIO(f.payload))
message.id = f.header.message_id
answer.set_result(message)
@fail_to(answer)
def on_read_size(future):
if future.exception():
return answer.set_exc_info(future.exc_info())
size_bytes = future.result()
size = frame.frame_rw.size_rw.read(BytesIO(size_bytes))
io_loop.add_future(
stream.read_bytes(size - FRAME_SIZE_WIDTH),
lambda f: on_body(size, f)
)
try:
# read_bytes may fail if the stream has already been closed
read_size_future = stream.read_bytes(FRAME_SIZE_WIDTH)
except Exception:
answer.set_exc_info(sys.exc_info())
else:
read_size_future.add_done_callback(on_read_size)
return answer | [
"def",
"read_message",
"(",
"stream",
")",
":",
"answer",
"=",
"tornado",
".",
"gen",
".",
"Future",
"(",
")",
"io_loop",
"=",
"IOLoop",
".",
"current",
"(",
")",
"def",
"on_error",
"(",
"future",
")",
":",
"log",
".",
"info",
"(",
"'Failed to read dat... | Reads a message from the given IOStream.
:param IOStream stream:
IOStream to read from. | [
"Reads",
"a",
"message",
"from",
"the",
"given",
"IOStream",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/connection.py#L905-L956 | train | 36,650 |
uber/tchannel-python | tchannel/tornado/connection.py | TornadoConnection.set_close_callback | def set_close_callback(self, cb):
"""Specify a function to be called when this connection is closed.
:param cb:
A callable that takes no arguments. This callable will be called
when this connection is closed.
"""
assert self._close_cb is None, (
'A close_callback has already been set for this connection.'
)
self._close_cb = stack_context.wrap(cb)
if self.closed:
self._close_cb() | python | def set_close_callback(self, cb):
"""Specify a function to be called when this connection is closed.
:param cb:
A callable that takes no arguments. This callable will be called
when this connection is closed.
"""
assert self._close_cb is None, (
'A close_callback has already been set for this connection.'
)
self._close_cb = stack_context.wrap(cb)
if self.closed:
self._close_cb() | [
"def",
"set_close_callback",
"(",
"self",
",",
"cb",
")",
":",
"assert",
"self",
".",
"_close_cb",
"is",
"None",
",",
"(",
"'A close_callback has already been set for this connection.'",
")",
"self",
".",
"_close_cb",
"=",
"stack_context",
".",
"wrap",
"(",
"cb",
... | Specify a function to be called when this connection is closed.
:param cb:
A callable that takes no arguments. This callable will be called
when this connection is closed. | [
"Specify",
"a",
"function",
"to",
"be",
"called",
"when",
"this",
"connection",
"is",
"closed",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/connection.py#L156-L168 | train | 36,651 |
uber/tchannel-python | tchannel/tornado/connection.py | TornadoConnection.send | def send(self, message):
"""Send the given message up the wire.
Use this for messages which have a response message.
:param message:
Message to send
:returns:
A Future containing the response for the message
"""
assert self._handshake_performed, "Perform a handshake first."
assert message.message_type in self.CALL_REQ_TYPES, (
"Message '%s' can't use send" % repr(message)
)
message.id = message.id or self.writer.next_message_id()
assert message.id not in self._outbound_pending_call, (
"Message ID '%d' already being used" % message.id
)
future = tornado.gen.Future()
self._outbound_pending_call[message.id] = future
self.write(message)
return future | python | def send(self, message):
"""Send the given message up the wire.
Use this for messages which have a response message.
:param message:
Message to send
:returns:
A Future containing the response for the message
"""
assert self._handshake_performed, "Perform a handshake first."
assert message.message_type in self.CALL_REQ_TYPES, (
"Message '%s' can't use send" % repr(message)
)
message.id = message.id or self.writer.next_message_id()
assert message.id not in self._outbound_pending_call, (
"Message ID '%d' already being used" % message.id
)
future = tornado.gen.Future()
self._outbound_pending_call[message.id] = future
self.write(message)
return future | [
"def",
"send",
"(",
"self",
",",
"message",
")",
":",
"assert",
"self",
".",
"_handshake_performed",
",",
"\"Perform a handshake first.\"",
"assert",
"message",
".",
"message_type",
"in",
"self",
".",
"CALL_REQ_TYPES",
",",
"(",
"\"Message '%s' can't use send\"",
"%... | Send the given message up the wire.
Use this for messages which have a response message.
:param message:
Message to send
:returns:
A Future containing the response for the message | [
"Send",
"the",
"given",
"message",
"up",
"the",
"wire",
".",
"Use",
"this",
"for",
"messages",
"which",
"have",
"a",
"response",
"message",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/connection.py#L278-L300 | train | 36,652 |
uber/tchannel-python | tchannel/tornado/connection.py | TornadoConnection.write | def write(self, message):
"""Writes the given message up the wire.
Does not expect a response back for the message.
:param message:
Message to write.
"""
message.id = message.id or self.writer.next_message_id()
if message.message_type in self.CALL_REQ_TYPES:
message_factory = self.request_message_factory
else:
message_factory = self.response_message_factory
fragments = message_factory.fragment(message)
return self._write_fragments(fragments) | python | def write(self, message):
"""Writes the given message up the wire.
Does not expect a response back for the message.
:param message:
Message to write.
"""
message.id = message.id or self.writer.next_message_id()
if message.message_type in self.CALL_REQ_TYPES:
message_factory = self.request_message_factory
else:
message_factory = self.response_message_factory
fragments = message_factory.fragment(message)
return self._write_fragments(fragments) | [
"def",
"write",
"(",
"self",
",",
"message",
")",
":",
"message",
".",
"id",
"=",
"message",
".",
"id",
"or",
"self",
".",
"writer",
".",
"next_message_id",
"(",
")",
"if",
"message",
".",
"message_type",
"in",
"self",
".",
"CALL_REQ_TYPES",
":",
"mess... | Writes the given message up the wire.
Does not expect a response back for the message.
:param message:
Message to write. | [
"Writes",
"the",
"given",
"message",
"up",
"the",
"wire",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/connection.py#L302-L318 | train | 36,653 |
uber/tchannel-python | tchannel/tornado/connection.py | TornadoConnection.initiate_handshake | def initiate_handshake(self, headers, timeout=None):
"""Initiate a handshake with the remote host.
:param headers:
A dictionary of headers to send.
:returns:
A future that resolves (with a value of None) when the handshake
is complete.
"""
io_loop = IOLoop.current()
timeout = timeout or DEFAULT_INIT_TIMEOUT_SECS
self.writer.put(messages.InitRequestMessage(
version=PROTOCOL_VERSION,
headers=headers
))
init_res_future = self.reader.get()
timeout_handle = io_loop.call_later(timeout, (
lambda: init_res_future.set_exception(errors.TimeoutError(
'Handshake with %s:%d timed out. Did not receive an INIT_RES '
'after %s seconds' % (
self.remote_host, self.remote_host_port, str(timeout)
)
))
))
io_loop.add_future(
init_res_future,
(lambda _: io_loop.remove_timeout(timeout_handle)),
)
init_res = yield init_res_future
if init_res.message_type != Types.INIT_RES:
raise errors.UnexpectedError(
"Expected handshake response, got %s" % repr(init_res)
)
self._extract_handshake_headers(init_res)
self._handshake_performed = True
# The receive loop is started only after the handshake has been
# completed.
self._loop()
raise tornado.gen.Return(init_res) | python | def initiate_handshake(self, headers, timeout=None):
"""Initiate a handshake with the remote host.
:param headers:
A dictionary of headers to send.
:returns:
A future that resolves (with a value of None) when the handshake
is complete.
"""
io_loop = IOLoop.current()
timeout = timeout or DEFAULT_INIT_TIMEOUT_SECS
self.writer.put(messages.InitRequestMessage(
version=PROTOCOL_VERSION,
headers=headers
))
init_res_future = self.reader.get()
timeout_handle = io_loop.call_later(timeout, (
lambda: init_res_future.set_exception(errors.TimeoutError(
'Handshake with %s:%d timed out. Did not receive an INIT_RES '
'after %s seconds' % (
self.remote_host, self.remote_host_port, str(timeout)
)
))
))
io_loop.add_future(
init_res_future,
(lambda _: io_loop.remove_timeout(timeout_handle)),
)
init_res = yield init_res_future
if init_res.message_type != Types.INIT_RES:
raise errors.UnexpectedError(
"Expected handshake response, got %s" % repr(init_res)
)
self._extract_handshake_headers(init_res)
self._handshake_performed = True
# The receive loop is started only after the handshake has been
# completed.
self._loop()
raise tornado.gen.Return(init_res) | [
"def",
"initiate_handshake",
"(",
"self",
",",
"headers",
",",
"timeout",
"=",
"None",
")",
":",
"io_loop",
"=",
"IOLoop",
".",
"current",
"(",
")",
"timeout",
"=",
"timeout",
"or",
"DEFAULT_INIT_TIMEOUT_SECS",
"self",
".",
"writer",
".",
"put",
"(",
"mess... | Initiate a handshake with the remote host.
:param headers:
A dictionary of headers to send.
:returns:
A future that resolves (with a value of None) when the handshake
is complete. | [
"Initiate",
"a",
"handshake",
"with",
"the",
"remote",
"host",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/connection.py#L359-L402 | train | 36,654 |
uber/tchannel-python | tchannel/tornado/connection.py | TornadoConnection.expect_handshake | def expect_handshake(self, headers):
"""Expect a handshake from the remote host.
:param headers:
Headers to respond with
:returns:
A future that resolves (with a value of None) when the handshake
is complete.
"""
init_req = yield self.reader.get()
if init_req.message_type != Types.INIT_REQ:
raise errors.UnexpectedError(
"You need to shake my hand first. Got %s" % repr(init_req)
)
self._extract_handshake_headers(init_req)
self._handshake_performed = True
self.writer.put(
messages.InitResponseMessage(
PROTOCOL_VERSION, headers, init_req.id),
)
# The receive loop is started only after the handshake has been
# completed.
self._loop()
raise tornado.gen.Return(init_req) | python | def expect_handshake(self, headers):
"""Expect a handshake from the remote host.
:param headers:
Headers to respond with
:returns:
A future that resolves (with a value of None) when the handshake
is complete.
"""
init_req = yield self.reader.get()
if init_req.message_type != Types.INIT_REQ:
raise errors.UnexpectedError(
"You need to shake my hand first. Got %s" % repr(init_req)
)
self._extract_handshake_headers(init_req)
self._handshake_performed = True
self.writer.put(
messages.InitResponseMessage(
PROTOCOL_VERSION, headers, init_req.id),
)
# The receive loop is started only after the handshake has been
# completed.
self._loop()
raise tornado.gen.Return(init_req) | [
"def",
"expect_handshake",
"(",
"self",
",",
"headers",
")",
":",
"init_req",
"=",
"yield",
"self",
".",
"reader",
".",
"get",
"(",
")",
"if",
"init_req",
".",
"message_type",
"!=",
"Types",
".",
"INIT_REQ",
":",
"raise",
"errors",
".",
"UnexpectedError",
... | Expect a handshake from the remote host.
:param headers:
Headers to respond with
:returns:
A future that resolves (with a value of None) when the handshake
is complete. | [
"Expect",
"a",
"handshake",
"from",
"the",
"remote",
"host",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/connection.py#L405-L431 | train | 36,655 |
def outgoing(cls, hostport, process_name=None, serve_hostport=None,
             handler=None, tchannel=None):
    """Initiate a new outgoing connection to the given host.

    Connects a TCP stream to ``hostport`` and performs the TChannel
    handshake before returning the connection.

    :param hostport:
        String in the form ``$host:$port`` specifying the target host
    :param process_name:
        Process name of the entity making the connection. Defaults to
        ``argv[0][pid]``.
    :param serve_hostport:
        String in the form ``$host:$port`` specifying an address at which
        the caller can be reached. If omitted, ``0.0.0.0:0`` is used.
    :param handler:
        If given, any calls received from this connection will be sent to
        this RequestHandler.
    :param tchannel:
        The TChannel instance owning this connection, if any.
    :raises NetworkError:
        If the TCP connect or the handshake fails.
    """
    host, port = hostport.rsplit(":", 1)
    process_name = process_name or "%s[%s]" % (sys.argv[0], os.getpid())
    serve_hostport = serve_hostport or "0.0.0.0:0"

    # TODO: change this to tornado.tcpclient.TCPClient to do async DNS
    # lookups.
    stream = tornado.iostream.IOStream(
        socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    )

    log.debug("Connecting to %s", hostport)
    try:
        yield stream.connect((host, int(port)))
        connection = cls(stream, tchannel, direction=OUTGOING)

        log.debug("Performing handshake with %s", hostport)
        yield connection.initiate_handshake(headers={
            'host_port': serve_hostport,
            'process_name': process_name,
            'tchannel_language': TCHANNEL_LANGUAGE,
            'tchannel_language_version': TCHANNEL_LANGUAGE_VERSION,
            'tchannel_version': TCHANNEL_VERSION,
        })
    except (StreamClosedError, socket.error, errors.TimeoutError) as e:
        # Logger.warn is a deprecated alias of Logger.warning.
        log.warning("Couldn't connect to %s", hostport)
        raise NetworkError(
            "Couldn't connect to %s" % hostport, e
        )

    if handler:
        connection.serve(handler)
    raise tornado.gen.Return(connection)
handler=None, tchannel=None):
"""Initiate a new connection to the given host.
:param hostport:
String in the form ``$host:$port`` specifying the target host
:param process_name:
Process name of the entity making the connection.
:param serve_hostport:
String in the form ``$host:$port`` specifying an address at which
the caller can be reached. If omitted, ``0.0.0.0:0`` is used.
:param handler:
If given, any calls received from this connection will be sent to
this RequestHandler.
"""
host, port = hostport.rsplit(":", 1)
process_name = process_name or "%s[%s]" % (sys.argv[0], os.getpid())
serve_hostport = serve_hostport or "0.0.0.0:0"
# TODO: change this to tornado.tcpclient.TCPClient to do async DNS
# lookups.
stream = tornado.iostream.IOStream(
socket.socket(socket.AF_INET, socket.SOCK_STREAM)
)
log.debug("Connecting to %s", hostport)
try:
yield stream.connect((host, int(port)))
connection = cls(stream, tchannel, direction=OUTGOING)
log.debug("Performing handshake with %s", hostport)
yield connection.initiate_handshake(headers={
'host_port': serve_hostport,
'process_name': process_name,
'tchannel_language': TCHANNEL_LANGUAGE,
'tchannel_language_version': TCHANNEL_LANGUAGE_VERSION,
'tchannel_version': TCHANNEL_VERSION,
})
except (StreamClosedError, socket.error, errors.TimeoutError) as e:
log.warn("Couldn't connect to %s", hostport)
raise NetworkError(
"Couldn't connect to %s" % hostport, e
)
if handler:
connection.serve(handler)
raise tornado.gen.Return(connection) | [
"def",
"outgoing",
"(",
"cls",
",",
"hostport",
",",
"process_name",
"=",
"None",
",",
"serve_hostport",
"=",
"None",
",",
"handler",
"=",
"None",
",",
"tchannel",
"=",
"None",
")",
":",
"host",
",",
"port",
"=",
"hostport",
".",
"rsplit",
"(",
"\":\""... | Initiate a new connection to the given host.
:param hostport:
String in the form ``$host:$port`` specifying the target host
:param process_name:
Process name of the entity making the connection.
:param serve_hostport:
String in the form ``$host:$port`` specifying an address at which
the caller can be reached. If omitted, ``0.0.0.0:0`` is used.
:param handler:
If given, any calls received from this connection will be sent to
this RequestHandler. | [
"Initiate",
"a",
"new",
"connection",
"to",
"the",
"given",
"host",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/connection.py#L452-L501 | train | 36,656 |
uber/tchannel-python | tchannel/tornado/connection.py | TornadoConnection.serve | def serve(self, handler):
"""Serve calls over this connection using the given RequestHandler.
:param handler:
RequestHandler to process the requests through
:return:
A Future that resolves (to None) once the loop is done running --
which happens once this connection is closed.
"""
assert handler, "handler is required"
while not self.closed:
message = yield self.await()
try:
handler(message, self)
except Exception:
# TODO Send error frame back
log.exception("Failed to process %s", repr(message)) | python | def serve(self, handler):
"""Serve calls over this connection using the given RequestHandler.
:param handler:
RequestHandler to process the requests through
:return:
A Future that resolves (to None) once the loop is done running --
which happens once this connection is closed.
"""
assert handler, "handler is required"
while not self.closed:
message = yield self.await()
try:
handler(message, self)
except Exception:
# TODO Send error frame back
log.exception("Failed to process %s", repr(message)) | [
"def",
"serve",
"(",
"self",
",",
"handler",
")",
":",
"assert",
"handler",
",",
"\"handler is required\"",
"while",
"not",
"self",
".",
"closed",
":",
"message",
"=",
"yield",
"self",
".",
"await",
"(",
")",
"try",
":",
"handler",
"(",
"message",
",",
... | Serve calls over this connection using the given RequestHandler.
:param handler:
RequestHandler to process the requests through
:return:
A Future that resolves (to None) once the loop is done running --
which happens once this connection is closed. | [
"Serve",
"calls",
"over",
"this",
"connection",
"using",
"the",
"given",
"RequestHandler",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/connection.py#L504-L522 | train | 36,657 |
def send_error(self, error):
    """Convenience method for writing Error frames up the wire.

    :param error:
        TChannel Error. :py:class:`tchannel.errors.TChannelError`.
    :returns:
        A future that resolves when the write finishes.
    """
    error_message = build_raw_error_message(error)
    write_future = self.writer.put(error_message)
    write_future.add_done_callback(
        # Hand add_callback the callable plus its arguments so the event
        # fires on the next IOLoop iteration. The previous version called
        # fire(...) eagerly and scheduled its *return value* as the
        # callback, so the event was not deferred and the loop would later
        # attempt to invoke a non-callable result.
        lambda f: IOLoop.current().add_callback(
            self.tchannel.event_emitter.fire,
            EventType.after_send_error,
            error,
        )
    )
    return write_future
"""Convenience method for writing Error frames up the wire.
:param error:
TChannel Error. :py:class`tchannel.errors.TChannelError`.
:returns:
A future that resolves when the write finishes.
"""
error_message = build_raw_error_message(error)
write_future = self.writer.put(error_message)
write_future.add_done_callback(
lambda f: IOLoop.current().add_callback(
self.tchannel.event_emitter.fire(
EventType.after_send_error,
error,
)
)
)
return write_future | [
"def",
"send_error",
"(",
"self",
",",
"error",
")",
":",
"error_message",
"=",
"build_raw_error_message",
"(",
"error",
")",
"write_future",
"=",
"self",
".",
"writer",
".",
"put",
"(",
"error_message",
")",
"write_future",
".",
"add_done_callback",
"(",
"lam... | Convenience method for writing Error frames up the wire.
:param error:
TChannel Error. :py:class`tchannel.errors.TChannelError`.
:returns:
A future that resolves when the write finishes. | [
"Convenience",
"method",
"for",
"writing",
"Error",
"frames",
"up",
"the",
"wire",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/connection.py#L524-L543 | train | 36,658 |
def stream_request(self, request, out_future):
    """Stream ``request`` over the wire; no response is expected here.

    Failures while streaming are propagated onto ``out_future`` (if it is
    still running) and the request's argstreams are force-closed.
    """
    def _propagate_failure(finished):
        if finished.exception() and out_future.running():
            out_future.set_exc_info(finished.exc_info())
        request.close_argstreams(force=True)

    request.close_argstreams()
    stream_future = self._stream(request, self.request_message_factory)
    stream_future.add_done_callback(_propagate_failure)
    return stream_future
"""send the given request and response is not required"""
request.close_argstreams()
def on_done(future):
if future.exception() and out_future.running():
out_future.set_exc_info(future.exc_info())
request.close_argstreams(force=True)
stream_future = self._stream(request, self.request_message_factory)
stream_future.add_done_callback(on_done)
return stream_future | [
"def",
"stream_request",
"(",
"self",
",",
"request",
",",
"out_future",
")",
":",
"request",
".",
"close_argstreams",
"(",
")",
"def",
"on_done",
"(",
"future",
")",
":",
"if",
"future",
".",
"exception",
"(",
")",
"and",
"out_future",
".",
"running",
"... | send the given request and response is not required | [
"send",
"the",
"given",
"request",
"and",
"response",
"is",
"not",
"required"
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/connection.py#L646-L657 | train | 36,659 |
def send_request(self, request):
    """Send the given request; a response is required.

    Use this for messages which have a response message.

    :param request:
        request to send
    :returns:
        A Future containing the response for the request
    """
    # A handshake must have established the peer before any calls go out.
    assert self._handshake_performed, "Perform a handshake first."
    # Message IDs are the correlation key for responses; reuse would
    # deliver a response to the wrong caller.
    assert request.id not in self._outbound_pending_call, (
        "Message ID '%d' already being used" % request.id
    )

    # Internal future resolved by the reader when the raw response (or an
    # error frame) for this message ID arrives.
    future = tornado.gen.Future()
    self._outbound_pending_call[request.id] = future
    self.add_pending_outbound()
    self.stream_request(request, future).add_done_callback(
        lambda f: self.remove_pending_outbound()
    )
    if request.ttl:
        # Fail `future` with a timeout if no response arrives within the
        # request's TTL.
        self._add_timeout(request, future)

    # the actual future that caller will yield
    response_future = tornado.gen.Future()
    # TODO: fire before_receive_response
    # adapt_result translates the raw outcome on `future` into the value
    # or exception exposed to the caller via `response_future`.
    IOLoop.current().add_future(
        future,
        lambda f: self.adapt_result(f, request, response_future),
    )
    return response_future
"""Send the given request and response is required.
Use this for messages which have a response message.
:param request:
request to send
:returns:
A Future containing the response for the request
"""
assert self._handshake_performed, "Perform a handshake first."
assert request.id not in self._outbound_pending_call, (
"Message ID '%d' already being used" % request.id
)
future = tornado.gen.Future()
self._outbound_pending_call[request.id] = future
self.add_pending_outbound()
self.stream_request(request, future).add_done_callback(
lambda f: self.remove_pending_outbound()
)
if request.ttl:
self._add_timeout(request, future)
# the actual future that caller will yield
response_future = tornado.gen.Future()
# TODO: fire before_receive_response
IOLoop.current().add_future(
future,
lambda f: self.adapt_result(f, request, response_future),
)
return response_future | [
"def",
"send_request",
"(",
"self",
",",
"request",
")",
":",
"assert",
"self",
".",
"_handshake_performed",
",",
"\"Perform a handshake first.\"",
"assert",
"request",
".",
"id",
"not",
"in",
"self",
".",
"_outbound_pending_call",
",",
"(",
"\"Message ID '%d' alrea... | Send the given request and response is required.
Use this for messages which have a response message.
:param request:
request to send
:returns:
A Future containing the response for the request | [
"Send",
"the",
"given",
"request",
"and",
"response",
"is",
"required",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/connection.py#L659-L693 | train | 36,660 |
def _add_timeout(self, request, future):
    """Schedule a TTL timeout for ``request``, tied to ``future``.

    The timeout is cancelled automatically once ``future`` resolves.
    """
    loop = IOLoop.current()
    handle = loop.call_later(
        request.ttl,
        self._request_timed_out,
        request.id,
        request.service,
        request.ttl,
        future,
    )
    loop.add_future(future, lambda f: loop.remove_timeout(handle))
"""Adds a timeout for the given request to the given future."""
io_loop = IOLoop.current()
t = io_loop.call_later(
request.ttl,
self._request_timed_out,
request.id,
request.service,
request.ttl,
future,
)
io_loop.add_future(future, lambda f: io_loop.remove_timeout(t)) | [
"def",
"_add_timeout",
"(",
"self",
",",
"request",
",",
"future",
")",
":",
"io_loop",
"=",
"IOLoop",
".",
"current",
"(",
")",
"t",
"=",
"io_loop",
".",
"call_later",
"(",
"request",
".",
"ttl",
",",
"self",
".",
"_request_timed_out",
",",
"request",
... | Adds a timeout for the given request to the given future. | [
"Adds",
"a",
"timeout",
"for",
"the",
"given",
"request",
"to",
"the",
"given",
"future",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/connection.py#L713-L724 | train | 36,661 |
def get(self):
    """Receive the next message off the wire.

    :returns:
        A Future that resolves to the next message off the wire.
    """
    if not self.filling:
        self.fill()

    message_future = tornado.gen.Future()

    def _forward(inner):
        # Second hop: the queued item resolved to the actual message.
        if inner.exception():
            return message_future.set_exc_info(inner.exc_info())
        message_future.set_result(inner.result())

    def _chain(outer):
        # First hop: the queue yielded a future for the next message.
        if outer.exception():
            return message_future.set_exc_info(outer.exc_info())
        outer.result().add_done_callback(_forward)

    self.queue.get().add_done_callback(_chain)
    return message_future
"""Receive the next message off the wire.
:returns:
A Future that resolves to the next message off the wire.
"""
if not self.filling:
self.fill()
answer = tornado.gen.Future()
def _on_result(future):
if future.exception():
return answer.set_exc_info(future.exc_info())
answer.set_result(future.result())
def _on_item(future):
if future.exception():
return answer.set_exc_info(future.exc_info())
future.result().add_done_callback(_on_result)
self.queue.get().add_done_callback(_on_item)
return answer | [
"def",
"get",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"filling",
":",
"self",
".",
"fill",
"(",
")",
"answer",
"=",
"tornado",
".",
"gen",
".",
"Future",
"(",
")",
"def",
"_on_result",
"(",
"future",
")",
":",
"if",
"future",
".",
"excep... | Receive the next message off the wire.
:returns:
A Future that resolves to the next message off the wire. | [
"Receive",
"the",
"next",
"message",
"off",
"the",
"wire",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/connection.py#L777-L799 | train | 36,662 |
def put(self, message):
    """Enqueue ``message`` for writing to the wire.

    The message must be small enough to fit in a single frame.
    """
    # NOTE: explicit identity check against False is intentional; any
    # other value means the drain loop is (or is being) set up.
    needs_drain = self.draining is False
    if needs_drain:
        self.drain()
    return self._enqueue(message)
"""Enqueues the given message for writing to the wire.
The message must be small enough to fit in a single frame.
"""
if self.draining is False:
self.drain()
return self._enqueue(message) | [
"def",
"put",
"(",
"self",
",",
"message",
")",
":",
"if",
"self",
".",
"draining",
"is",
"False",
":",
"self",
".",
"drain",
"(",
")",
"return",
"self",
".",
"_enqueue",
"(",
"message",
")"
] | Enqueues the given message for writing to the wire.
The message must be small enough to fit in a single frame. | [
"Enqueues",
"the",
"given",
"message",
"for",
"writing",
"to",
"the",
"wire",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/connection.py#L850-L858 | train | 36,663 |
def advertise(
    self,
    routers=None,
    name=None,
    timeout=None,
    router_file=None,
    jitter=None,
):
    """Make this service available on the Hyperbahn routing mesh.

    Contacts one of the known Hyperbahn routers; further connections are
    established once contact is made. Starts listening first if this
    channel is not listening yet.

    :param routers:
        A seed list of Hyperbahn router addresses, e.g.
        ``["127.0.0.1:23000"]``.
    :param name:
        The identity of this service on the Hyperbahn. Defaults to the
        name this :py:class:`TChannel` was initialized with.
    :returns:
        A future that resolves to the remote server's response after the
        first advertise finishes. Advertisement continues periodically.
    """
    if not self.is_listening():
        self.listen()
    return hyperbahn.advertise(
        self,
        name or self.name,
        routers,
        timeout,
        router_file,
        jitter,
    )
) | python | def advertise(
self,
routers=None,
name=None,
timeout=None,
router_file=None,
jitter=None,
):
"""Make a service available on the Hyperbahn routing mesh.
This will make contact with a Hyperbahn host from a list of known
Hyperbahn routers. Additional Hyperbahn connections will be established
once contact has been made with the network.
:param router:
A seed list of addresses of Hyperbahn routers, e.g.,
``["127.0.0.1:23000"]``.
:param name:
The identity of this service on the Hyperbahn.
This is usually unnecessary, as it defaults to the name given when
initializing the :py:class:`TChannel` (which is used as your
identity as a caller).
:returns:
A future that resolves to the remote server's response after
the first advertise finishes.
Advertisement will continue to happen periodically.
"""
name = name or self.name
if not self.is_listening():
self.listen()
return hyperbahn.advertise(
self,
name,
routers,
timeout,
router_file,
jitter,
) | [
"def",
"advertise",
"(",
"self",
",",
"routers",
"=",
"None",
",",
"name",
"=",
"None",
",",
"timeout",
"=",
"None",
",",
"router_file",
"=",
"None",
",",
"jitter",
"=",
"None",
",",
")",
":",
"name",
"=",
"name",
"or",
"self",
".",
"name",
"if",
... | Make a service available on the Hyperbahn routing mesh.
This will make contact with a Hyperbahn host from a list of known
Hyperbahn routers. Additional Hyperbahn connections will be established
once contact has been made with the network.
:param router:
A seed list of addresses of Hyperbahn routers, e.g.,
``["127.0.0.1:23000"]``.
:param name:
The identity of this service on the Hyperbahn.
This is usually unnecessary, as it defaults to the name given when
initializing the :py:class:`TChannel` (which is used as your
identity as a caller).
:returns:
A future that resolves to the remote server's response after
the first advertise finishes.
Advertisement will continue to happen periodically. | [
"Make",
"a",
"service",
"available",
"on",
"the",
"Hyperbahn",
"routing",
"mesh",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/tchannel.py#L189-L232 | train | 36,664 |
def request(self,
            hostport=None,
            service=None,
            arg_scheme=None,
            retry=None,
            **kwargs):
    """Initiate a new request through this TChannel.

    :param hostport:
        Host to which the request will be made. If unspecified, a random
        known peer will be picked. This is not necessary if using
        Hyperbahn.
    :param service:
        The name of a service available on Hyperbahn. Defaults to an
        empty string.
    :param arg_scheme:
        Serialization scheme for the request: one of 'raw', 'json', or
        'thrift'. Defaults to 'raw'.
    :param retry:
        One of 'n' (never retry), 'c' (retry on connection errors), 't'
        (retry on timeout), 'ct' (retry on connection errors and
        timeouts). Defaults to 'c'.
    """
    # Intentionally not exposing every peer-group knob (e.g. blacklist,
    # rank threshold); those are not user-configurable right now.
    return self.peers.request(
        hostport=hostport,
        service=service,
        arg_scheme=arg_scheme,
        retry=retry,
        **kwargs
    )
**kwargs) | python | def request(self,
hostport=None,
service=None,
arg_scheme=None,
retry=None,
**kwargs):
"""Initiate a new request through this TChannel.
:param hostport:
Host to which the request will be made. If unspecified, a random
known peer will be picked. This is not necessary if using
Hyperbahn.
:param service:
The name of a service available on Hyperbahn. Defaults to an empty
string.
:param arg_scheme:
Determines the serialization scheme for the request. One of 'raw',
'json', or 'thrift'. Defaults to 'raw'.
:param rety:
One of 'n' (never retry), 'c' (retry on connection errors), 't'
(retry on timeout), 'ct' (retry on connection errors and timeouts).
Defaults to 'c'.
"""
# TODO disallow certain parameters or don't propagate them backwards.
# For example, blacklist and rank threshold aren't really
# user-configurable right now.
return self.peers.request(hostport=hostport,
service=service,
arg_scheme=arg_scheme,
retry=retry,
**kwargs) | [
"def",
"request",
"(",
"self",
",",
"hostport",
"=",
"None",
",",
"service",
"=",
"None",
",",
"arg_scheme",
"=",
"None",
",",
"retry",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"# TODO disallow certain parameters or don't propagate them backwards.",
"# Fo... | Initiate a new request through this TChannel.
:param hostport:
Host to which the request will be made. If unspecified, a random
known peer will be picked. This is not necessary if using
Hyperbahn.
:param service:
The name of a service available on Hyperbahn. Defaults to an empty
string.
:param arg_scheme:
Determines the serialization scheme for the request. One of 'raw',
'json', or 'thrift'. Defaults to 'raw'.
:param rety:
One of 'n' (never retry), 'c' (retry on connection errors), 't'
(retry on timeout), 'ct' (retry on connection errors and timeouts).
Defaults to 'c'. | [
"Initiate",
"a",
"new",
"request",
"through",
"this",
"TChannel",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/tchannel.py#L262-L296 | train | 36,665 |
def listen(self, port=None):
    """Start listening for incoming connections.

    A request handler must have already been specified with
    ``TChannel.host``.

    :param port:
        An explicit port to listen on. This is unnecessary when
        advertising on Hyperbahn (port 0 lets the OS pick one).
    :returns:
        Returns immediately.
    :raises AlreadyListeningError:
        If listen was already called.
    """
    if self.is_listening():
        raise AlreadyListeningError(
            "listen has already been called"
        )
    if port:
        assert not self._port, "Port has already been set."
        self._port = int(port)
    assert self._handler, "Call .host with a RequestHandler first"
    server = TChannelServer(self)
    bind_sockets_kwargs = {
        'port': self._port,
        # ipv6 causes random address already in use (socket.error w errno
        # == 98) when getaddrinfo() returns multiple values
        # @see https://github.com/uber/tchannel-python/issues/256
        'family': socket.AF_INET,
    }
    if self._reuse_port is True:
        # allow multiple processes to share the same port,
        # this is really useful in a world where services launch N
        # processes per container/os-space, where N is
        # the amount of cpus for example
        bind_sockets_kwargs['reuse_port'] = True
    sockets = bind_sockets(**bind_sockets_kwargs)
    assert sockets, "No sockets bound for port %d" % self._port
    # If port was 0, the OS probably assigned something better.
    self._port = sockets[0].getsockname()[1]
    server.add_sockets(sockets)
    # assign server so we don't listen twice
    self._server = server
self._server = server | python | def listen(self, port=None):
"""Start listening for incoming connections.
A request handler must have already been specified with
``TChannel.host``.
:param port:
An explicit port to listen on. This is unnecessary when advertising
on Hyperbahn.
:returns:
Returns immediately.
:raises AlreadyListeningError:
If listen was already called.
"""
if self.is_listening():
raise AlreadyListeningError(
"listen has already been called"
)
if port:
assert not self._port, "Port has already been set."
self._port = int(port)
assert self._handler, "Call .host with a RequestHandler first"
server = TChannelServer(self)
bind_sockets_kwargs = {
'port': self._port,
# ipv6 causes random address already in use (socket.error w errno
# == 98) when getaddrinfo() returns multiple values
# @see https://github.com/uber/tchannel-python/issues/256
'family': socket.AF_INET,
}
if self._reuse_port is True:
# allow multiple processes to share the same port,
# this is really useful in a world where services launch N
# processes per container/os-space, where N is
# the amount of cpus for example
bind_sockets_kwargs['reuse_port'] = True
sockets = bind_sockets(**bind_sockets_kwargs)
assert sockets, "No sockets bound for port %d" % self._port
# If port was 0, the OS probably assigned something better.
self._port = sockets[0].getsockname()[1]
server.add_sockets(sockets)
# assign server so we don't listen twice
self._server = server | [
"def",
"listen",
"(",
"self",
",",
"port",
"=",
"None",
")",
":",
"if",
"self",
".",
"is_listening",
"(",
")",
":",
"raise",
"AlreadyListeningError",
"(",
"\"listen has already been called\"",
")",
"if",
"port",
":",
"assert",
"not",
"self",
".",
"_port",
... | Start listening for incoming connections.
A request handler must have already been specified with
``TChannel.host``.
:param port:
An explicit port to listen on. This is unnecessary when advertising
on Hyperbahn.
:returns:
Returns immediately.
:raises AlreadyListeningError:
If listen was already called. | [
"Start",
"listening",
"for",
"incoming",
"connections",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/tchannel.py#L298-L350 | train | 36,666 |
def _register_simple(self, endpoint, scheme, f):
    """Register a simple (raw or JSON) endpoint with this TChannel.

    :param endpoint:
        Name of the endpoint being registered.
    :param scheme:
        Name of the arg scheme under which the endpoint will be
        registered.
    :param f:
        Callable handler for the endpoint.
    """
    assert scheme in DEFAULT_NAMES, ("Unsupported arg scheme %s" % scheme)
    # The same serializer class is used for both request and response.
    serializer_cls = JsonSerializer if scheme == JSON else RawSerializer
    self._handler.register(endpoint, f, serializer_cls(), serializer_cls())
    return f
return f | python | def _register_simple(self, endpoint, scheme, f):
"""Register a simple endpoint with this TChannel.
:param endpoint:
Name of the endpoint being registered.
:param scheme:
Name of the arg scheme under which the endpoint will be
registered.
:param f:
Callable handler for the endpoint.
"""
assert scheme in DEFAULT_NAMES, ("Unsupported arg scheme %s" % scheme)
if scheme == JSON:
req_serializer = JsonSerializer()
resp_serializer = JsonSerializer()
else:
req_serializer = RawSerializer()
resp_serializer = RawSerializer()
self._handler.register(endpoint, f, req_serializer, resp_serializer)
return f | [
"def",
"_register_simple",
"(",
"self",
",",
"endpoint",
",",
"scheme",
",",
"f",
")",
":",
"assert",
"scheme",
"in",
"DEFAULT_NAMES",
",",
"(",
"\"Unsupported arg scheme %s\"",
"%",
"scheme",
")",
"if",
"scheme",
"==",
"JSON",
":",
"req_serializer",
"=",
"J... | Register a simple endpoint with this TChannel.
:param endpoint:
Name of the endpoint being registered.
:param scheme:
Name of the arg scheme under which the endpoint will be
registered.
:param f:
Callable handler for the endpoint. | [
"Register",
"a",
"simple",
"endpoint",
"with",
"this",
"TChannel",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/tchannel.py#L368-L387 | train | 36,667 |
def _register_thrift(self, service_module, handler, **kwargs):
    """Register a Thrift endpoint on this TChannel.

    :param service_module:
        Reference to the Thrift-generated module for the service being
        registered.
    :param handler:
        Handler for the endpoint
    :param method:
        Name of the Thrift method being registered. If omitted, the
        handler's name is assumed to be the method name.
    :param service:
        Name of the Thrift service. By default this is determined
        automatically from the module name.
    """
    # Deferred import: keeps the Thrift library an optional dependency;
    # this method only works when it is installed.
    from tchannel import thrift
    thrift.register(self._handler, service_module, handler, **kwargs)
    return handler
return handler | python | def _register_thrift(self, service_module, handler, **kwargs):
"""Register a Thrift endpoint on this TChannel.
:param service_module:
Reference to the Thrift-generated module for the service being
registered.
:param handler:
Handler for the endpoint
:param method:
Name of the Thrift method being registered. If omitted, ``f``'s
name is assumed to be the method name.
:param service:
Name of the Thrift service. By default this is determined
automatically from the module name.
"""
import tchannel.thrift as thrift
# Imported inside the function so that we don't have a hard dependency
# on the Thrift library. This function is usable only if the Thrift
# library is installed.
thrift.register(self._handler, service_module, handler, **kwargs)
return handler | [
"def",
"_register_thrift",
"(",
"self",
",",
"service_module",
",",
"handler",
",",
"*",
"*",
"kwargs",
")",
":",
"import",
"tchannel",
".",
"thrift",
"as",
"thrift",
"# Imported inside the function so that we don't have a hard dependency",
"# on the Thrift library. This fu... | Register a Thrift endpoint on this TChannel.
:param service_module:
Reference to the Thrift-generated module for the service being
registered.
:param handler:
Handler for the endpoint
:param method:
Name of the Thrift method being registered. If omitted, ``f``'s
name is assumed to be the method name.
:param service:
Name of the Thrift service. By default this is determined
automatically from the module name. | [
"Register",
"a",
"Thrift",
"endpoint",
"on",
"this",
"TChannel",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/tchannel.py#L389-L409 | train | 36,668 |
def register(self, endpoint, scheme=None, handler=None, **kwargs):
    """Register a handler with this TChannel.

    May be used as a decorator:

    .. code-block:: python

        app = TChannel(name='bar')

        @app.register("hello", "json")
        def hello_handler(request, response):
            params = yield request.get_body()

    Or as a function:

    .. code-block:: python

        # Here we have a Thrift handler for `Foo::hello`
        app.register(Foo, "hello", hello_thrift_handler)

    :param endpoint:
        Name of the endpoint being registered, or a reference to the
        Thrift-generated module for Thrift endpoints, or
        ``TChannel.FALLBACK`` for a catch-all endpoint.
    :param scheme:
        Scheme under which the endpoint is registered: "raw", "json", or
        "thrift". Defaults to "raw", unless ``endpoint`` is a module, in
        which case "thrift".
    :param handler:
        The handler function. If omitted, a decorator is returned that
        registers the decorated function as the handler.
    :returns:
        ``handler`` when it was given; otherwise a decorator.
    """
    assert endpoint is not None, "endpoint is required"

    if endpoint is TChannel.FALLBACK:
        decorator = partial(self._handler.register, TChannel.FALLBACK)
    else:
        if not scheme:
            # scheme defaults to raw, unless the endpoint is a service
            # module.
            scheme = "thrift" if inspect.ismodule(endpoint) else "raw"
        scheme = scheme.lower()

        if scheme == 'thrift':
            decorator = partial(self._register_thrift, endpoint, **kwargs)
        else:
            decorator = partial(
                self._register_simple, endpoint, scheme, **kwargs
            )

    if handler is None:
        return decorator
    return decorator(handler)
"""Register a handler with this TChannel.
This may be used as a decorator:
.. code-block:: python
app = TChannel(name='bar')
@app.register("hello", "json")
def hello_handler(request, response):
params = yield request.get_body()
Or as a function:
.. code-block:: python
# Here we have a Thrift handler for `Foo::hello`
app.register(Foo, "hello", hello_thrift_handler)
:param endpoint:
Name of the endpoint being registered. This should be a reference
to the Thrift-generated module if this is a Thrift endpoint. It
may also be ``TChannel.FALLBACK`` if it's intended to be a
catch-all endpoint.
:param scheme:
Name of the scheme under which the endpoint is being registered.
One of "raw", "json", and "thrift". Defaults to "raw", except if
"endpoint" was a module, in which case this defaults to "thrift".
:param handler:
If specified, this is the handler function. If ignored, this
function returns a decorator that can be used to register the
handler function.
:returns:
If ``handler`` was specified, this returns ``handler``. Otherwise,
it returns a decorator that can be applied to a function to
register it as the handler.
"""
assert endpoint is not None, "endpoint is required"
if endpoint is TChannel.FALLBACK:
decorator = partial(self._handler.register, TChannel.FALLBACK)
if handler is not None:
return decorator(handler)
else:
return decorator
if not scheme:
# scheme defaults to raw, unless the endpoint is a service module.
if inspect.ismodule(endpoint):
scheme = "thrift"
else:
scheme = "raw"
scheme = scheme.lower()
if scheme == 'thrift':
decorator = partial(self._register_thrift, endpoint, **kwargs)
else:
decorator = partial(
self._register_simple, endpoint, scheme, **kwargs
)
if handler is not None:
return decorator(handler)
else:
return decorator | [
"def",
"register",
"(",
"self",
",",
"endpoint",
",",
"scheme",
"=",
"None",
",",
"handler",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"assert",
"endpoint",
"is",
"not",
"None",
",",
"\"endpoint is required\"",
"if",
"endpoint",
"is",
"TChannel",
"... | Register a handler with this TChannel.
This may be used as a decorator:
.. code-block:: python
app = TChannel(name='bar')
@app.register("hello", "json")
def hello_handler(request, response):
params = yield request.get_body()
Or as a function:
.. code-block:: python
# Here we have a Thrift handler for `Foo::hello`
app.register(Foo, "hello", hello_thrift_handler)
:param endpoint:
Name of the endpoint being registered. This should be a reference
to the Thrift-generated module if this is a Thrift endpoint. It
may also be ``TChannel.FALLBACK`` if it's intended to be a
catch-all endpoint.
:param scheme:
Name of the scheme under which the endpoint is being registered.
One of "raw", "json", and "thrift". Defaults to "raw", except if
"endpoint" was a module, in which case this defaults to "thrift".
:param handler:
If specified, this is the handler function. If ignored, this
function returns a decorator that can be used to register the
handler function.
:returns:
If ``handler`` was specified, this returns ``handler``. Otherwise,
it returns a decorator that can be applied to a function to
register it as the handler. | [
"Register",
"a",
"handler",
"with",
"this",
"TChannel",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/tornado/tchannel.py#L411-L477 | train | 36,669 |
uber/tchannel-python | tchannel/peer_heap.py | PeerHeap.lt | def lt(self, i, j):
"""Compare the priority of two peers.
Primary comparator will be the rank of each peer. If the ``rank`` is
same then compare the ``order``. The ``order`` attribute of the peer
tracks the heap push order of the peer. This help solve the imbalance
problem caused by randomization when deal with same rank situation.
:param i: ith peer
:param j: jth peer
:return: True or False
"""
if self.peers[i].rank == self.peers[j].rank:
return self.peers[i].order < self.peers[j].order
return self.peers[i].rank < self.peers[j].rank | python | def lt(self, i, j):
"""Compare the priority of two peers.
Primary comparator will be the rank of each peer. If the ``rank`` is
same then compare the ``order``. The ``order`` attribute of the peer
tracks the heap push order of the peer. This help solve the imbalance
problem caused by randomization when deal with same rank situation.
:param i: ith peer
:param j: jth peer
:return: True or False
"""
if self.peers[i].rank == self.peers[j].rank:
return self.peers[i].order < self.peers[j].order
return self.peers[i].rank < self.peers[j].rank | [
"def",
"lt",
"(",
"self",
",",
"i",
",",
"j",
")",
":",
"if",
"self",
".",
"peers",
"[",
"i",
"]",
".",
"rank",
"==",
"self",
".",
"peers",
"[",
"j",
"]",
".",
"rank",
":",
"return",
"self",
".",
"peers",
"[",
"i",
"]",
".",
"order",
"<",
... | Compare the priority of two peers.
Primary comparator will be the rank of each peer. If the ``rank`` is
same then compare the ``order``. The ``order`` attribute of the peer
tracks the heap push order of the peer. This help solve the imbalance
problem caused by randomization when deal with same rank situation.
:param i: ith peer
:param j: jth peer
:return: True or False | [
"Compare",
"the",
"priority",
"of",
"two",
"peers",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/peer_heap.py#L63-L78 | train | 36,670 |
uber/tchannel-python | tchannel/peer_heap.py | PeerHeap.push_peer | def push_peer(self, peer):
"""Push a new peer into the heap"""
self.order += 1
peer.order = self.order + random.randint(0, self.size())
heap.push(self, peer) | python | def push_peer(self, peer):
"""Push a new peer into the heap"""
self.order += 1
peer.order = self.order + random.randint(0, self.size())
heap.push(self, peer) | [
"def",
"push_peer",
"(",
"self",
",",
"peer",
")",
":",
"self",
".",
"order",
"+=",
"1",
"peer",
".",
"order",
"=",
"self",
".",
"order",
"+",
"random",
".",
"randint",
"(",
"0",
",",
"self",
".",
"size",
"(",
")",
")",
"heap",
".",
"push",
"("... | Push a new peer into the heap | [
"Push",
"a",
"new",
"peer",
"into",
"the",
"heap"
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/peer_heap.py#L111-L116 | train | 36,671 |
uber/tchannel-python | tchannel/peer_heap.py | PeerHeap.add_and_shuffle | def add_and_shuffle(self, peer):
"""Push a new peer into the heap and shuffle the heap"""
self.push_peer(peer)
r = random.randint(0, self.size() - 1)
self.swap_order(peer.index, r) | python | def add_and_shuffle(self, peer):
"""Push a new peer into the heap and shuffle the heap"""
self.push_peer(peer)
r = random.randint(0, self.size() - 1)
self.swap_order(peer.index, r) | [
"def",
"add_and_shuffle",
"(",
"self",
",",
"peer",
")",
":",
"self",
".",
"push_peer",
"(",
"peer",
")",
"r",
"=",
"random",
".",
"randint",
"(",
"0",
",",
"self",
".",
"size",
"(",
")",
"-",
"1",
")",
"self",
".",
"swap_order",
"(",
"peer",
"."... | Push a new peer into the heap and shuffle the heap | [
"Push",
"a",
"new",
"peer",
"into",
"the",
"heap",
"and",
"shuffle",
"the",
"heap"
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/peer_heap.py#L118-L123 | train | 36,672 |
uber/tchannel-python | tchannel/peer_heap.py | PeerHeap.remove_peer | def remove_peer(self, peer):
"""Remove the peer from the heap.
Return: removed peer if peer exists. If peer's index is out of range,
raise IndexError.
"""
if peer.index < 0 or peer.index >= self.size():
raise IndexError('Peer index is out of range')
assert peer is self.peers[peer.index], "peer is not in the heap"
return heap.remove(self, peer.index) | python | def remove_peer(self, peer):
"""Remove the peer from the heap.
Return: removed peer if peer exists. If peer's index is out of range,
raise IndexError.
"""
if peer.index < 0 or peer.index >= self.size():
raise IndexError('Peer index is out of range')
assert peer is self.peers[peer.index], "peer is not in the heap"
return heap.remove(self, peer.index) | [
"def",
"remove_peer",
"(",
"self",
",",
"peer",
")",
":",
"if",
"peer",
".",
"index",
"<",
"0",
"or",
"peer",
".",
"index",
">=",
"self",
".",
"size",
"(",
")",
":",
"raise",
"IndexError",
"(",
"'Peer index is out of range'",
")",
"assert",
"peer",
"is... | Remove the peer from the heap.
Return: removed peer if peer exists. If peer's index is out of range,
raise IndexError. | [
"Remove",
"the",
"peer",
"from",
"the",
"heap",
"."
] | ee08cce6234f24fd2373774988186dd374306c43 | https://github.com/uber/tchannel-python/blob/ee08cce6234f24fd2373774988186dd374306c43/tchannel/peer_heap.py#L136-L147 | train | 36,673 |
zamzterz/Flask-pyoidc | src/flask_pyoidc/pyoidc_facade.py | PyoidcFacade.token_request | def token_request(self, authorization_code):
"""
Makes a token request. If the 'token_endpoint' is not configured in the provider metadata, no request will
be made.
Args:
authorization_code (str): authorization code issued to client after user authorization
Returns:
Union[AccessTokenResponse, TokenErrorResponse, None]: The parsed token response, or None if no token
request was performed.
"""
if not self._client.token_endpoint:
return None
request = {
'grant_type': 'authorization_code',
'code': authorization_code,
'redirect_uri': self._redirect_uri
}
logger.debug('making token request: %s', request)
client_auth_method = self._client.registration_response.get('token_endpoint_auth_method', 'client_secret_basic')
auth_header = _ClientAuthentication(self._client.client_id, self._client.client_secret)(client_auth_method,
request)
resp = self._provider_configuration.requests_session \
.post(self._client.token_endpoint,
data=request,
headers=auth_header) \
.json()
logger.debug('received token response: %s', json.dumps(resp))
if 'error' in resp:
token_resp = TokenErrorResponse(**resp)
else:
token_resp = AccessTokenResponse(**resp)
token_resp.verify(keyjar=self._client.keyjar)
if 'id_token' in resp:
token_resp['id_token_jwt'] = resp['id_token']
return token_resp | python | def token_request(self, authorization_code):
"""
Makes a token request. If the 'token_endpoint' is not configured in the provider metadata, no request will
be made.
Args:
authorization_code (str): authorization code issued to client after user authorization
Returns:
Union[AccessTokenResponse, TokenErrorResponse, None]: The parsed token response, or None if no token
request was performed.
"""
if not self._client.token_endpoint:
return None
request = {
'grant_type': 'authorization_code',
'code': authorization_code,
'redirect_uri': self._redirect_uri
}
logger.debug('making token request: %s', request)
client_auth_method = self._client.registration_response.get('token_endpoint_auth_method', 'client_secret_basic')
auth_header = _ClientAuthentication(self._client.client_id, self._client.client_secret)(client_auth_method,
request)
resp = self._provider_configuration.requests_session \
.post(self._client.token_endpoint,
data=request,
headers=auth_header) \
.json()
logger.debug('received token response: %s', json.dumps(resp))
if 'error' in resp:
token_resp = TokenErrorResponse(**resp)
else:
token_resp = AccessTokenResponse(**resp)
token_resp.verify(keyjar=self._client.keyjar)
if 'id_token' in resp:
token_resp['id_token_jwt'] = resp['id_token']
return token_resp | [
"def",
"token_request",
"(",
"self",
",",
"authorization_code",
")",
":",
"if",
"not",
"self",
".",
"_client",
".",
"token_endpoint",
":",
"return",
"None",
"request",
"=",
"{",
"'grant_type'",
":",
"'authorization_code'",
",",
"'code'",
":",
"authorization_code... | Makes a token request. If the 'token_endpoint' is not configured in the provider metadata, no request will
be made.
Args:
authorization_code (str): authorization code issued to client after user authorization
Returns:
Union[AccessTokenResponse, TokenErrorResponse, None]: The parsed token response, or None if no token
request was performed. | [
"Makes",
"a",
"token",
"request",
".",
"If",
"the",
"token_endpoint",
"is",
"not",
"configured",
"in",
"the",
"provider",
"metadata",
"no",
"request",
"will",
"be",
"made",
"."
] | 0dba3ce8931fe5e039c66d7d645331bdbb52960a | https://github.com/zamzterz/Flask-pyoidc/blob/0dba3ce8931fe5e039c66d7d645331bdbb52960a/src/flask_pyoidc/pyoidc_facade.py#L100-L140 | train | 36,674 |
robotframework/Rammbock | src/Rammbock/robotbackgroundlogger.py | BackgroundLogger.log_background_messages | def log_background_messages(self, name=None):
"""Forwards messages logged on background to Robot Framework log.
By default forwards all messages logged by all threads, but can be
limited to a certain thread by passing thread's name as an argument.
Logged messages are removed from the message storage.
"""
with self.lock:
if name:
self._log_messages_by_thread(name)
else:
self._log_all_messages() | python | def log_background_messages(self, name=None):
"""Forwards messages logged on background to Robot Framework log.
By default forwards all messages logged by all threads, but can be
limited to a certain thread by passing thread's name as an argument.
Logged messages are removed from the message storage.
"""
with self.lock:
if name:
self._log_messages_by_thread(name)
else:
self._log_all_messages() | [
"def",
"log_background_messages",
"(",
"self",
",",
"name",
"=",
"None",
")",
":",
"with",
"self",
".",
"lock",
":",
"if",
"name",
":",
"self",
".",
"_log_messages_by_thread",
"(",
"name",
")",
"else",
":",
"self",
".",
"_log_all_messages",
"(",
")"
] | Forwards messages logged on background to Robot Framework log.
By default forwards all messages logged by all threads, but can be
limited to a certain thread by passing thread's name as an argument.
Logged messages are removed from the message storage. | [
"Forwards",
"messages",
"logged",
"on",
"background",
"to",
"Robot",
"Framework",
"log",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/robotbackgroundlogger.py#L59-L71 | train | 36,675 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.reset_rammbock | def reset_rammbock(self):
"""Closes all connections, deletes all servers, clients, and protocols.
You should call this method before exiting your test run. This will
close all the connections and the ports will therefore be available for
reuse faster.
"""
for client in self._clients:
client.close()
for server in self._servers:
server.close()
self._init_caches() | python | def reset_rammbock(self):
"""Closes all connections, deletes all servers, clients, and protocols.
You should call this method before exiting your test run. This will
close all the connections and the ports will therefore be available for
reuse faster.
"""
for client in self._clients:
client.close()
for server in self._servers:
server.close()
self._init_caches() | [
"def",
"reset_rammbock",
"(",
"self",
")",
":",
"for",
"client",
"in",
"self",
".",
"_clients",
":",
"client",
".",
"close",
"(",
")",
"for",
"server",
"in",
"self",
".",
"_servers",
":",
"server",
".",
"close",
"(",
")",
"self",
".",
"_init_caches",
... | Closes all connections, deletes all servers, clients, and protocols.
You should call this method before exiting your test run. This will
close all the connections and the ports will therefore be available for
reuse faster. | [
"Closes",
"all",
"connections",
"deletes",
"all",
"servers",
"clients",
"and",
"protocols",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L136-L147 | train | 36,676 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.clear_message_streams | def clear_message_streams(self):
""" Resets streams and sockets of incoming messages.
You can use this method to reuse the same connections for several
consecutive test cases.
"""
for client in self._clients:
client.empty()
for server in self._servers:
server.empty() | python | def clear_message_streams(self):
""" Resets streams and sockets of incoming messages.
You can use this method to reuse the same connections for several
consecutive test cases.
"""
for client in self._clients:
client.empty()
for server in self._servers:
server.empty() | [
"def",
"clear_message_streams",
"(",
"self",
")",
":",
"for",
"client",
"in",
"self",
".",
"_clients",
":",
"client",
".",
"empty",
"(",
")",
"for",
"server",
"in",
"self",
".",
"_servers",
":",
"server",
".",
"empty",
"(",
")"
] | Resets streams and sockets of incoming messages.
You can use this method to reuse the same connections for several
consecutive test cases. | [
"Resets",
"streams",
"and",
"sockets",
"of",
"incoming",
"messages",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L149-L158 | train | 36,677 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.new_protocol | def new_protocol(self, protocol_name):
"""Start defining a new protocol template.
All messages sent and received from a connection that uses a protocol
have to conform to this protocol template.
"""
if self._protocol_in_progress:
raise Exception('Can not start a new protocol definition in middle of old.')
if protocol_name in self._protocols:
raise Exception('Protocol %s already defined' % protocol_name)
self._init_new_message_stack(Protocol(protocol_name, library=self))
self._protocol_in_progress = True | python | def new_protocol(self, protocol_name):
"""Start defining a new protocol template.
All messages sent and received from a connection that uses a protocol
have to conform to this protocol template.
"""
if self._protocol_in_progress:
raise Exception('Can not start a new protocol definition in middle of old.')
if protocol_name in self._protocols:
raise Exception('Protocol %s already defined' % protocol_name)
self._init_new_message_stack(Protocol(protocol_name, library=self))
self._protocol_in_progress = True | [
"def",
"new_protocol",
"(",
"self",
",",
"protocol_name",
")",
":",
"if",
"self",
".",
"_protocol_in_progress",
":",
"raise",
"Exception",
"(",
"'Can not start a new protocol definition in middle of old.'",
")",
"if",
"protocol_name",
"in",
"self",
".",
"_protocols",
... | Start defining a new protocol template.
All messages sent and received from a connection that uses a protocol
have to conform to this protocol template. | [
"Start",
"defining",
"a",
"new",
"protocol",
"template",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L160-L171 | train | 36,678 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.end_protocol | def end_protocol(self):
"""End protocol definition."""
protocol = self._get_message_template()
self._protocols[protocol.name] = protocol
self._protocol_in_progress = False | python | def end_protocol(self):
"""End protocol definition."""
protocol = self._get_message_template()
self._protocols[protocol.name] = protocol
self._protocol_in_progress = False | [
"def",
"end_protocol",
"(",
"self",
")",
":",
"protocol",
"=",
"self",
".",
"_get_message_template",
"(",
")",
"self",
".",
"_protocols",
"[",
"protocol",
".",
"name",
"]",
"=",
"protocol",
"self",
".",
"_protocol_in_progress",
"=",
"False"
] | End protocol definition. | [
"End",
"protocol",
"definition",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L173-L177 | train | 36,679 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.start_udp_server | def start_udp_server(self, ip, port, name=None, timeout=None, protocol=None, family='ipv4'):
"""Starts a new UDP server to given `ip` and `port`.
Server can be given a `name`, default `timeout` and a `protocol`.
`family` can be either ipv4 (default) or ipv6.
Examples:
| Start UDP server | 10.10.10.2 | 53 |
| Start UDP server | 10.10.10.2 | 53 | Server1 |
| Start UDP server | 10.10.10.2 | 53 | name=Server1 | protocol=GTPV2 |
| Start UDP server | 10.10.10.2 | 53 | timeout=5 |
| Start UDP server | 0:0:0:0:0:0:0:1 | 53 | family=ipv6 |
"""
self._start_server(UDPServer, ip, port, name, timeout, protocol, family) | python | def start_udp_server(self, ip, port, name=None, timeout=None, protocol=None, family='ipv4'):
"""Starts a new UDP server to given `ip` and `port`.
Server can be given a `name`, default `timeout` and a `protocol`.
`family` can be either ipv4 (default) or ipv6.
Examples:
| Start UDP server | 10.10.10.2 | 53 |
| Start UDP server | 10.10.10.2 | 53 | Server1 |
| Start UDP server | 10.10.10.2 | 53 | name=Server1 | protocol=GTPV2 |
| Start UDP server | 10.10.10.2 | 53 | timeout=5 |
| Start UDP server | 0:0:0:0:0:0:0:1 | 53 | family=ipv6 |
"""
self._start_server(UDPServer, ip, port, name, timeout, protocol, family) | [
"def",
"start_udp_server",
"(",
"self",
",",
"ip",
",",
"port",
",",
"name",
"=",
"None",
",",
"timeout",
"=",
"None",
",",
"protocol",
"=",
"None",
",",
"family",
"=",
"'ipv4'",
")",
":",
"self",
".",
"_start_server",
"(",
"UDPServer",
",",
"ip",
",... | Starts a new UDP server to given `ip` and `port`.
Server can be given a `name`, default `timeout` and a `protocol`.
`family` can be either ipv4 (default) or ipv6.
Examples:
| Start UDP server | 10.10.10.2 | 53 |
| Start UDP server | 10.10.10.2 | 53 | Server1 |
| Start UDP server | 10.10.10.2 | 53 | name=Server1 | protocol=GTPV2 |
| Start UDP server | 10.10.10.2 | 53 | timeout=5 |
| Start UDP server | 0:0:0:0:0:0:0:1 | 53 | family=ipv6 | | [
"Starts",
"a",
"new",
"UDP",
"server",
"to",
"given",
"ip",
"and",
"port",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L179-L192 | train | 36,680 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.start_tcp_server | def start_tcp_server(self, ip, port, name=None, timeout=None, protocol=None, family='ipv4'):
"""Starts a new TCP server to given `ip` and `port`.
Server can be given a `name`, default `timeout` and a `protocol`.
`family` can be either ipv4 (default) or ipv6. Notice that you have to
use `Accept Connection` keyword for server to receive connections.
Examples:
| Start TCP server | 10.10.10.2 | 53 |
| Start TCP server | 10.10.10.2 | 53 | Server1 |
| Start TCP server | 10.10.10.2 | 53 | name=Server1 | protocol=GTPV2 |
| Start TCP server | 10.10.10.2 | 53 | timeout=5 |
| Start TCP server | 0:0:0:0:0:0:0:1 | 53 | family=ipv6 |
"""
self._start_server(TCPServer, ip, port, name, timeout, protocol, family) | python | def start_tcp_server(self, ip, port, name=None, timeout=None, protocol=None, family='ipv4'):
"""Starts a new TCP server to given `ip` and `port`.
Server can be given a `name`, default `timeout` and a `protocol`.
`family` can be either ipv4 (default) or ipv6. Notice that you have to
use `Accept Connection` keyword for server to receive connections.
Examples:
| Start TCP server | 10.10.10.2 | 53 |
| Start TCP server | 10.10.10.2 | 53 | Server1 |
| Start TCP server | 10.10.10.2 | 53 | name=Server1 | protocol=GTPV2 |
| Start TCP server | 10.10.10.2 | 53 | timeout=5 |
| Start TCP server | 0:0:0:0:0:0:0:1 | 53 | family=ipv6 |
"""
self._start_server(TCPServer, ip, port, name, timeout, protocol, family) | [
"def",
"start_tcp_server",
"(",
"self",
",",
"ip",
",",
"port",
",",
"name",
"=",
"None",
",",
"timeout",
"=",
"None",
",",
"protocol",
"=",
"None",
",",
"family",
"=",
"'ipv4'",
")",
":",
"self",
".",
"_start_server",
"(",
"TCPServer",
",",
"ip",
",... | Starts a new TCP server to given `ip` and `port`.
Server can be given a `name`, default `timeout` and a `protocol`.
`family` can be either ipv4 (default) or ipv6. Notice that you have to
use `Accept Connection` keyword for server to receive connections.
Examples:
| Start TCP server | 10.10.10.2 | 53 |
| Start TCP server | 10.10.10.2 | 53 | Server1 |
| Start TCP server | 10.10.10.2 | 53 | name=Server1 | protocol=GTPV2 |
| Start TCP server | 10.10.10.2 | 53 | timeout=5 |
| Start TCP server | 0:0:0:0:0:0:0:1 | 53 | family=ipv6 | | [
"Starts",
"a",
"new",
"TCP",
"server",
"to",
"given",
"ip",
"and",
"port",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L194-L208 | train | 36,681 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.start_sctp_server | def start_sctp_server(self, ip, port, name=None, timeout=None, protocol=None, family='ipv4'):
"""Starts a new STCP server to given `ip` and `port`.
`family` can be either ipv4 (default) or ipv6.
pysctp (https://github.com/philpraxis/pysctp) need to be installed your system.
Server can be given a `name`, default `timeout` and a `protocol`.
Notice that you have to use `Accept Connection` keyword for server to
receive connections.
Examples:
| Start STCP server | 10.10.10.2 | 53 |
| Start STCP server | 10.10.10.2 | 53 | Server1 |
| Start STCP server | 10.10.10.2 | 53 | name=Server1 | protocol=GTPV2 |
| Start STCP server | 10.10.10.2 | 53 | timeout=5 |
"""
self._start_server(SCTPServer, ip, port, name, timeout, protocol, family) | python | def start_sctp_server(self, ip, port, name=None, timeout=None, protocol=None, family='ipv4'):
"""Starts a new STCP server to given `ip` and `port`.
`family` can be either ipv4 (default) or ipv6.
pysctp (https://github.com/philpraxis/pysctp) need to be installed your system.
Server can be given a `name`, default `timeout` and a `protocol`.
Notice that you have to use `Accept Connection` keyword for server to
receive connections.
Examples:
| Start STCP server | 10.10.10.2 | 53 |
| Start STCP server | 10.10.10.2 | 53 | Server1 |
| Start STCP server | 10.10.10.2 | 53 | name=Server1 | protocol=GTPV2 |
| Start STCP server | 10.10.10.2 | 53 | timeout=5 |
"""
self._start_server(SCTPServer, ip, port, name, timeout, protocol, family) | [
"def",
"start_sctp_server",
"(",
"self",
",",
"ip",
",",
"port",
",",
"name",
"=",
"None",
",",
"timeout",
"=",
"None",
",",
"protocol",
"=",
"None",
",",
"family",
"=",
"'ipv4'",
")",
":",
"self",
".",
"_start_server",
"(",
"SCTPServer",
",",
"ip",
... | Starts a new STCP server to given `ip` and `port`.
`family` can be either ipv4 (default) or ipv6.
pysctp (https://github.com/philpraxis/pysctp) need to be installed your system.
Server can be given a `name`, default `timeout` and a `protocol`.
Notice that you have to use `Accept Connection` keyword for server to
receive connections.
Examples:
| Start STCP server | 10.10.10.2 | 53 |
| Start STCP server | 10.10.10.2 | 53 | Server1 |
| Start STCP server | 10.10.10.2 | 53 | name=Server1 | protocol=GTPV2 |
| Start STCP server | 10.10.10.2 | 53 | timeout=5 | | [
"Starts",
"a",
"new",
"STCP",
"server",
"to",
"given",
"ip",
"and",
"port",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L210-L226 | train | 36,682 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.start_udp_client | def start_udp_client(self, ip=None, port=None, name=None, timeout=None, protocol=None, family='ipv4'):
"""Starts a new UDP client.
Client can be optionally given `ip` and `port` to bind to, as well as
`name`, default `timeout` and a `protocol`. `family` can be either
ipv4 (default) or ipv6.
You should use `Connect` keyword to connect client to a host.
Examples:
| Start UDP client |
| Start UDP client | name=Client1 | protocol=GTPV2 |
| Start UDP client | 10.10.10.2 | 53 | name=Server1 | protocol=GTPV2 |
| Start UDP client | timeout=5 |
| Start UDP client | 0:0:0:0:0:0:0:1 | 53 | family=ipv6 |
"""
self._start_client(UDPClient, ip, port, name, timeout, protocol, family) | python | def start_udp_client(self, ip=None, port=None, name=None, timeout=None, protocol=None, family='ipv4'):
"""Starts a new UDP client.
Client can be optionally given `ip` and `port` to bind to, as well as
`name`, default `timeout` and a `protocol`. `family` can be either
ipv4 (default) or ipv6.
You should use `Connect` keyword to connect client to a host.
Examples:
| Start UDP client |
| Start UDP client | name=Client1 | protocol=GTPV2 |
| Start UDP client | 10.10.10.2 | 53 | name=Server1 | protocol=GTPV2 |
| Start UDP client | timeout=5 |
| Start UDP client | 0:0:0:0:0:0:0:1 | 53 | family=ipv6 |
"""
self._start_client(UDPClient, ip, port, name, timeout, protocol, family) | [
"def",
"start_udp_client",
"(",
"self",
",",
"ip",
"=",
"None",
",",
"port",
"=",
"None",
",",
"name",
"=",
"None",
",",
"timeout",
"=",
"None",
",",
"protocol",
"=",
"None",
",",
"family",
"=",
"'ipv4'",
")",
":",
"self",
".",
"_start_client",
"(",
... | Starts a new UDP client.
Client can be optionally given `ip` and `port` to bind to, as well as
`name`, default `timeout` and a `protocol`. `family` can be either
ipv4 (default) or ipv6.
You should use `Connect` keyword to connect client to a host.
Examples:
| Start UDP client |
| Start UDP client | name=Client1 | protocol=GTPV2 |
| Start UDP client | 10.10.10.2 | 53 | name=Server1 | protocol=GTPV2 |
| Start UDP client | timeout=5 |
| Start UDP client | 0:0:0:0:0:0:0:1 | 53 | family=ipv6 | | [
"Starts",
"a",
"new",
"UDP",
"client",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L233-L249 | train | 36,683 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.start_tcp_client | def start_tcp_client(self, ip=None, port=None, name=None, timeout=None, protocol=None, family='ipv4'):
"""Starts a new TCP client.
Client can be optionally given `ip` and `port` to bind to, as well as
`name`, default `timeout` and a `protocol`. `family` can be either
ipv4 (default) or ipv6.
You should use `Connect` keyword to connect client to a host.
Examples:
| Start TCP client |
| Start TCP client | name=Client1 | protocol=GTPV2 |
| Start TCP client | 10.10.10.2 | 53 | name=Server1 | protocol=GTPV2 |
| Start TCP client | timeout=5 |
| Start TCP client | 0:0:0:0:0:0:0:1 | 53 | family=ipv6 |
"""
self._start_client(TCPClient, ip, port, name, timeout, protocol, family) | python | def start_tcp_client(self, ip=None, port=None, name=None, timeout=None, protocol=None, family='ipv4'):
"""Starts a new TCP client.
Client can be optionally given `ip` and `port` to bind to, as well as
`name`, default `timeout` and a `protocol`. `family` can be either
ipv4 (default) or ipv6.
You should use `Connect` keyword to connect client to a host.
Examples:
| Start TCP client |
| Start TCP client | name=Client1 | protocol=GTPV2 |
| Start TCP client | 10.10.10.2 | 53 | name=Server1 | protocol=GTPV2 |
| Start TCP client | timeout=5 |
| Start TCP client | 0:0:0:0:0:0:0:1 | 53 | family=ipv6 |
"""
self._start_client(TCPClient, ip, port, name, timeout, protocol, family) | [
"def",
"start_tcp_client",
"(",
"self",
",",
"ip",
"=",
"None",
",",
"port",
"=",
"None",
",",
"name",
"=",
"None",
",",
"timeout",
"=",
"None",
",",
"protocol",
"=",
"None",
",",
"family",
"=",
"'ipv4'",
")",
":",
"self",
".",
"_start_client",
"(",
... | Starts a new TCP client.
Client can be optionally given `ip` and `port` to bind to, as well as
`name`, default `timeout` and a `protocol`. `family` can be either
ipv4 (default) or ipv6.
You should use `Connect` keyword to connect client to a host.
Examples:
| Start TCP client |
| Start TCP client | name=Client1 | protocol=GTPV2 |
| Start TCP client | 10.10.10.2 | 53 | name=Server1 | protocol=GTPV2 |
| Start TCP client | timeout=5 |
| Start TCP client | 0:0:0:0:0:0:0:1 | 53 | family=ipv6 | | [
"Starts",
"a",
"new",
"TCP",
"client",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L251-L267 | train | 36,684 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.start_sctp_client | def start_sctp_client(self, ip=None, port=None, name=None, timeout=None, protocol=None, family='ipv4'):
"""Starts a new SCTP client.
Client can be optionally given `ip` and `port` to bind to, as well as
`name`, default `timeout` and a `protocol`. `family` can be either
ipv4 (default) or ipv6.
You should use `Connect` keyword to connect client to a host.
Examples:
| Start TCP client |
| Start TCP client | name=Client1 | protocol=GTPV2 |
| Start TCP client | 10.10.10.2 | 53 | name=Server1 | protocol=GTPV2 |
| Start TCP client | timeout=5 |
"""
self._start_client(SCTPClient, ip, port, name, timeout, protocol, family) | python | def start_sctp_client(self, ip=None, port=None, name=None, timeout=None, protocol=None, family='ipv4'):
"""Starts a new SCTP client.
Client can be optionally given `ip` and `port` to bind to, as well as
`name`, default `timeout` and a `protocol`. `family` can be either
ipv4 (default) or ipv6.
You should use `Connect` keyword to connect client to a host.
Examples:
| Start TCP client |
| Start TCP client | name=Client1 | protocol=GTPV2 |
| Start TCP client | 10.10.10.2 | 53 | name=Server1 | protocol=GTPV2 |
| Start TCP client | timeout=5 |
"""
self._start_client(SCTPClient, ip, port, name, timeout, protocol, family) | [
"def",
"start_sctp_client",
"(",
"self",
",",
"ip",
"=",
"None",
",",
"port",
"=",
"None",
",",
"name",
"=",
"None",
",",
"timeout",
"=",
"None",
",",
"protocol",
"=",
"None",
",",
"family",
"=",
"'ipv4'",
")",
":",
"self",
".",
"_start_client",
"(",... | Starts a new SCTP client.
Client can be optionally given `ip` and `port` to bind to, as well as
`name`, default `timeout` and a `protocol`. `family` can be either
ipv4 (default) or ipv6.
You should use `Connect` keyword to connect client to a host.
Examples:
| Start TCP client |
| Start TCP client | name=Client1 | protocol=GTPV2 |
| Start TCP client | 10.10.10.2 | 53 | name=Server1 | protocol=GTPV2 |
| Start TCP client | timeout=5 | | [
"Starts",
"a",
"new",
"SCTP",
"client",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L269-L284 | train | 36,685 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.accept_connection | def accept_connection(self, name=None, alias=None, timeout=0):
"""Accepts a connection to server identified by `name` or the latest
server if `name` is empty.
If given an `alias`, the connection is named and can be later referenced
with that name.
If `timeout` is > 0, the connection times out after the time specified.
`timeout` defaults to 0 which will wait indefinitely.
Empty value or None will use socket default timeout.
Examples:
| Accept connection |
| Accept connection | Server1 | my_connection |
| Accept connection | Server1 | my_connection | timeout=5 |
"""
server = self._servers.get(name)
server.accept_connection(alias, timeout) | python | def accept_connection(self, name=None, alias=None, timeout=0):
"""Accepts a connection to server identified by `name` or the latest
server if `name` is empty.
If given an `alias`, the connection is named and can be later referenced
with that name.
If `timeout` is > 0, the connection times out after the time specified.
`timeout` defaults to 0 which will wait indefinitely.
Empty value or None will use socket default timeout.
Examples:
| Accept connection |
| Accept connection | Server1 | my_connection |
| Accept connection | Server1 | my_connection | timeout=5 |
"""
server = self._servers.get(name)
server.accept_connection(alias, timeout) | [
"def",
"accept_connection",
"(",
"self",
",",
"name",
"=",
"None",
",",
"alias",
"=",
"None",
",",
"timeout",
"=",
"0",
")",
":",
"server",
"=",
"self",
".",
"_servers",
".",
"get",
"(",
"name",
")",
"server",
".",
"accept_connection",
"(",
"alias",
... | Accepts a connection to server identified by `name` or the latest
server if `name` is empty.
If given an `alias`, the connection is named and can be later referenced
with that name.
If `timeout` is > 0, the connection times out after the time specified.
`timeout` defaults to 0 which will wait indefinitely.
Empty value or None will use socket default timeout.
Examples:
| Accept connection |
| Accept connection | Server1 | my_connection |
| Accept connection | Server1 | my_connection | timeout=5 | | [
"Accepts",
"a",
"connection",
"to",
"server",
"identified",
"by",
"name",
"or",
"the",
"latest",
"server",
"if",
"name",
"is",
"empty",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L306-L323 | train | 36,686 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.connect | def connect(self, host, port, name=None):
"""Connects a client to given `host` and `port`. If client `name` is not
given then connects the latest client.
Examples:
| Connect | 127.0.0.1 | 8080 |
| Connect | 127.0.0.1 | 8080 | Client1 |
"""
client = self._clients.get(name)
client.connect_to(host, port) | python | def connect(self, host, port, name=None):
"""Connects a client to given `host` and `port`. If client `name` is not
given then connects the latest client.
Examples:
| Connect | 127.0.0.1 | 8080 |
| Connect | 127.0.0.1 | 8080 | Client1 |
"""
client = self._clients.get(name)
client.connect_to(host, port) | [
"def",
"connect",
"(",
"self",
",",
"host",
",",
"port",
",",
"name",
"=",
"None",
")",
":",
"client",
"=",
"self",
".",
"_clients",
".",
"get",
"(",
"name",
")",
"client",
".",
"connect_to",
"(",
"host",
",",
"port",
")"
] | Connects a client to given `host` and `port`. If client `name` is not
given then connects the latest client.
Examples:
| Connect | 127.0.0.1 | 8080 |
| Connect | 127.0.0.1 | 8080 | Client1 | | [
"Connects",
"a",
"client",
"to",
"given",
"host",
"and",
"port",
".",
"If",
"client",
"name",
"is",
"not",
"given",
"then",
"connects",
"the",
"latest",
"client",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L325-L334 | train | 36,687 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.server_receives_binary_from | def server_receives_binary_from(self, name=None, timeout=None, connection=None, label=None):
"""Receive raw binary message. Returns message, ip, and port.
If server `name` is not given, uses the latest server. Optional message
`label` is shown on logs.
Examples:
| ${binary} | ${ip} | ${port} = | Server receives binary from |
| ${binary} | ${ip} | ${port} = | Server receives binary from | Server1 | connection=my_connection | timeout=5 |
"""
server, name = self._servers.get_with_name(name)
msg, ip, port = server.receive_from(timeout=timeout, alias=connection)
self._register_receive(server, label, name, connection=connection)
return msg, ip, port | python | def server_receives_binary_from(self, name=None, timeout=None, connection=None, label=None):
"""Receive raw binary message. Returns message, ip, and port.
If server `name` is not given, uses the latest server. Optional message
`label` is shown on logs.
Examples:
| ${binary} | ${ip} | ${port} = | Server receives binary from |
| ${binary} | ${ip} | ${port} = | Server receives binary from | Server1 | connection=my_connection | timeout=5 |
"""
server, name = self._servers.get_with_name(name)
msg, ip, port = server.receive_from(timeout=timeout, alias=connection)
self._register_receive(server, label, name, connection=connection)
return msg, ip, port | [
"def",
"server_receives_binary_from",
"(",
"self",
",",
"name",
"=",
"None",
",",
"timeout",
"=",
"None",
",",
"connection",
"=",
"None",
",",
"label",
"=",
"None",
")",
":",
"server",
",",
"name",
"=",
"self",
".",
"_servers",
".",
"get_with_name",
"(",... | Receive raw binary message. Returns message, ip, and port.
If server `name` is not given, uses the latest server. Optional message
`label` is shown on logs.
Examples:
| ${binary} | ${ip} | ${port} = | Server receives binary from |
| ${binary} | ${ip} | ${port} = | Server receives binary from | Server1 | connection=my_connection | timeout=5 | | [
"Receive",
"raw",
"binary",
"message",
".",
"Returns",
"message",
"ip",
"and",
"port",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L401-L414 | train | 36,688 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.new_message | def new_message(self, message_name, protocol=None, *parameters):
"""Define a new message template with `message_name`.
`protocol` has to be defined earlier with `Start Protocol Description`.
Optional parameters are default values for message header separated with
colon.
Examples:
| New message | MyMessage | MyProtocol | header_field:value |
"""
proto = self._get_protocol(protocol)
if not proto:
raise Exception("Protocol not defined! Please define a protocol before creating a message!")
if self._protocol_in_progress:
raise Exception("Protocol definition in progress. Please finish it before starting to define a message.")
configs, fields, header_fields = self._parse_parameters(parameters)
self._raise_error_if_configs_or_fields(configs, fields, 'New message')
self._init_new_message_stack(MessageTemplate(message_name, proto, header_fields)) | python | def new_message(self, message_name, protocol=None, *parameters):
"""Define a new message template with `message_name`.
`protocol` has to be defined earlier with `Start Protocol Description`.
Optional parameters are default values for message header separated with
colon.
Examples:
| New message | MyMessage | MyProtocol | header_field:value |
"""
proto = self._get_protocol(protocol)
if not proto:
raise Exception("Protocol not defined! Please define a protocol before creating a message!")
if self._protocol_in_progress:
raise Exception("Protocol definition in progress. Please finish it before starting to define a message.")
configs, fields, header_fields = self._parse_parameters(parameters)
self._raise_error_if_configs_or_fields(configs, fields, 'New message')
self._init_new_message_stack(MessageTemplate(message_name, proto, header_fields)) | [
"def",
"new_message",
"(",
"self",
",",
"message_name",
",",
"protocol",
"=",
"None",
",",
"*",
"parameters",
")",
":",
"proto",
"=",
"self",
".",
"_get_protocol",
"(",
"protocol",
")",
"if",
"not",
"proto",
":",
"raise",
"Exception",
"(",
"\"Protocol not ... | Define a new message template with `message_name`.
`protocol` has to be defined earlier with `Start Protocol Description`.
Optional parameters are default values for message header separated with
colon.
Examples:
| New message | MyMessage | MyProtocol | header_field:value | | [
"Define",
"a",
"new",
"message",
"template",
"with",
"message_name",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L421-L438 | train | 36,689 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.save_template | def save_template(self, name, unlocked=False):
"""Save a message template for later use with `Load template`.
If saved template is marked as unlocked, then changes can be made to it
afterwards. By default tempaltes are locked.
Examples:
| Save Template | MyMessage |
| Save Template | MyOtherMessage | unlocked=True |
"""
if isinstance(unlocked, basestring):
unlocked = unlocked.lower() != 'false'
template = self._get_message_template()
if not unlocked:
template.set_as_saved()
self._message_templates[name] = (template, self._field_values) | python | def save_template(self, name, unlocked=False):
"""Save a message template for later use with `Load template`.
If saved template is marked as unlocked, then changes can be made to it
afterwards. By default tempaltes are locked.
Examples:
| Save Template | MyMessage |
| Save Template | MyOtherMessage | unlocked=True |
"""
if isinstance(unlocked, basestring):
unlocked = unlocked.lower() != 'false'
template = self._get_message_template()
if not unlocked:
template.set_as_saved()
self._message_templates[name] = (template, self._field_values) | [
"def",
"save_template",
"(",
"self",
",",
"name",
",",
"unlocked",
"=",
"False",
")",
":",
"if",
"isinstance",
"(",
"unlocked",
",",
"basestring",
")",
":",
"unlocked",
"=",
"unlocked",
".",
"lower",
"(",
")",
"!=",
"'false'",
"template",
"=",
"self",
... | Save a message template for later use with `Load template`.
If saved template is marked as unlocked, then changes can be made to it
afterwards. By default tempaltes are locked.
Examples:
| Save Template | MyMessage |
| Save Template | MyOtherMessage | unlocked=True | | [
"Save",
"a",
"message",
"template",
"for",
"later",
"use",
"with",
"Load",
"template",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L444-L459 | train | 36,690 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.load_template | def load_template(self, name, *parameters):
"""Load a message template saved with `Save template`.
Optional parameters are default values for message header separated with
colon.
Examples:
| Load Template | MyMessage | header_field:value |
"""
template, fields, header_fields = self._set_templates_fields_and_header_fields(name, parameters)
self._init_new_message_stack(template, fields, header_fields) | python | def load_template(self, name, *parameters):
"""Load a message template saved with `Save template`.
Optional parameters are default values for message header separated with
colon.
Examples:
| Load Template | MyMessage | header_field:value |
"""
template, fields, header_fields = self._set_templates_fields_and_header_fields(name, parameters)
self._init_new_message_stack(template, fields, header_fields) | [
"def",
"load_template",
"(",
"self",
",",
"name",
",",
"*",
"parameters",
")",
":",
"template",
",",
"fields",
",",
"header_fields",
"=",
"self",
".",
"_set_templates_fields_and_header_fields",
"(",
"name",
",",
"parameters",
")",
"self",
".",
"_init_new_message... | Load a message template saved with `Save template`.
Optional parameters are default values for message header separated with
colon.
Examples:
| Load Template | MyMessage | header_field:value | | [
"Load",
"a",
"message",
"template",
"saved",
"with",
"Save",
"template",
".",
"Optional",
"parameters",
"are",
"default",
"values",
"for",
"message",
"header",
"separated",
"with",
"colon",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L461-L470 | train | 36,691 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.load_copy_of_template | def load_copy_of_template(self, name, *parameters):
"""Load a copy of message template saved with `Save template` when originally saved values need to be preserved
from test to test.
Optional parameters are default values for message header separated with
colon.
Examples:
| Load Copy Of Template | MyMessage | header_field:value |
"""
template, fields, header_fields = self._set_templates_fields_and_header_fields(name, parameters)
copy_of_template = copy.deepcopy(template)
copy_of_fields = copy.deepcopy(fields)
self._init_new_message_stack(copy_of_template, copy_of_fields, header_fields) | python | def load_copy_of_template(self, name, *parameters):
"""Load a copy of message template saved with `Save template` when originally saved values need to be preserved
from test to test.
Optional parameters are default values for message header separated with
colon.
Examples:
| Load Copy Of Template | MyMessage | header_field:value |
"""
template, fields, header_fields = self._set_templates_fields_and_header_fields(name, parameters)
copy_of_template = copy.deepcopy(template)
copy_of_fields = copy.deepcopy(fields)
self._init_new_message_stack(copy_of_template, copy_of_fields, header_fields) | [
"def",
"load_copy_of_template",
"(",
"self",
",",
"name",
",",
"*",
"parameters",
")",
":",
"template",
",",
"fields",
",",
"header_fields",
"=",
"self",
".",
"_set_templates_fields_and_header_fields",
"(",
"name",
",",
"parameters",
")",
"copy_of_template",
"=",
... | Load a copy of message template saved with `Save template` when originally saved values need to be preserved
from test to test.
Optional parameters are default values for message header separated with
colon.
Examples:
| Load Copy Of Template | MyMessage | header_field:value | | [
"Load",
"a",
"copy",
"of",
"message",
"template",
"saved",
"with",
"Save",
"template",
"when",
"originally",
"saved",
"values",
"need",
"to",
"be",
"preserved",
"from",
"test",
"to",
"test",
".",
"Optional",
"parameters",
"are",
"default",
"values",
"for",
"... | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L472-L484 | train | 36,692 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.get_message | def get_message(self, *parameters):
"""Get encoded message.
* Send Message -keywords are convenience methods, that will call this to
get the message object and then send it. Optional parameters are message
field values separated with colon.
Examples:
| ${msg} = | Get message |
| ${msg} = | Get message | field_name:value |
"""
_, message_fields, header_fields = self._get_parameters_with_defaults(parameters)
return self._encode_message(message_fields, header_fields) | python | def get_message(self, *parameters):
"""Get encoded message.
* Send Message -keywords are convenience methods, that will call this to
get the message object and then send it. Optional parameters are message
field values separated with colon.
Examples:
| ${msg} = | Get message |
| ${msg} = | Get message | field_name:value |
"""
_, message_fields, header_fields = self._get_parameters_with_defaults(parameters)
return self._encode_message(message_fields, header_fields) | [
"def",
"get_message",
"(",
"self",
",",
"*",
"parameters",
")",
":",
"_",
",",
"message_fields",
",",
"header_fields",
"=",
"self",
".",
"_get_parameters_with_defaults",
"(",
"parameters",
")",
"return",
"self",
".",
"_encode_message",
"(",
"message_fields",
","... | Get encoded message.
* Send Message -keywords are convenience methods, that will call this to
get the message object and then send it. Optional parameters are message
field values separated with colon.
Examples:
| ${msg} = | Get message |
| ${msg} = | Get message | field_name:value | | [
"Get",
"encoded",
"message",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L492-L504 | train | 36,693 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.validate_message | def validate_message(self, msg, *parameters):
"""Validates given message using template defined with `New Message` and
field values given as optional arguments.
Examples:
| Validate message | ${msg} |
| Validate message | ${msg} | status:0 |
"""
_, message_fields, header_fields = self._get_parameters_with_defaults(parameters)
self._validate_message(msg, message_fields, header_fields) | python | def validate_message(self, msg, *parameters):
"""Validates given message using template defined with `New Message` and
field values given as optional arguments.
Examples:
| Validate message | ${msg} |
| Validate message | ${msg} | status:0 |
"""
_, message_fields, header_fields = self._get_parameters_with_defaults(parameters)
self._validate_message(msg, message_fields, header_fields) | [
"def",
"validate_message",
"(",
"self",
",",
"msg",
",",
"*",
"parameters",
")",
":",
"_",
",",
"message_fields",
",",
"header_fields",
"=",
"self",
".",
"_get_parameters_with_defaults",
"(",
"parameters",
")",
"self",
".",
"_validate_message",
"(",
"msg",
","... | Validates given message using template defined with `New Message` and
field values given as optional arguments.
Examples:
| Validate message | ${msg} |
| Validate message | ${msg} | status:0 | | [
"Validates",
"given",
"message",
"using",
"template",
"defined",
"with",
"New",
"Message",
"and",
"field",
"values",
"given",
"as",
"optional",
"arguments",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L637-L646 | train | 36,694 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.uint | def uint(self, length, name, value=None, align=None):
"""Add an unsigned integer to template.
`length` is given in bytes and `value` is optional. `align` can be used
to align the field to longer byte length.
Examples:
| uint | 2 | foo |
| uint | 2 | foo | 42 |
| uint | 2 | fourByteFoo | 42 | align=4 |
"""
self._add_field(UInt(length, name, value, align=align)) | python | def uint(self, length, name, value=None, align=None):
"""Add an unsigned integer to template.
`length` is given in bytes and `value` is optional. `align` can be used
to align the field to longer byte length.
Examples:
| uint | 2 | foo |
| uint | 2 | foo | 42 |
| uint | 2 | fourByteFoo | 42 | align=4 |
"""
self._add_field(UInt(length, name, value, align=align)) | [
"def",
"uint",
"(",
"self",
",",
"length",
",",
"name",
",",
"value",
"=",
"None",
",",
"align",
"=",
"None",
")",
":",
"self",
".",
"_add_field",
"(",
"UInt",
"(",
"length",
",",
"name",
",",
"value",
",",
"align",
"=",
"align",
")",
")"
] | Add an unsigned integer to template.
`length` is given in bytes and `value` is optional. `align` can be used
to align the field to longer byte length.
Examples:
| uint | 2 | foo |
| uint | 2 | foo | 42 |
| uint | 2 | fourByteFoo | 42 | align=4 | | [
"Add",
"an",
"unsigned",
"integer",
"to",
"template",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L668-L679 | train | 36,695 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.int | def int(self, length, name, value=None, align=None):
"""Add an signed integer to template.
`length` is given in bytes and `value` is optional. `align` can be used
to align the field to longer byte length.
Signed integer uses twos-complement with bits numbered in big-endian.
Examples:
| int | 2 | foo |
| int | 2 | foo | 42 |
| int | 2 | fourByteFoo | 42 | align=4 |
"""
self._add_field(Int(length, name, value, align=align)) | python | def int(self, length, name, value=None, align=None):
"""Add an signed integer to template.
`length` is given in bytes and `value` is optional. `align` can be used
to align the field to longer byte length.
Signed integer uses twos-complement with bits numbered in big-endian.
Examples:
| int | 2 | foo |
| int | 2 | foo | 42 |
| int | 2 | fourByteFoo | 42 | align=4 |
"""
self._add_field(Int(length, name, value, align=align)) | [
"def",
"int",
"(",
"self",
",",
"length",
",",
"name",
",",
"value",
"=",
"None",
",",
"align",
"=",
"None",
")",
":",
"self",
".",
"_add_field",
"(",
"Int",
"(",
"length",
",",
"name",
",",
"value",
",",
"align",
"=",
"align",
")",
")"
] | Add an signed integer to template.
`length` is given in bytes and `value` is optional. `align` can be used
to align the field to longer byte length.
Signed integer uses twos-complement with bits numbered in big-endian.
Examples:
| int | 2 | foo |
| int | 2 | foo | 42 |
| int | 2 | fourByteFoo | 42 | align=4 | | [
"Add",
"an",
"signed",
"integer",
"to",
"template",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L681-L693 | train | 36,696 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.chars | def chars(self, length, name, value=None, terminator=None):
"""Add a char array to template.
`length` is given in bytes and can refer to earlier numeric fields in
template. Special value '*' in length means that length is encoded to
length of value and decoded as all available bytes.
`value` is optional.
`value` could be either a "String" or a "Regular Expression" and
if it is a Regular Expression it must be prefixed by 'REGEXP:'.
Examples:
| chars | 16 | field | Hello World! |
| u8 | charLength |
| chars | charLength | field |
| chars | * | field | Hello World! |
| chars | * | field | REGEXP:^{[a-zA-Z ]+}$ |
"""
self._add_field(Char(length, name, value, terminator)) | python | def chars(self, length, name, value=None, terminator=None):
"""Add a char array to template.
`length` is given in bytes and can refer to earlier numeric fields in
template. Special value '*' in length means that length is encoded to
length of value and decoded as all available bytes.
`value` is optional.
`value` could be either a "String" or a "Regular Expression" and
if it is a Regular Expression it must be prefixed by 'REGEXP:'.
Examples:
| chars | 16 | field | Hello World! |
| u8 | charLength |
| chars | charLength | field |
| chars | * | field | Hello World! |
| chars | * | field | REGEXP:^{[a-zA-Z ]+}$ |
"""
self._add_field(Char(length, name, value, terminator)) | [
"def",
"chars",
"(",
"self",
",",
"length",
",",
"name",
",",
"value",
"=",
"None",
",",
"terminator",
"=",
"None",
")",
":",
"self",
".",
"_add_field",
"(",
"Char",
"(",
"length",
",",
"name",
",",
"value",
",",
"terminator",
")",
")"
] | Add a char array to template.
`length` is given in bytes and can refer to earlier numeric fields in
template. Special value '*' in length means that length is encoded to
length of value and decoded as all available bytes.
`value` is optional.
`value` could be either a "String" or a "Regular Expression" and
if it is a Regular Expression it must be prefixed by 'REGEXP:'.
Examples:
| chars | 16 | field | Hello World! |
| u8 | charLength |
| chars | charLength | field |
| chars | * | field | Hello World! |
| chars | * | field | REGEXP:^{[a-zA-Z ]+}$ | | [
"Add",
"a",
"char",
"array",
"to",
"template",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L695-L716 | train | 36,697 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore.new_struct | def new_struct(self, type, name, *parameters):
"""Defines a new struct to template.
You must call `End Struct` to end struct definition. `type` is the name
for generic type and `name` is the field name in containing structure.
Possible parameters are values for struct fields separated with colon
and optional struct length defined with `length=`. Length can be used in
receiveing to validate that struct matches predfeined length. When
sending, the struct length can refer to other message field which will
then be set dynamically.
Examples:
| New struct | Pair | myPair |
| u8 | first |
| u8 | second |
| End Struct |
"""
configs, parameters, _ = self._get_parameters_with_defaults(parameters)
self._add_struct_name_to_params(name, parameters)
self._message_stack.append(StructTemplate(type, name, self._current_container, parameters, length=configs.get('length'), align=configs.get('align'))) | python | def new_struct(self, type, name, *parameters):
"""Defines a new struct to template.
You must call `End Struct` to end struct definition. `type` is the name
for generic type and `name` is the field name in containing structure.
Possible parameters are values for struct fields separated with colon
and optional struct length defined with `length=`. Length can be used in
receiveing to validate that struct matches predfeined length. When
sending, the struct length can refer to other message field which will
then be set dynamically.
Examples:
| New struct | Pair | myPair |
| u8 | first |
| u8 | second |
| End Struct |
"""
configs, parameters, _ = self._get_parameters_with_defaults(parameters)
self._add_struct_name_to_params(name, parameters)
self._message_stack.append(StructTemplate(type, name, self._current_container, parameters, length=configs.get('length'), align=configs.get('align'))) | [
"def",
"new_struct",
"(",
"self",
",",
"type",
",",
"name",
",",
"*",
"parameters",
")",
":",
"configs",
",",
"parameters",
",",
"_",
"=",
"self",
".",
"_get_parameters_with_defaults",
"(",
"parameters",
")",
"self",
".",
"_add_struct_name_to_params",
"(",
"... | Defines a new struct to template.
You must call `End Struct` to end struct definition. `type` is the name
for generic type and `name` is the field name in containing structure.
Possible parameters are values for struct fields separated with colon
and optional struct length defined with `length=`. Length can be used in
receiveing to validate that struct matches predfeined length. When
sending, the struct length can refer to other message field which will
then be set dynamically.
Examples:
| New struct | Pair | myPair |
| u8 | first |
| u8 | second |
| End Struct | | [
"Defines",
"a",
"new",
"struct",
"to",
"template",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L723-L742 | train | 36,698 |
robotframework/Rammbock | src/Rammbock/core.py | RammbockCore._new_list | def _new_list(self, size, name):
"""Defines a new list to template of `size` and with `name`.
List type must be given after this keyword by defining one field. Then
the list definition has to be closed using `End List`.
Special value '*' in size means that list will decode values as long as
data is available. This free length value is not supported on encoding.
Examples:
| New list | 5 | myIntList |
| u16 |
| End List |
| u8 | listLength |
| New list | listLength | myIntList |
| u16 |
| End List |
| New list | * | myIntList |
| u16 |
| End List |
"""
self._message_stack.append(ListTemplate(size, name, self._current_container)) | python | def _new_list(self, size, name):
"""Defines a new list to template of `size` and with `name`.
List type must be given after this keyword by defining one field. Then
the list definition has to be closed using `End List`.
Special value '*' in size means that list will decode values as long as
data is available. This free length value is not supported on encoding.
Examples:
| New list | 5 | myIntList |
| u16 |
| End List |
| u8 | listLength |
| New list | listLength | myIntList |
| u16 |
| End List |
| New list | * | myIntList |
| u16 |
| End List |
"""
self._message_stack.append(ListTemplate(size, name, self._current_container)) | [
"def",
"_new_list",
"(",
"self",
",",
"size",
",",
"name",
")",
":",
"self",
".",
"_message_stack",
".",
"append",
"(",
"ListTemplate",
"(",
"size",
",",
"name",
",",
"self",
".",
"_current_container",
")",
")"
] | Defines a new list to template of `size` and with `name`.
List type must be given after this keyword by defining one field. Then
the list definition has to be closed using `End List`.
Special value '*' in size means that list will decode values as long as
data is available. This free length value is not supported on encoding.
Examples:
| New list | 5 | myIntList |
| u16 |
| End List |
| u8 | listLength |
| New list | listLength | myIntList |
| u16 |
| End List |
| New list | * | myIntList |
| u16 |
| End List | | [
"Defines",
"a",
"new",
"list",
"to",
"template",
"of",
"size",
"and",
"with",
"name",
"."
] | c906058d055a6f7c68fe1a6096d78c2e3f642b1c | https://github.com/robotframework/Rammbock/blob/c906058d055a6f7c68fe1a6096d78c2e3f642b1c/src/Rammbock/core.py#L753-L776 | train | 36,699 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.