repo
stringlengths 7
55
| path
stringlengths 4
223
| func_name
stringlengths 1
134
| original_string
stringlengths 75
104k
| language
stringclasses 1
value | code
stringlengths 75
104k
| code_tokens
listlengths 19
28.4k
| docstring
stringlengths 1
46.9k
| docstring_tokens
listlengths 1
1.97k
| sha
stringlengths 40
40
| url
stringlengths 87
315
| partition
stringclasses 1
value |
|---|---|---|---|---|---|---|---|---|---|---|---|
neo4j-drivers/neobolt
|
neobolt/impl/python/direct.py
|
Connection._append
|
def _append(self, signature, fields=(), response=None):
""" Add a message to the outgoing queue.
:arg signature: the signature of the message
:arg fields: the fields of the message as a tuple
:arg response: a response object to handle callbacks
"""
self.packer.pack_struct(signature, fields)
self.output_buffer.chunk()
self.output_buffer.chunk()
self.responses.append(response)
|
python
|
def _append(self, signature, fields=(), response=None):
""" Add a message to the outgoing queue.
:arg signature: the signature of the message
:arg fields: the fields of the message as a tuple
:arg response: a response object to handle callbacks
"""
self.packer.pack_struct(signature, fields)
self.output_buffer.chunk()
self.output_buffer.chunk()
self.responses.append(response)
|
[
"def",
"_append",
"(",
"self",
",",
"signature",
",",
"fields",
"=",
"(",
")",
",",
"response",
"=",
"None",
")",
":",
"self",
".",
"packer",
".",
"pack_struct",
"(",
"signature",
",",
"fields",
")",
"self",
".",
"output_buffer",
".",
"chunk",
"(",
")",
"self",
".",
"output_buffer",
".",
"chunk",
"(",
")",
"self",
".",
"responses",
".",
"append",
"(",
"response",
")"
] |
Add a message to the outgoing queue.
:arg signature: the signature of the message
:arg fields: the fields of the message as a tuple
:arg response: a response object to handle callbacks
|
[
"Add",
"a",
"message",
"to",
"the",
"outgoing",
"queue",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/direct.py#L288-L298
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/direct.py
|
Connection.reset
|
def reset(self):
""" Add a RESET message to the outgoing queue, send
it and consume all remaining messages.
"""
def fail(metadata):
raise ProtocolError("RESET failed %r" % metadata)
log_debug("[#%04X] C: RESET", self.local_port)
self._append(b"\x0F", response=Response(self, on_failure=fail))
self.sync()
|
python
|
def reset(self):
""" Add a RESET message to the outgoing queue, send
it and consume all remaining messages.
"""
def fail(metadata):
raise ProtocolError("RESET failed %r" % metadata)
log_debug("[#%04X] C: RESET", self.local_port)
self._append(b"\x0F", response=Response(self, on_failure=fail))
self.sync()
|
[
"def",
"reset",
"(",
"self",
")",
":",
"def",
"fail",
"(",
"metadata",
")",
":",
"raise",
"ProtocolError",
"(",
"\"RESET failed %r\"",
"%",
"metadata",
")",
"log_debug",
"(",
"\"[#%04X] C: RESET\"",
",",
"self",
".",
"local_port",
")",
"self",
".",
"_append",
"(",
"b\"\\x0F\"",
",",
"response",
"=",
"Response",
"(",
"self",
",",
"on_failure",
"=",
"fail",
")",
")",
"self",
".",
"sync",
"(",
")"
] |
Add a RESET message to the outgoing queue, send
it and consume all remaining messages.
|
[
"Add",
"a",
"RESET",
"message",
"to",
"the",
"outgoing",
"queue",
"send",
"it",
"and",
"consume",
"all",
"remaining",
"messages",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/direct.py#L300-L310
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/direct.py
|
Connection._send
|
def _send(self):
""" Send all queued messages to the server.
"""
data = self.output_buffer.view()
if not data:
return
if self.closed():
raise self.Error("Failed to write to closed connection {!r}".format(self.server.address))
if self.defunct():
raise self.Error("Failed to write to defunct connection {!r}".format(self.server.address))
self.socket.sendall(data)
self.output_buffer.clear()
|
python
|
def _send(self):
""" Send all queued messages to the server.
"""
data = self.output_buffer.view()
if not data:
return
if self.closed():
raise self.Error("Failed to write to closed connection {!r}".format(self.server.address))
if self.defunct():
raise self.Error("Failed to write to defunct connection {!r}".format(self.server.address))
self.socket.sendall(data)
self.output_buffer.clear()
|
[
"def",
"_send",
"(",
"self",
")",
":",
"data",
"=",
"self",
".",
"output_buffer",
".",
"view",
"(",
")",
"if",
"not",
"data",
":",
"return",
"if",
"self",
".",
"closed",
"(",
")",
":",
"raise",
"self",
".",
"Error",
"(",
"\"Failed to write to closed connection {!r}\"",
".",
"format",
"(",
"self",
".",
"server",
".",
"address",
")",
")",
"if",
"self",
".",
"defunct",
"(",
")",
":",
"raise",
"self",
".",
"Error",
"(",
"\"Failed to write to defunct connection {!r}\"",
".",
"format",
"(",
"self",
".",
"server",
".",
"address",
")",
")",
"self",
".",
"socket",
".",
"sendall",
"(",
"data",
")",
"self",
".",
"output_buffer",
".",
"clear",
"(",
")"
] |
Send all queued messages to the server.
|
[
"Send",
"all",
"queued",
"messages",
"to",
"the",
"server",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/direct.py#L320-L331
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/direct.py
|
Connection._fetch
|
def _fetch(self):
""" Receive at least one message from the server, if available.
:return: 2-tuple of number of detail messages and number of summary messages fetched
"""
if self.closed():
raise self.Error("Failed to read from closed connection {!r}".format(self.server.address))
if self.defunct():
raise self.Error("Failed to read from defunct connection {!r}".format(self.server.address))
if not self.responses:
return 0, 0
self._receive()
details, summary_signature, summary_metadata = self._unpack()
if details:
log_debug("[#%04X] S: RECORD * %d", self.local_port, len(details)) # TODO
self.responses[0].on_records(details)
if summary_signature is None:
return len(details), 0
response = self.responses.popleft()
response.complete = True
if summary_signature == b"\x70":
log_debug("[#%04X] S: SUCCESS %r", self.local_port, summary_metadata)
response.on_success(summary_metadata or {})
elif summary_signature == b"\x7E":
self._last_run_statement = None
log_debug("[#%04X] S: IGNORED", self.local_port)
response.on_ignored(summary_metadata or {})
elif summary_signature == b"\x7F":
self._last_run_statement = None
log_debug("[#%04X] S: FAILURE %r", self.local_port, summary_metadata)
response.on_failure(summary_metadata or {})
else:
self._last_run_statement = None
raise ProtocolError("Unexpected response message with signature %02X" % summary_signature)
return len(details), 1
|
python
|
def _fetch(self):
""" Receive at least one message from the server, if available.
:return: 2-tuple of number of detail messages and number of summary messages fetched
"""
if self.closed():
raise self.Error("Failed to read from closed connection {!r}".format(self.server.address))
if self.defunct():
raise self.Error("Failed to read from defunct connection {!r}".format(self.server.address))
if not self.responses:
return 0, 0
self._receive()
details, summary_signature, summary_metadata = self._unpack()
if details:
log_debug("[#%04X] S: RECORD * %d", self.local_port, len(details)) # TODO
self.responses[0].on_records(details)
if summary_signature is None:
return len(details), 0
response = self.responses.popleft()
response.complete = True
if summary_signature == b"\x70":
log_debug("[#%04X] S: SUCCESS %r", self.local_port, summary_metadata)
response.on_success(summary_metadata or {})
elif summary_signature == b"\x7E":
self._last_run_statement = None
log_debug("[#%04X] S: IGNORED", self.local_port)
response.on_ignored(summary_metadata or {})
elif summary_signature == b"\x7F":
self._last_run_statement = None
log_debug("[#%04X] S: FAILURE %r", self.local_port, summary_metadata)
response.on_failure(summary_metadata or {})
else:
self._last_run_statement = None
raise ProtocolError("Unexpected response message with signature %02X" % summary_signature)
return len(details), 1
|
[
"def",
"_fetch",
"(",
"self",
")",
":",
"if",
"self",
".",
"closed",
"(",
")",
":",
"raise",
"self",
".",
"Error",
"(",
"\"Failed to read from closed connection {!r}\"",
".",
"format",
"(",
"self",
".",
"server",
".",
"address",
")",
")",
"if",
"self",
".",
"defunct",
"(",
")",
":",
"raise",
"self",
".",
"Error",
"(",
"\"Failed to read from defunct connection {!r}\"",
".",
"format",
"(",
"self",
".",
"server",
".",
"address",
")",
")",
"if",
"not",
"self",
".",
"responses",
":",
"return",
"0",
",",
"0",
"self",
".",
"_receive",
"(",
")",
"details",
",",
"summary_signature",
",",
"summary_metadata",
"=",
"self",
".",
"_unpack",
"(",
")",
"if",
"details",
":",
"log_debug",
"(",
"\"[#%04X] S: RECORD * %d\"",
",",
"self",
".",
"local_port",
",",
"len",
"(",
"details",
")",
")",
"# TODO",
"self",
".",
"responses",
"[",
"0",
"]",
".",
"on_records",
"(",
"details",
")",
"if",
"summary_signature",
"is",
"None",
":",
"return",
"len",
"(",
"details",
")",
",",
"0",
"response",
"=",
"self",
".",
"responses",
".",
"popleft",
"(",
")",
"response",
".",
"complete",
"=",
"True",
"if",
"summary_signature",
"==",
"b\"\\x70\"",
":",
"log_debug",
"(",
"\"[#%04X] S: SUCCESS %r\"",
",",
"self",
".",
"local_port",
",",
"summary_metadata",
")",
"response",
".",
"on_success",
"(",
"summary_metadata",
"or",
"{",
"}",
")",
"elif",
"summary_signature",
"==",
"b\"\\x7E\"",
":",
"self",
".",
"_last_run_statement",
"=",
"None",
"log_debug",
"(",
"\"[#%04X] S: IGNORED\"",
",",
"self",
".",
"local_port",
")",
"response",
".",
"on_ignored",
"(",
"summary_metadata",
"or",
"{",
"}",
")",
"elif",
"summary_signature",
"==",
"b\"\\x7F\"",
":",
"self",
".",
"_last_run_statement",
"=",
"None",
"log_debug",
"(",
"\"[#%04X] S: FAILURE %r\"",
",",
"self",
".",
"local_port",
",",
"summary_metadata",
")",
"response",
".",
"on_failure",
"(",
"summary_metadata",
"or",
"{",
"}",
")",
"else",
":",
"self",
".",
"_last_run_statement",
"=",
"None",
"raise",
"ProtocolError",
"(",
"\"Unexpected response message with signature %02X\"",
"%",
"summary_signature",
")",
"return",
"len",
"(",
"details",
")",
",",
"1"
] |
Receive at least one message from the server, if available.
:return: 2-tuple of number of detail messages and number of summary messages fetched
|
[
"Receive",
"at",
"least",
"one",
"message",
"from",
"the",
"server",
"if",
"available",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/direct.py#L341-L381
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/direct.py
|
Connection.sync
|
def sync(self):
""" Send and fetch all outstanding messages.
:return: 2-tuple of number of detail messages and number of summary messages fetched
"""
self.send()
detail_count = summary_count = 0
while self.responses:
response = self.responses[0]
while not response.complete:
detail_delta, summary_delta = self.fetch()
detail_count += detail_delta
summary_count += summary_delta
return detail_count, summary_count
|
python
|
def sync(self):
""" Send and fetch all outstanding messages.
:return: 2-tuple of number of detail messages and number of summary messages fetched
"""
self.send()
detail_count = summary_count = 0
while self.responses:
response = self.responses[0]
while not response.complete:
detail_delta, summary_delta = self.fetch()
detail_count += detail_delta
summary_count += summary_delta
return detail_count, summary_count
|
[
"def",
"sync",
"(",
"self",
")",
":",
"self",
".",
"send",
"(",
")",
"detail_count",
"=",
"summary_count",
"=",
"0",
"while",
"self",
".",
"responses",
":",
"response",
"=",
"self",
".",
"responses",
"[",
"0",
"]",
"while",
"not",
"response",
".",
"complete",
":",
"detail_delta",
",",
"summary_delta",
"=",
"self",
".",
"fetch",
"(",
")",
"detail_count",
"+=",
"detail_delta",
"summary_count",
"+=",
"summary_delta",
"return",
"detail_count",
",",
"summary_count"
] |
Send and fetch all outstanding messages.
:return: 2-tuple of number of detail messages and number of summary messages fetched
|
[
"Send",
"and",
"fetch",
"all",
"outstanding",
"messages",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/direct.py#L431-L444
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/direct.py
|
Connection.close
|
def close(self):
""" Close the connection.
"""
if not self._closed:
if self.protocol_version >= 3:
log_debug("[#%04X] C: GOODBYE", self.local_port)
self._append(b"\x02", ())
try:
self.send()
except ServiceUnavailable:
pass
log_debug("[#%04X] C: <CLOSE>", self.local_port)
try:
self.socket.close()
except IOError:
pass
finally:
self._closed = True
|
python
|
def close(self):
""" Close the connection.
"""
if not self._closed:
if self.protocol_version >= 3:
log_debug("[#%04X] C: GOODBYE", self.local_port)
self._append(b"\x02", ())
try:
self.send()
except ServiceUnavailable:
pass
log_debug("[#%04X] C: <CLOSE>", self.local_port)
try:
self.socket.close()
except IOError:
pass
finally:
self._closed = True
|
[
"def",
"close",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_closed",
":",
"if",
"self",
".",
"protocol_version",
">=",
"3",
":",
"log_debug",
"(",
"\"[#%04X] C: GOODBYE\"",
",",
"self",
".",
"local_port",
")",
"self",
".",
"_append",
"(",
"b\"\\x02\"",
",",
"(",
")",
")",
"try",
":",
"self",
".",
"send",
"(",
")",
"except",
"ServiceUnavailable",
":",
"pass",
"log_debug",
"(",
"\"[#%04X] C: <CLOSE>\"",
",",
"self",
".",
"local_port",
")",
"try",
":",
"self",
".",
"socket",
".",
"close",
"(",
")",
"except",
"IOError",
":",
"pass",
"finally",
":",
"self",
".",
"_closed",
"=",
"True"
] |
Close the connection.
|
[
"Close",
"the",
"connection",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/direct.py#L446-L463
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/direct.py
|
AbstractConnectionPool.acquire_direct
|
def acquire_direct(self, address):
""" Acquire a connection to a given address from the pool.
The address supplied should always be an IP address, not
a host name.
This method is thread safe.
"""
if self.closed():
raise ServiceUnavailable("Connection pool closed")
with self.lock:
try:
connections = self.connections[address]
except KeyError:
connections = self.connections[address] = deque()
connection_acquisition_start_timestamp = perf_counter()
while True:
# try to find a free connection in pool
for connection in list(connections):
if connection.closed() or connection.defunct() or connection.timedout():
connections.remove(connection)
continue
if not connection.in_use:
connection.in_use = True
return connection
# all connections in pool are in-use
infinite_connection_pool = (self._max_connection_pool_size < 0 or
self._max_connection_pool_size == float("inf"))
can_create_new_connection = infinite_connection_pool or len(connections) < self._max_connection_pool_size
if can_create_new_connection:
try:
connection = self.connector(address)
except ServiceUnavailable:
self.remove(address)
raise
else:
connection.pool = self
connection.in_use = True
connections.append(connection)
return connection
# failed to obtain a connection from pool because the pool is full and no free connection in the pool
span_timeout = self._connection_acquisition_timeout - (perf_counter() - connection_acquisition_start_timestamp)
if span_timeout > 0:
self.cond.wait(span_timeout)
# if timed out, then we throw error. This time computation is needed, as with python 2.7, we cannot
# tell if the condition is notified or timed out when we come to this line
if self._connection_acquisition_timeout <= (perf_counter() - connection_acquisition_start_timestamp):
raise ClientError("Failed to obtain a connection from pool within {!r}s".format(
self._connection_acquisition_timeout))
else:
raise ClientError("Failed to obtain a connection from pool within {!r}s".format(self._connection_acquisition_timeout))
|
python
|
def acquire_direct(self, address):
""" Acquire a connection to a given address from the pool.
The address supplied should always be an IP address, not
a host name.
This method is thread safe.
"""
if self.closed():
raise ServiceUnavailable("Connection pool closed")
with self.lock:
try:
connections = self.connections[address]
except KeyError:
connections = self.connections[address] = deque()
connection_acquisition_start_timestamp = perf_counter()
while True:
# try to find a free connection in pool
for connection in list(connections):
if connection.closed() or connection.defunct() or connection.timedout():
connections.remove(connection)
continue
if not connection.in_use:
connection.in_use = True
return connection
# all connections in pool are in-use
infinite_connection_pool = (self._max_connection_pool_size < 0 or
self._max_connection_pool_size == float("inf"))
can_create_new_connection = infinite_connection_pool or len(connections) < self._max_connection_pool_size
if can_create_new_connection:
try:
connection = self.connector(address)
except ServiceUnavailable:
self.remove(address)
raise
else:
connection.pool = self
connection.in_use = True
connections.append(connection)
return connection
# failed to obtain a connection from pool because the pool is full and no free connection in the pool
span_timeout = self._connection_acquisition_timeout - (perf_counter() - connection_acquisition_start_timestamp)
if span_timeout > 0:
self.cond.wait(span_timeout)
# if timed out, then we throw error. This time computation is needed, as with python 2.7, we cannot
# tell if the condition is notified or timed out when we come to this line
if self._connection_acquisition_timeout <= (perf_counter() - connection_acquisition_start_timestamp):
raise ClientError("Failed to obtain a connection from pool within {!r}s".format(
self._connection_acquisition_timeout))
else:
raise ClientError("Failed to obtain a connection from pool within {!r}s".format(self._connection_acquisition_timeout))
|
[
"def",
"acquire_direct",
"(",
"self",
",",
"address",
")",
":",
"if",
"self",
".",
"closed",
"(",
")",
":",
"raise",
"ServiceUnavailable",
"(",
"\"Connection pool closed\"",
")",
"with",
"self",
".",
"lock",
":",
"try",
":",
"connections",
"=",
"self",
".",
"connections",
"[",
"address",
"]",
"except",
"KeyError",
":",
"connections",
"=",
"self",
".",
"connections",
"[",
"address",
"]",
"=",
"deque",
"(",
")",
"connection_acquisition_start_timestamp",
"=",
"perf_counter",
"(",
")",
"while",
"True",
":",
"# try to find a free connection in pool",
"for",
"connection",
"in",
"list",
"(",
"connections",
")",
":",
"if",
"connection",
".",
"closed",
"(",
")",
"or",
"connection",
".",
"defunct",
"(",
")",
"or",
"connection",
".",
"timedout",
"(",
")",
":",
"connections",
".",
"remove",
"(",
"connection",
")",
"continue",
"if",
"not",
"connection",
".",
"in_use",
":",
"connection",
".",
"in_use",
"=",
"True",
"return",
"connection",
"# all connections in pool are in-use",
"infinite_connection_pool",
"=",
"(",
"self",
".",
"_max_connection_pool_size",
"<",
"0",
"or",
"self",
".",
"_max_connection_pool_size",
"==",
"float",
"(",
"\"inf\"",
")",
")",
"can_create_new_connection",
"=",
"infinite_connection_pool",
"or",
"len",
"(",
"connections",
")",
"<",
"self",
".",
"_max_connection_pool_size",
"if",
"can_create_new_connection",
":",
"try",
":",
"connection",
"=",
"self",
".",
"connector",
"(",
"address",
")",
"except",
"ServiceUnavailable",
":",
"self",
".",
"remove",
"(",
"address",
")",
"raise",
"else",
":",
"connection",
".",
"pool",
"=",
"self",
"connection",
".",
"in_use",
"=",
"True",
"connections",
".",
"append",
"(",
"connection",
")",
"return",
"connection",
"# failed to obtain a connection from pool because the pool is full and no free connection in the pool",
"span_timeout",
"=",
"self",
".",
"_connection_acquisition_timeout",
"-",
"(",
"perf_counter",
"(",
")",
"-",
"connection_acquisition_start_timestamp",
")",
"if",
"span_timeout",
">",
"0",
":",
"self",
".",
"cond",
".",
"wait",
"(",
"span_timeout",
")",
"# if timed out, then we throw error. This time computation is needed, as with python 2.7, we cannot",
"# tell if the condition is notified or timed out when we come to this line",
"if",
"self",
".",
"_connection_acquisition_timeout",
"<=",
"(",
"perf_counter",
"(",
")",
"-",
"connection_acquisition_start_timestamp",
")",
":",
"raise",
"ClientError",
"(",
"\"Failed to obtain a connection from pool within {!r}s\"",
".",
"format",
"(",
"self",
".",
"_connection_acquisition_timeout",
")",
")",
"else",
":",
"raise",
"ClientError",
"(",
"\"Failed to obtain a connection from pool within {!r}s\"",
".",
"format",
"(",
"self",
".",
"_connection_acquisition_timeout",
")",
")"
] |
Acquire a connection to a given address from the pool.
The address supplied should always be an IP address, not
a host name.
This method is thread safe.
|
[
"Acquire",
"a",
"connection",
"to",
"a",
"given",
"address",
"from",
"the",
"pool",
".",
"The",
"address",
"supplied",
"should",
"always",
"be",
"an",
"IP",
"address",
"not",
"a",
"host",
"name",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/direct.py#L492-L543
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/direct.py
|
AbstractConnectionPool.release
|
def release(self, connection):
""" Release a connection back into the pool.
This method is thread safe.
"""
with self.lock:
connection.in_use = False
self.cond.notify_all()
|
python
|
def release(self, connection):
""" Release a connection back into the pool.
This method is thread safe.
"""
with self.lock:
connection.in_use = False
self.cond.notify_all()
|
[
"def",
"release",
"(",
"self",
",",
"connection",
")",
":",
"with",
"self",
".",
"lock",
":",
"connection",
".",
"in_use",
"=",
"False",
"self",
".",
"cond",
".",
"notify_all",
"(",
")"
] |
Release a connection back into the pool.
This method is thread safe.
|
[
"Release",
"a",
"connection",
"back",
"into",
"the",
"pool",
".",
"This",
"method",
"is",
"thread",
"safe",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/direct.py#L551-L557
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/direct.py
|
AbstractConnectionPool.in_use_connection_count
|
def in_use_connection_count(self, address):
""" Count the number of connections currently in use to a given
address.
"""
try:
connections = self.connections[address]
except KeyError:
return 0
else:
return sum(1 if connection.in_use else 0 for connection in connections)
|
python
|
def in_use_connection_count(self, address):
""" Count the number of connections currently in use to a given
address.
"""
try:
connections = self.connections[address]
except KeyError:
return 0
else:
return sum(1 if connection.in_use else 0 for connection in connections)
|
[
"def",
"in_use_connection_count",
"(",
"self",
",",
"address",
")",
":",
"try",
":",
"connections",
"=",
"self",
".",
"connections",
"[",
"address",
"]",
"except",
"KeyError",
":",
"return",
"0",
"else",
":",
"return",
"sum",
"(",
"1",
"if",
"connection",
".",
"in_use",
"else",
"0",
"for",
"connection",
"in",
"connections",
")"
] |
Count the number of connections currently in use to a given
address.
|
[
"Count",
"the",
"number",
"of",
"connections",
"currently",
"in",
"use",
"to",
"a",
"given",
"address",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/direct.py#L559-L568
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/direct.py
|
AbstractConnectionPool.deactivate
|
def deactivate(self, address):
""" Deactivate an address from the connection pool, if present, closing
all idle connection to that address
"""
with self.lock:
try:
connections = self.connections[address]
except KeyError: # already removed from the connection pool
return
for conn in list(connections):
if not conn.in_use:
connections.remove(conn)
try:
conn.close()
except IOError:
pass
if not connections:
self.remove(address)
|
python
|
def deactivate(self, address):
""" Deactivate an address from the connection pool, if present, closing
all idle connection to that address
"""
with self.lock:
try:
connections = self.connections[address]
except KeyError: # already removed from the connection pool
return
for conn in list(connections):
if not conn.in_use:
connections.remove(conn)
try:
conn.close()
except IOError:
pass
if not connections:
self.remove(address)
|
[
"def",
"deactivate",
"(",
"self",
",",
"address",
")",
":",
"with",
"self",
".",
"lock",
":",
"try",
":",
"connections",
"=",
"self",
".",
"connections",
"[",
"address",
"]",
"except",
"KeyError",
":",
"# already removed from the connection pool",
"return",
"for",
"conn",
"in",
"list",
"(",
"connections",
")",
":",
"if",
"not",
"conn",
".",
"in_use",
":",
"connections",
".",
"remove",
"(",
"conn",
")",
"try",
":",
"conn",
".",
"close",
"(",
")",
"except",
"IOError",
":",
"pass",
"if",
"not",
"connections",
":",
"self",
".",
"remove",
"(",
"address",
")"
] |
Deactivate an address from the connection pool, if present, closing
all idle connection to that address
|
[
"Deactivate",
"an",
"address",
"from",
"the",
"connection",
"pool",
"if",
"present",
"closing",
"all",
"idle",
"connection",
"to",
"that",
"address"
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/direct.py#L570-L587
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/direct.py
|
AbstractConnectionPool.remove
|
def remove(self, address):
""" Remove an address from the connection pool, if present, closing
all connections to that address.
"""
with self.lock:
for connection in self.connections.pop(address, ()):
try:
connection.close()
except IOError:
pass
|
python
|
def remove(self, address):
""" Remove an address from the connection pool, if present, closing
all connections to that address.
"""
with self.lock:
for connection in self.connections.pop(address, ()):
try:
connection.close()
except IOError:
pass
|
[
"def",
"remove",
"(",
"self",
",",
"address",
")",
":",
"with",
"self",
".",
"lock",
":",
"for",
"connection",
"in",
"self",
".",
"connections",
".",
"pop",
"(",
"address",
",",
"(",
")",
")",
":",
"try",
":",
"connection",
".",
"close",
"(",
")",
"except",
"IOError",
":",
"pass"
] |
Remove an address from the connection pool, if present, closing
all connections to that address.
|
[
"Remove",
"an",
"address",
"from",
"the",
"connection",
"pool",
"if",
"present",
"closing",
"all",
"connections",
"to",
"that",
"address",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/direct.py#L589-L598
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/direct.py
|
AbstractConnectionPool.close
|
def close(self):
""" Close all connections and empty the pool.
This method is thread safe.
"""
if self._closed:
return
try:
with self.lock:
if not self._closed:
self._closed = True
for address in list(self.connections):
self.remove(address)
except TypeError as e:
pass
|
python
|
def close(self):
""" Close all connections and empty the pool.
This method is thread safe.
"""
if self._closed:
return
try:
with self.lock:
if not self._closed:
self._closed = True
for address in list(self.connections):
self.remove(address)
except TypeError as e:
pass
|
[
"def",
"close",
"(",
"self",
")",
":",
"if",
"self",
".",
"_closed",
":",
"return",
"try",
":",
"with",
"self",
".",
"lock",
":",
"if",
"not",
"self",
".",
"_closed",
":",
"self",
".",
"_closed",
"=",
"True",
"for",
"address",
"in",
"list",
"(",
"self",
".",
"connections",
")",
":",
"self",
".",
"remove",
"(",
"address",
")",
"except",
"TypeError",
"as",
"e",
":",
"pass"
] |
Close all connections and empty the pool.
This method is thread safe.
|
[
"Close",
"all",
"connections",
"and",
"empty",
"the",
"pool",
".",
"This",
"method",
"is",
"thread",
"safe",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/direct.py#L600-L613
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/direct.py
|
Response.on_records
|
def on_records(self, records):
""" Called when one or more RECORD messages have been received.
"""
handler = self.handlers.get("on_records")
if callable(handler):
handler(records)
|
python
|
def on_records(self, records):
""" Called when one or more RECORD messages have been received.
"""
handler = self.handlers.get("on_records")
if callable(handler):
handler(records)
|
[
"def",
"on_records",
"(",
"self",
",",
"records",
")",
":",
"handler",
"=",
"self",
".",
"handlers",
".",
"get",
"(",
"\"on_records\"",
")",
"if",
"callable",
"(",
"handler",
")",
":",
"handler",
"(",
"records",
")"
] |
Called when one or more RECORD messages have been received.
|
[
"Called",
"when",
"one",
"or",
"more",
"RECORD",
"messages",
"have",
"been",
"received",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/direct.py#L648-L653
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/direct.py
|
Response.on_success
|
def on_success(self, metadata):
""" Called when a SUCCESS message has been received.
"""
handler = self.handlers.get("on_success")
if callable(handler):
handler(metadata)
handler = self.handlers.get("on_summary")
if callable(handler):
handler()
|
python
|
def on_success(self, metadata):
""" Called when a SUCCESS message has been received.
"""
handler = self.handlers.get("on_success")
if callable(handler):
handler(metadata)
handler = self.handlers.get("on_summary")
if callable(handler):
handler()
|
[
"def",
"on_success",
"(",
"self",
",",
"metadata",
")",
":",
"handler",
"=",
"self",
".",
"handlers",
".",
"get",
"(",
"\"on_success\"",
")",
"if",
"callable",
"(",
"handler",
")",
":",
"handler",
"(",
"metadata",
")",
"handler",
"=",
"self",
".",
"handlers",
".",
"get",
"(",
"\"on_summary\"",
")",
"if",
"callable",
"(",
"handler",
")",
":",
"handler",
"(",
")"
] |
Called when a SUCCESS message has been received.
|
[
"Called",
"when",
"a",
"SUCCESS",
"message",
"has",
"been",
"received",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/direct.py#L655-L663
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/direct.py
|
Response.on_failure
|
def on_failure(self, metadata):
""" Called when a FAILURE message has been received.
"""
self.connection.reset()
handler = self.handlers.get("on_failure")
if callable(handler):
handler(metadata)
handler = self.handlers.get("on_summary")
if callable(handler):
handler()
raise CypherError.hydrate(**metadata)
|
python
|
def on_failure(self, metadata):
""" Called when a FAILURE message has been received.
"""
self.connection.reset()
handler = self.handlers.get("on_failure")
if callable(handler):
handler(metadata)
handler = self.handlers.get("on_summary")
if callable(handler):
handler()
raise CypherError.hydrate(**metadata)
|
[
"def",
"on_failure",
"(",
"self",
",",
"metadata",
")",
":",
"self",
".",
"connection",
".",
"reset",
"(",
")",
"handler",
"=",
"self",
".",
"handlers",
".",
"get",
"(",
"\"on_failure\"",
")",
"if",
"callable",
"(",
"handler",
")",
":",
"handler",
"(",
"metadata",
")",
"handler",
"=",
"self",
".",
"handlers",
".",
"get",
"(",
"\"on_summary\"",
")",
"if",
"callable",
"(",
"handler",
")",
":",
"handler",
"(",
")",
"raise",
"CypherError",
".",
"hydrate",
"(",
"*",
"*",
"metadata",
")"
] |
Called when a FAILURE message has been received.
|
[
"Called",
"when",
"a",
"FAILURE",
"message",
"has",
"been",
"received",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/direct.py#L665-L675
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/direct.py
|
Response.on_ignored
|
def on_ignored(self, metadata=None):
""" Called when an IGNORED message has been received.
"""
handler = self.handlers.get("on_ignored")
if callable(handler):
handler(metadata)
handler = self.handlers.get("on_summary")
if callable(handler):
handler()
|
python
|
def on_ignored(self, metadata=None):
""" Called when an IGNORED message has been received.
"""
handler = self.handlers.get("on_ignored")
if callable(handler):
handler(metadata)
handler = self.handlers.get("on_summary")
if callable(handler):
handler()
|
[
"def",
"on_ignored",
"(",
"self",
",",
"metadata",
"=",
"None",
")",
":",
"handler",
"=",
"self",
".",
"handlers",
".",
"get",
"(",
"\"on_ignored\"",
")",
"if",
"callable",
"(",
"handler",
")",
":",
"handler",
"(",
"metadata",
")",
"handler",
"=",
"self",
".",
"handlers",
".",
"get",
"(",
"\"on_summary\"",
")",
"if",
"callable",
"(",
"handler",
")",
":",
"handler",
"(",
")"
] |
Called when an IGNORED message has been received.
|
[
"Called",
"when",
"an",
"IGNORED",
"message",
"has",
"been",
"received",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/direct.py#L677-L685
|
train
|
trehn/hnmp
|
hnmp.py
|
cached_property
|
def cached_property(prop):
"""
A replacement for the property decorator that will only compute the
attribute's value on the first call and serve a cached copy from
then on.
"""
def cache_wrapper(self):
if not hasattr(self, "_cache"):
self._cache = {}
if prop.__name__ not in self._cache:
return_value = prop(self)
if isgenerator(return_value):
return_value = tuple(return_value)
self._cache[prop.__name__] = return_value
return self._cache[prop.__name__]
return property(cache_wrapper)
|
python
|
def cached_property(prop):
"""
A replacement for the property decorator that will only compute the
attribute's value on the first call and serve a cached copy from
then on.
"""
def cache_wrapper(self):
if not hasattr(self, "_cache"):
self._cache = {}
if prop.__name__ not in self._cache:
return_value = prop(self)
if isgenerator(return_value):
return_value = tuple(return_value)
self._cache[prop.__name__] = return_value
return self._cache[prop.__name__]
return property(cache_wrapper)
|
[
"def",
"cached_property",
"(",
"prop",
")",
":",
"def",
"cache_wrapper",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"\"_cache\"",
")",
":",
"self",
".",
"_cache",
"=",
"{",
"}",
"if",
"prop",
".",
"__name__",
"not",
"in",
"self",
".",
"_cache",
":",
"return_value",
"=",
"prop",
"(",
"self",
")",
"if",
"isgenerator",
"(",
"return_value",
")",
":",
"return_value",
"=",
"tuple",
"(",
"return_value",
")",
"self",
".",
"_cache",
"[",
"prop",
".",
"__name__",
"]",
"=",
"return_value",
"return",
"self",
".",
"_cache",
"[",
"prop",
".",
"__name__",
"]",
"return",
"property",
"(",
"cache_wrapper",
")"
] |
A replacement for the property decorator that will only compute the
attribute's value on the first call and serve a cached copy from
then on.
|
[
"A",
"replacement",
"for",
"the",
"property",
"decorator",
"that",
"will",
"only",
"compute",
"the",
"attribute",
"s",
"value",
"on",
"the",
"first",
"call",
"and",
"serve",
"a",
"cached",
"copy",
"from",
"then",
"on",
"."
] |
a21f9e73c96a35bff2354894031c4788ad4ed2f0
|
https://github.com/trehn/hnmp/blob/a21f9e73c96a35bff2354894031c4788ad4ed2f0/hnmp.py#L47-L62
|
train
|
trehn/hnmp
|
hnmp.py
|
_convert_value_to_native
|
def _convert_value_to_native(value):
"""
Converts pysnmp objects into native Python objects.
"""
if isinstance(value, Counter32):
return int(value.prettyPrint())
if isinstance(value, Counter64):
return int(value.prettyPrint())
if isinstance(value, Gauge32):
return int(value.prettyPrint())
if isinstance(value, Integer):
return int(value.prettyPrint())
if isinstance(value, Integer32):
return int(value.prettyPrint())
if isinstance(value, Unsigned32):
return int(value.prettyPrint())
if isinstance(value, IpAddress):
return str(value.prettyPrint())
if isinstance(value, OctetString):
try:
return value.asOctets().decode(value.encoding)
except UnicodeDecodeError:
return value.asOctets()
if isinstance(value, TimeTicks):
return timedelta(seconds=int(value.prettyPrint()) / 100.0)
return value
|
python
|
def _convert_value_to_native(value):
"""
Converts pysnmp objects into native Python objects.
"""
if isinstance(value, Counter32):
return int(value.prettyPrint())
if isinstance(value, Counter64):
return int(value.prettyPrint())
if isinstance(value, Gauge32):
return int(value.prettyPrint())
if isinstance(value, Integer):
return int(value.prettyPrint())
if isinstance(value, Integer32):
return int(value.prettyPrint())
if isinstance(value, Unsigned32):
return int(value.prettyPrint())
if isinstance(value, IpAddress):
return str(value.prettyPrint())
if isinstance(value, OctetString):
try:
return value.asOctets().decode(value.encoding)
except UnicodeDecodeError:
return value.asOctets()
if isinstance(value, TimeTicks):
return timedelta(seconds=int(value.prettyPrint()) / 100.0)
return value
|
[
"def",
"_convert_value_to_native",
"(",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"Counter32",
")",
":",
"return",
"int",
"(",
"value",
".",
"prettyPrint",
"(",
")",
")",
"if",
"isinstance",
"(",
"value",
",",
"Counter64",
")",
":",
"return",
"int",
"(",
"value",
".",
"prettyPrint",
"(",
")",
")",
"if",
"isinstance",
"(",
"value",
",",
"Gauge32",
")",
":",
"return",
"int",
"(",
"value",
".",
"prettyPrint",
"(",
")",
")",
"if",
"isinstance",
"(",
"value",
",",
"Integer",
")",
":",
"return",
"int",
"(",
"value",
".",
"prettyPrint",
"(",
")",
")",
"if",
"isinstance",
"(",
"value",
",",
"Integer32",
")",
":",
"return",
"int",
"(",
"value",
".",
"prettyPrint",
"(",
")",
")",
"if",
"isinstance",
"(",
"value",
",",
"Unsigned32",
")",
":",
"return",
"int",
"(",
"value",
".",
"prettyPrint",
"(",
")",
")",
"if",
"isinstance",
"(",
"value",
",",
"IpAddress",
")",
":",
"return",
"str",
"(",
"value",
".",
"prettyPrint",
"(",
")",
")",
"if",
"isinstance",
"(",
"value",
",",
"OctetString",
")",
":",
"try",
":",
"return",
"value",
".",
"asOctets",
"(",
")",
".",
"decode",
"(",
"value",
".",
"encoding",
")",
"except",
"UnicodeDecodeError",
":",
"return",
"value",
".",
"asOctets",
"(",
")",
"if",
"isinstance",
"(",
"value",
",",
"TimeTicks",
")",
":",
"return",
"timedelta",
"(",
"seconds",
"=",
"int",
"(",
"value",
".",
"prettyPrint",
"(",
")",
")",
"/",
"100.0",
")",
"return",
"value"
] |
Converts pysnmp objects into native Python objects.
|
[
"Converts",
"pysnmp",
"objects",
"into",
"native",
"Python",
"objects",
"."
] |
a21f9e73c96a35bff2354894031c4788ad4ed2f0
|
https://github.com/trehn/hnmp/blob/a21f9e73c96a35bff2354894031c4788ad4ed2f0/hnmp.py#L65-L90
|
train
|
trehn/hnmp
|
hnmp.py
|
SNMP.get
|
def get(self, oid):
"""
Get a single OID value.
"""
snmpsecurity = self._get_snmp_security()
try:
engine_error, pdu_error, pdu_error_index, objects = self._cmdgen.getCmd(
snmpsecurity,
cmdgen.UdpTransportTarget((self.host, self.port), timeout=self.timeout,
retries=self.retries),
oid,
)
except Exception as e:
raise SNMPError(e)
if engine_error:
raise SNMPError(engine_error)
if pdu_error:
raise SNMPError(pdu_error.prettyPrint())
_, value = objects[0]
value = _convert_value_to_native(value)
return value
|
python
|
def get(self, oid):
"""
Get a single OID value.
"""
snmpsecurity = self._get_snmp_security()
try:
engine_error, pdu_error, pdu_error_index, objects = self._cmdgen.getCmd(
snmpsecurity,
cmdgen.UdpTransportTarget((self.host, self.port), timeout=self.timeout,
retries=self.retries),
oid,
)
except Exception as e:
raise SNMPError(e)
if engine_error:
raise SNMPError(engine_error)
if pdu_error:
raise SNMPError(pdu_error.prettyPrint())
_, value = objects[0]
value = _convert_value_to_native(value)
return value
|
[
"def",
"get",
"(",
"self",
",",
"oid",
")",
":",
"snmpsecurity",
"=",
"self",
".",
"_get_snmp_security",
"(",
")",
"try",
":",
"engine_error",
",",
"pdu_error",
",",
"pdu_error_index",
",",
"objects",
"=",
"self",
".",
"_cmdgen",
".",
"getCmd",
"(",
"snmpsecurity",
",",
"cmdgen",
".",
"UdpTransportTarget",
"(",
"(",
"self",
".",
"host",
",",
"self",
".",
"port",
")",
",",
"timeout",
"=",
"self",
".",
"timeout",
",",
"retries",
"=",
"self",
".",
"retries",
")",
",",
"oid",
",",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"SNMPError",
"(",
"e",
")",
"if",
"engine_error",
":",
"raise",
"SNMPError",
"(",
"engine_error",
")",
"if",
"pdu_error",
":",
"raise",
"SNMPError",
"(",
"pdu_error",
".",
"prettyPrint",
"(",
")",
")",
"_",
",",
"value",
"=",
"objects",
"[",
"0",
"]",
"value",
"=",
"_convert_value_to_native",
"(",
"value",
")",
"return",
"value"
] |
Get a single OID value.
|
[
"Get",
"a",
"single",
"OID",
"value",
"."
] |
a21f9e73c96a35bff2354894031c4788ad4ed2f0
|
https://github.com/trehn/hnmp/blob/a21f9e73c96a35bff2354894031c4788ad4ed2f0/hnmp.py#L178-L201
|
train
|
trehn/hnmp
|
hnmp.py
|
SNMP.set
|
def set(self, oid, value, value_type=None):
"""
Sets a single OID value. If you do not pass value_type hnmp will
try to guess the correct type. Autodetection is supported for:
* int and float (as Integer, fractional part will be discarded)
* IPv4 address (as IpAddress)
* str (as OctetString)
Unfortunately, pysnmp does not support the SNMP FLOAT type so
please use Integer instead.
"""
snmpsecurity = self._get_snmp_security()
if value_type is None:
if isinstance(value, int):
data = Integer(value)
elif isinstance(value, float):
data = Integer(value)
elif isinstance(value, str):
if is_ipv4_address(value):
data = IpAddress(value)
else:
data = OctetString(value)
else:
raise TypeError(
"Unable to autodetect type. Please pass one of "
"these strings as the value_type keyword arg: "
", ".join(TYPES.keys())
)
else:
if value_type not in TYPES:
raise ValueError("'{}' is not one of the supported types: {}".format(
value_type,
", ".join(TYPES.keys())
))
data = TYPES[value_type](value)
try:
engine_error, pdu_error, pdu_error_index, objects = self._cmdgen.setCmd(
snmpsecurity,
cmdgen.UdpTransportTarget((self.host, self.port), timeout=self.timeout,
retries=self.retries),
(oid, data),
)
if engine_error:
raise SNMPError(engine_error)
if pdu_error:
raise SNMPError(pdu_error.prettyPrint())
except Exception as e:
raise SNMPError(e)
_, value = objects[0]
value = _convert_value_to_native(value)
return value
|
python
|
def set(self, oid, value, value_type=None):
"""
Sets a single OID value. If you do not pass value_type hnmp will
try to guess the correct type. Autodetection is supported for:
* int and float (as Integer, fractional part will be discarded)
* IPv4 address (as IpAddress)
* str (as OctetString)
Unfortunately, pysnmp does not support the SNMP FLOAT type so
please use Integer instead.
"""
snmpsecurity = self._get_snmp_security()
if value_type is None:
if isinstance(value, int):
data = Integer(value)
elif isinstance(value, float):
data = Integer(value)
elif isinstance(value, str):
if is_ipv4_address(value):
data = IpAddress(value)
else:
data = OctetString(value)
else:
raise TypeError(
"Unable to autodetect type. Please pass one of "
"these strings as the value_type keyword arg: "
", ".join(TYPES.keys())
)
else:
if value_type not in TYPES:
raise ValueError("'{}' is not one of the supported types: {}".format(
value_type,
", ".join(TYPES.keys())
))
data = TYPES[value_type](value)
try:
engine_error, pdu_error, pdu_error_index, objects = self._cmdgen.setCmd(
snmpsecurity,
cmdgen.UdpTransportTarget((self.host, self.port), timeout=self.timeout,
retries=self.retries),
(oid, data),
)
if engine_error:
raise SNMPError(engine_error)
if pdu_error:
raise SNMPError(pdu_error.prettyPrint())
except Exception as e:
raise SNMPError(e)
_, value = objects[0]
value = _convert_value_to_native(value)
return value
|
[
"def",
"set",
"(",
"self",
",",
"oid",
",",
"value",
",",
"value_type",
"=",
"None",
")",
":",
"snmpsecurity",
"=",
"self",
".",
"_get_snmp_security",
"(",
")",
"if",
"value_type",
"is",
"None",
":",
"if",
"isinstance",
"(",
"value",
",",
"int",
")",
":",
"data",
"=",
"Integer",
"(",
"value",
")",
"elif",
"isinstance",
"(",
"value",
",",
"float",
")",
":",
"data",
"=",
"Integer",
"(",
"value",
")",
"elif",
"isinstance",
"(",
"value",
",",
"str",
")",
":",
"if",
"is_ipv4_address",
"(",
"value",
")",
":",
"data",
"=",
"IpAddress",
"(",
"value",
")",
"else",
":",
"data",
"=",
"OctetString",
"(",
"value",
")",
"else",
":",
"raise",
"TypeError",
"(",
"\"Unable to autodetect type. Please pass one of \"",
"\"these strings as the value_type keyword arg: \"",
"\", \"",
".",
"join",
"(",
"TYPES",
".",
"keys",
"(",
")",
")",
")",
"else",
":",
"if",
"value_type",
"not",
"in",
"TYPES",
":",
"raise",
"ValueError",
"(",
"\"'{}' is not one of the supported types: {}\"",
".",
"format",
"(",
"value_type",
",",
"\", \"",
".",
"join",
"(",
"TYPES",
".",
"keys",
"(",
")",
")",
")",
")",
"data",
"=",
"TYPES",
"[",
"value_type",
"]",
"(",
"value",
")",
"try",
":",
"engine_error",
",",
"pdu_error",
",",
"pdu_error_index",
",",
"objects",
"=",
"self",
".",
"_cmdgen",
".",
"setCmd",
"(",
"snmpsecurity",
",",
"cmdgen",
".",
"UdpTransportTarget",
"(",
"(",
"self",
".",
"host",
",",
"self",
".",
"port",
")",
",",
"timeout",
"=",
"self",
".",
"timeout",
",",
"retries",
"=",
"self",
".",
"retries",
")",
",",
"(",
"oid",
",",
"data",
")",
",",
")",
"if",
"engine_error",
":",
"raise",
"SNMPError",
"(",
"engine_error",
")",
"if",
"pdu_error",
":",
"raise",
"SNMPError",
"(",
"pdu_error",
".",
"prettyPrint",
"(",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"SNMPError",
"(",
"e",
")",
"_",
",",
"value",
"=",
"objects",
"[",
"0",
"]",
"value",
"=",
"_convert_value_to_native",
"(",
"value",
")",
"return",
"value"
] |
Sets a single OID value. If you do not pass value_type hnmp will
try to guess the correct type. Autodetection is supported for:
* int and float (as Integer, fractional part will be discarded)
* IPv4 address (as IpAddress)
* str (as OctetString)
Unfortunately, pysnmp does not support the SNMP FLOAT type so
please use Integer instead.
|
[
"Sets",
"a",
"single",
"OID",
"value",
".",
"If",
"you",
"do",
"not",
"pass",
"value_type",
"hnmp",
"will",
"try",
"to",
"guess",
"the",
"correct",
"type",
".",
"Autodetection",
"is",
"supported",
"for",
":"
] |
a21f9e73c96a35bff2354894031c4788ad4ed2f0
|
https://github.com/trehn/hnmp/blob/a21f9e73c96a35bff2354894031c4788ad4ed2f0/hnmp.py#L203-L257
|
train
|
trehn/hnmp
|
hnmp.py
|
SNMP.table
|
def table(self, oid, columns=None, column_value_mapping=None, non_repeaters=0,
max_repetitions=20, fetch_all_columns=True):
"""
Get a table of values with the given OID prefix.
"""
snmpsecurity = self._get_snmp_security()
base_oid = oid.strip(".")
if not fetch_all_columns and not columns:
raise ValueError("please use the columns argument to "
"indicate which columns to fetch")
if fetch_all_columns:
columns_to_fetch = [""]
else:
columns_to_fetch = ["." + str(col_id) for col_id in columns.keys()]
full_obj_table = []
for col in columns_to_fetch:
try:
engine_error, pdu_error, pdu_error_index, obj_table = self._cmdgen.bulkCmd(
snmpsecurity,
cmdgen.UdpTransportTarget((self.host, self.port), timeout=self.timeout,
retries=self.retries),
non_repeaters,
max_repetitions,
oid + col,
)
except Exception as e:
raise SNMPError(e)
if engine_error:
raise SNMPError(engine_error)
if pdu_error:
raise SNMPError(pdu_error.prettyPrint())
# remove any trailing rows from the next subtree
try:
while not str(obj_table[-1][0][0].getOid()).lstrip(".").startswith(
base_oid + col + "."
):
obj_table.pop()
except IndexError:
pass
# append this column to full result
full_obj_table += obj_table
t = Table(columns=columns, column_value_mapping=column_value_mapping)
for row in full_obj_table:
for name, value in row:
oid = str(name.getOid()).strip(".")
value = _convert_value_to_native(value)
column, row_id = oid[len(base_oid) + 1:].split(".", 1)
t._add_value(int(column), row_id, value)
return t
|
python
|
def table(self, oid, columns=None, column_value_mapping=None, non_repeaters=0,
max_repetitions=20, fetch_all_columns=True):
"""
Get a table of values with the given OID prefix.
"""
snmpsecurity = self._get_snmp_security()
base_oid = oid.strip(".")
if not fetch_all_columns and not columns:
raise ValueError("please use the columns argument to "
"indicate which columns to fetch")
if fetch_all_columns:
columns_to_fetch = [""]
else:
columns_to_fetch = ["." + str(col_id) for col_id in columns.keys()]
full_obj_table = []
for col in columns_to_fetch:
try:
engine_error, pdu_error, pdu_error_index, obj_table = self._cmdgen.bulkCmd(
snmpsecurity,
cmdgen.UdpTransportTarget((self.host, self.port), timeout=self.timeout,
retries=self.retries),
non_repeaters,
max_repetitions,
oid + col,
)
except Exception as e:
raise SNMPError(e)
if engine_error:
raise SNMPError(engine_error)
if pdu_error:
raise SNMPError(pdu_error.prettyPrint())
# remove any trailing rows from the next subtree
try:
while not str(obj_table[-1][0][0].getOid()).lstrip(".").startswith(
base_oid + col + "."
):
obj_table.pop()
except IndexError:
pass
# append this column to full result
full_obj_table += obj_table
t = Table(columns=columns, column_value_mapping=column_value_mapping)
for row in full_obj_table:
for name, value in row:
oid = str(name.getOid()).strip(".")
value = _convert_value_to_native(value)
column, row_id = oid[len(base_oid) + 1:].split(".", 1)
t._add_value(int(column), row_id, value)
return t
|
[
"def",
"table",
"(",
"self",
",",
"oid",
",",
"columns",
"=",
"None",
",",
"column_value_mapping",
"=",
"None",
",",
"non_repeaters",
"=",
"0",
",",
"max_repetitions",
"=",
"20",
",",
"fetch_all_columns",
"=",
"True",
")",
":",
"snmpsecurity",
"=",
"self",
".",
"_get_snmp_security",
"(",
")",
"base_oid",
"=",
"oid",
".",
"strip",
"(",
"\".\"",
")",
"if",
"not",
"fetch_all_columns",
"and",
"not",
"columns",
":",
"raise",
"ValueError",
"(",
"\"please use the columns argument to \"",
"\"indicate which columns to fetch\"",
")",
"if",
"fetch_all_columns",
":",
"columns_to_fetch",
"=",
"[",
"\"\"",
"]",
"else",
":",
"columns_to_fetch",
"=",
"[",
"\".\"",
"+",
"str",
"(",
"col_id",
")",
"for",
"col_id",
"in",
"columns",
".",
"keys",
"(",
")",
"]",
"full_obj_table",
"=",
"[",
"]",
"for",
"col",
"in",
"columns_to_fetch",
":",
"try",
":",
"engine_error",
",",
"pdu_error",
",",
"pdu_error_index",
",",
"obj_table",
"=",
"self",
".",
"_cmdgen",
".",
"bulkCmd",
"(",
"snmpsecurity",
",",
"cmdgen",
".",
"UdpTransportTarget",
"(",
"(",
"self",
".",
"host",
",",
"self",
".",
"port",
")",
",",
"timeout",
"=",
"self",
".",
"timeout",
",",
"retries",
"=",
"self",
".",
"retries",
")",
",",
"non_repeaters",
",",
"max_repetitions",
",",
"oid",
"+",
"col",
",",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"SNMPError",
"(",
"e",
")",
"if",
"engine_error",
":",
"raise",
"SNMPError",
"(",
"engine_error",
")",
"if",
"pdu_error",
":",
"raise",
"SNMPError",
"(",
"pdu_error",
".",
"prettyPrint",
"(",
")",
")",
"# remove any trailing rows from the next subtree",
"try",
":",
"while",
"not",
"str",
"(",
"obj_table",
"[",
"-",
"1",
"]",
"[",
"0",
"]",
"[",
"0",
"]",
".",
"getOid",
"(",
")",
")",
".",
"lstrip",
"(",
"\".\"",
")",
".",
"startswith",
"(",
"base_oid",
"+",
"col",
"+",
"\".\"",
")",
":",
"obj_table",
".",
"pop",
"(",
")",
"except",
"IndexError",
":",
"pass",
"# append this column to full result",
"full_obj_table",
"+=",
"obj_table",
"t",
"=",
"Table",
"(",
"columns",
"=",
"columns",
",",
"column_value_mapping",
"=",
"column_value_mapping",
")",
"for",
"row",
"in",
"full_obj_table",
":",
"for",
"name",
",",
"value",
"in",
"row",
":",
"oid",
"=",
"str",
"(",
"name",
".",
"getOid",
"(",
")",
")",
".",
"strip",
"(",
"\".\"",
")",
"value",
"=",
"_convert_value_to_native",
"(",
"value",
")",
"column",
",",
"row_id",
"=",
"oid",
"[",
"len",
"(",
"base_oid",
")",
"+",
"1",
":",
"]",
".",
"split",
"(",
"\".\"",
",",
"1",
")",
"t",
".",
"_add_value",
"(",
"int",
"(",
"column",
")",
",",
"row_id",
",",
"value",
")",
"return",
"t"
] |
Get a table of values with the given OID prefix.
|
[
"Get",
"a",
"table",
"of",
"values",
"with",
"the",
"given",
"OID",
"prefix",
"."
] |
a21f9e73c96a35bff2354894031c4788ad4ed2f0
|
https://github.com/trehn/hnmp/blob/a21f9e73c96a35bff2354894031c4788ad4ed2f0/hnmp.py#L259-L317
|
train
|
ocslegna/auto_py_torrent
|
auto_py_torrent/auto_py_torrent.py
|
get_parser
|
def get_parser():
"""Load parser for command line arguments.
It parses argv/input into args variable.
"""
desc = Colors.LIGHTBLUE + textwrap.dedent(
'''\
Welcome to
_ _ _
__ _ _ _| |_ ___ _ __ _ _ | |_ ___ _ __ _ __ ___ _ __ | |_
/ _` | | | | __/ _ \ | '_ \| | | | | __/ _ \| '__| '__/ _ \ '_ \| __|
| (_| | |_| | || (_) | | |_) | |_| | | || (_) | | | | | __/ | | | |_
\__,_|\__,_|\__\___/____| .__/ \__, |___\__\___/|_| |_| \___|_| |_|\__|
|_____|_| |___/_____|
------------------------------------
auto_py_torrent is an automated tool for download files by obtaining
torrents or magnets that are in different provided pages that the
user can choose.
Its goal is to make it easier for users to find the files they want
and download them instantly.
An auto_py_torrent command is provided in which the user can
currently choose between two modes, best_rated and list mode, then it
selects one of the torrent tracking pages for multimedia content and
finally enter the text of what you want to download.
------------------------------------
''') + Colors.ENDC
usage_info = Colors.LGREEN + textwrap.dedent(
'''\
Use "%(prog)s --help" for more information.
Examples:
use "%(prog)s MODE SELECTED_PAGE STRING_TO_SEARCH # generic.
use "%(prog)s 0 0 "The simpsons" # best rated.
use "%(prog)s 1 0 "The simpsons" # list rated.
Mode options:
0: best_rated. # Download the most rated file.
1: list. # Get a list, and select one of them.
Page list options:
0: torrent project.
1: the pirate bay.
2: 1337x.
3: eztv.
4: limetorrents.
5: isohunt.
''') + Colors.ENDC
epi = Colors.LIGHTPURPLE + textwrap.dedent(
'''\
-> Thanks for using auto_py_torrent!
''') + Colors.ENDC
# Parent and only parser.
parser = argparse.ArgumentParser(
add_help=True,
formatter_class=argparse.RawTextHelpFormatter,
usage=usage_info,
description=desc,
epilog=epi)
parser.add_argument('mode', action='store',
choices=range(len(MODES)),
type=int,
help='Select mode of file download.\n'
' e.g: 0(rated) or 1(list).')
parser.add_argument('torr_page', action='store',
choices=range(len(TORRENTS)),
type=int,
help='Select tracking page to download from.\n'
' e.g: 0 to .. ' + str(len(TORRENTS)-1) + '.')
parser.add_argument('str_search', action='store',
type=str,
help='Input torrent string to search.\n'
' e.g: "String search"')
return(parser)
|
python
|
def get_parser():
"""Load parser for command line arguments.
It parses argv/input into args variable.
"""
desc = Colors.LIGHTBLUE + textwrap.dedent(
'''\
Welcome to
_ _ _
__ _ _ _| |_ ___ _ __ _ _ | |_ ___ _ __ _ __ ___ _ __ | |_
/ _` | | | | __/ _ \ | '_ \| | | | | __/ _ \| '__| '__/ _ \ '_ \| __|
| (_| | |_| | || (_) | | |_) | |_| | | || (_) | | | | | __/ | | | |_
\__,_|\__,_|\__\___/____| .__/ \__, |___\__\___/|_| |_| \___|_| |_|\__|
|_____|_| |___/_____|
------------------------------------
auto_py_torrent is an automated tool for download files by obtaining
torrents or magnets that are in different provided pages that the
user can choose.
Its goal is to make it easier for users to find the files they want
and download them instantly.
An auto_py_torrent command is provided in which the user can
currently choose between two modes, best_rated and list mode, then it
selects one of the torrent tracking pages for multimedia content and
finally enter the text of what you want to download.
------------------------------------
''') + Colors.ENDC
usage_info = Colors.LGREEN + textwrap.dedent(
'''\
Use "%(prog)s --help" for more information.
Examples:
use "%(prog)s MODE SELECTED_PAGE STRING_TO_SEARCH # generic.
use "%(prog)s 0 0 "The simpsons" # best rated.
use "%(prog)s 1 0 "The simpsons" # list rated.
Mode options:
0: best_rated. # Download the most rated file.
1: list. # Get a list, and select one of them.
Page list options:
0: torrent project.
1: the pirate bay.
2: 1337x.
3: eztv.
4: limetorrents.
5: isohunt.
''') + Colors.ENDC
epi = Colors.LIGHTPURPLE + textwrap.dedent(
'''\
-> Thanks for using auto_py_torrent!
''') + Colors.ENDC
# Parent and only parser.
parser = argparse.ArgumentParser(
add_help=True,
formatter_class=argparse.RawTextHelpFormatter,
usage=usage_info,
description=desc,
epilog=epi)
parser.add_argument('mode', action='store',
choices=range(len(MODES)),
type=int,
help='Select mode of file download.\n'
' e.g: 0(rated) or 1(list).')
parser.add_argument('torr_page', action='store',
choices=range(len(TORRENTS)),
type=int,
help='Select tracking page to download from.\n'
' e.g: 0 to .. ' + str(len(TORRENTS)-1) + '.')
parser.add_argument('str_search', action='store',
type=str,
help='Input torrent string to search.\n'
' e.g: "String search"')
return(parser)
|
[
"def",
"get_parser",
"(",
")",
":",
"desc",
"=",
"Colors",
".",
"LIGHTBLUE",
"+",
"textwrap",
".",
"dedent",
"(",
"'''\\\n Welcome to\n _ _ _\n __ _ _ _| |_ ___ _ __ _ _ | |_ ___ _ __ _ __ ___ _ __ | |_\n / _` | | | | __/ _ \\ | '_ \\| | | | | __/ _ \\| '__| '__/ _ \\ '_ \\| __|\n | (_| | |_| | || (_) | | |_) | |_| | | || (_) | | | | | __/ | | | |_\n \\__,_|\\__,_|\\__\\___/____| .__/ \\__, |___\\__\\___/|_| |_| \\___|_| |_|\\__|\n |_____|_| |___/_____|\n\n ------------------------------------\n auto_py_torrent is an automated tool for download files by obtaining\n torrents or magnets that are in different provided pages that the\n user can choose.\n\n Its goal is to make it easier for users to find the files they want\n and download them instantly.\n\n An auto_py_torrent command is provided in which the user can\n currently choose between two modes, best_rated and list mode, then it\n selects one of the torrent tracking pages for multimedia content and\n finally enter the text of what you want to download.\n ------------------------------------\n '''",
")",
"+",
"Colors",
".",
"ENDC",
"usage_info",
"=",
"Colors",
".",
"LGREEN",
"+",
"textwrap",
".",
"dedent",
"(",
"'''\\\n\n Use \"%(prog)s --help\" for more information.\n Examples:\n use \"%(prog)s MODE SELECTED_PAGE STRING_TO_SEARCH # generic.\n use \"%(prog)s 0 0 \"The simpsons\" # best rated.\n use \"%(prog)s 1 0 \"The simpsons\" # list rated.\n\n Mode options:\n 0: best_rated. # Download the most rated file.\n 1: list. # Get a list, and select one of them.\n\n Page list options:\n 0: torrent project.\n 1: the pirate bay.\n 2: 1337x.\n 3: eztv.\n 4: limetorrents.\n 5: isohunt.\n '''",
")",
"+",
"Colors",
".",
"ENDC",
"epi",
"=",
"Colors",
".",
"LIGHTPURPLE",
"+",
"textwrap",
".",
"dedent",
"(",
"'''\\\n -> Thanks for using auto_py_torrent!\n '''",
")",
"+",
"Colors",
".",
"ENDC",
"# Parent and only parser.",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"add_help",
"=",
"True",
",",
"formatter_class",
"=",
"argparse",
".",
"RawTextHelpFormatter",
",",
"usage",
"=",
"usage_info",
",",
"description",
"=",
"desc",
",",
"epilog",
"=",
"epi",
")",
"parser",
".",
"add_argument",
"(",
"'mode'",
",",
"action",
"=",
"'store'",
",",
"choices",
"=",
"range",
"(",
"len",
"(",
"MODES",
")",
")",
",",
"type",
"=",
"int",
",",
"help",
"=",
"'Select mode of file download.\\n'",
"' e.g: 0(rated) or 1(list).'",
")",
"parser",
".",
"add_argument",
"(",
"'torr_page'",
",",
"action",
"=",
"'store'",
",",
"choices",
"=",
"range",
"(",
"len",
"(",
"TORRENTS",
")",
")",
",",
"type",
"=",
"int",
",",
"help",
"=",
"'Select tracking page to download from.\\n'",
"' e.g: 0 to .. '",
"+",
"str",
"(",
"len",
"(",
"TORRENTS",
")",
"-",
"1",
")",
"+",
"'.'",
")",
"parser",
".",
"add_argument",
"(",
"'str_search'",
",",
"action",
"=",
"'store'",
",",
"type",
"=",
"str",
",",
"help",
"=",
"'Input torrent string to search.\\n'",
"' e.g: \"String search\"'",
")",
"return",
"(",
"parser",
")"
] |
Load parser for command line arguments.
It parses argv/input into args variable.
|
[
"Load",
"parser",
"for",
"command",
"line",
"arguments",
"."
] |
32761fe18b3112e6e3754da863488b50929fcc41
|
https://github.com/ocslegna/auto_py_torrent/blob/32761fe18b3112e6e3754da863488b50929fcc41/auto_py_torrent/auto_py_torrent.py#L98-L174
|
train
|
ocslegna/auto_py_torrent
|
auto_py_torrent/auto_py_torrent.py
|
insert
|
def insert(args):
"""Insert args values into instance variables."""
string_search = args.str_search
mode_search = MODES[args.mode]
page = list(TORRENTS[args.torr_page].keys())[0]
key_search = TORRENTS[args.torr_page][page]['key_search']
torrent_page = TORRENTS[args.torr_page][page]['page']
domain = TORRENTS[args.torr_page][page]['domain']
return([args, string_search, mode_search, page,
key_search, torrent_page, domain])
|
python
|
def insert(args):
"""Insert args values into instance variables."""
string_search = args.str_search
mode_search = MODES[args.mode]
page = list(TORRENTS[args.torr_page].keys())[0]
key_search = TORRENTS[args.torr_page][page]['key_search']
torrent_page = TORRENTS[args.torr_page][page]['page']
domain = TORRENTS[args.torr_page][page]['domain']
return([args, string_search, mode_search, page,
key_search, torrent_page, domain])
|
[
"def",
"insert",
"(",
"args",
")",
":",
"string_search",
"=",
"args",
".",
"str_search",
"mode_search",
"=",
"MODES",
"[",
"args",
".",
"mode",
"]",
"page",
"=",
"list",
"(",
"TORRENTS",
"[",
"args",
".",
"torr_page",
"]",
".",
"keys",
"(",
")",
")",
"[",
"0",
"]",
"key_search",
"=",
"TORRENTS",
"[",
"args",
".",
"torr_page",
"]",
"[",
"page",
"]",
"[",
"'key_search'",
"]",
"torrent_page",
"=",
"TORRENTS",
"[",
"args",
".",
"torr_page",
"]",
"[",
"page",
"]",
"[",
"'page'",
"]",
"domain",
"=",
"TORRENTS",
"[",
"args",
".",
"torr_page",
"]",
"[",
"page",
"]",
"[",
"'domain'",
"]",
"return",
"(",
"[",
"args",
",",
"string_search",
",",
"mode_search",
",",
"page",
",",
"key_search",
",",
"torrent_page",
",",
"domain",
"]",
")"
] |
Insert args values into instance variables.
|
[
"Insert",
"args",
"values",
"into",
"instance",
"variables",
"."
] |
32761fe18b3112e6e3754da863488b50929fcc41
|
https://github.com/ocslegna/auto_py_torrent/blob/32761fe18b3112e6e3754da863488b50929fcc41/auto_py_torrent/auto_py_torrent.py#L579-L588
|
train
|
ocslegna/auto_py_torrent
|
auto_py_torrent/auto_py_torrent.py
|
run_it
|
def run_it():
"""Search and download torrents until the user says it so."""
initialize()
parser = get_parser()
args = None
first_parse = True
while(True):
if first_parse is True:
first_parse = False
args = parser.parse_args()
else:
print(textwrap.dedent(
'''\
Search again like in the beginning.
-- You can either choose best rated or list mode.
-- This time, you can insert the search string without double quotes.
Remember the list mode options!
0: torrent project.
1: the pirate bay.
2: 1337x.
3: eztv.
4: limetorrents.
5: isohunt.
'''))
print('Or.. if you want to exit just write "' +
Colors.LRED + 'Q' + Colors.ENDC + '" or "' +
Colors.LRED + 'q' + Colors.ENDC + '".')
input_parse = input('>> ').replace("'", "").replace('"', '')
if input_parse in ['Q', 'q']:
sys.exit(1)
args = parser.parse_args(input_parse.split(' ', 2))
if args.str_search.strip() == "":
print('Please insert an appropiate non-empty string.')
else:
auto = AutoPy(*insert(args))
auto.get_content()
auto.select_torrent()
auto.download_torrent()
|
python
|
def run_it():
"""Search and download torrents until the user says it so."""
initialize()
parser = get_parser()
args = None
first_parse = True
while(True):
if first_parse is True:
first_parse = False
args = parser.parse_args()
else:
print(textwrap.dedent(
'''\
Search again like in the beginning.
-- You can either choose best rated or list mode.
-- This time, you can insert the search string without double quotes.
Remember the list mode options!
0: torrent project.
1: the pirate bay.
2: 1337x.
3: eztv.
4: limetorrents.
5: isohunt.
'''))
print('Or.. if you want to exit just write "' +
Colors.LRED + 'Q' + Colors.ENDC + '" or "' +
Colors.LRED + 'q' + Colors.ENDC + '".')
input_parse = input('>> ').replace("'", "").replace('"', '')
if input_parse in ['Q', 'q']:
sys.exit(1)
args = parser.parse_args(input_parse.split(' ', 2))
if args.str_search.strip() == "":
print('Please insert an appropiate non-empty string.')
else:
auto = AutoPy(*insert(args))
auto.get_content()
auto.select_torrent()
auto.download_torrent()
|
[
"def",
"run_it",
"(",
")",
":",
"initialize",
"(",
")",
"parser",
"=",
"get_parser",
"(",
")",
"args",
"=",
"None",
"first_parse",
"=",
"True",
"while",
"(",
"True",
")",
":",
"if",
"first_parse",
"is",
"True",
":",
"first_parse",
"=",
"False",
"args",
"=",
"parser",
".",
"parse_args",
"(",
")",
"else",
":",
"print",
"(",
"textwrap",
".",
"dedent",
"(",
"'''\\\n Search again like in the beginning.\n -- You can either choose best rated or list mode.\n -- This time, you can insert the search string without double quotes.\n Remember the list mode options!\n 0: torrent project.\n 1: the pirate bay.\n 2: 1337x.\n 3: eztv.\n 4: limetorrents.\n 5: isohunt.\n '''",
")",
")",
"print",
"(",
"'Or.. if you want to exit just write \"'",
"+",
"Colors",
".",
"LRED",
"+",
"'Q'",
"+",
"Colors",
".",
"ENDC",
"+",
"'\" or \"'",
"+",
"Colors",
".",
"LRED",
"+",
"'q'",
"+",
"Colors",
".",
"ENDC",
"+",
"'\".'",
")",
"input_parse",
"=",
"input",
"(",
"'>> '",
")",
".",
"replace",
"(",
"\"'\"",
",",
"\"\"",
")",
".",
"replace",
"(",
"'\"'",
",",
"''",
")",
"if",
"input_parse",
"in",
"[",
"'Q'",
",",
"'q'",
"]",
":",
"sys",
".",
"exit",
"(",
"1",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
"input_parse",
".",
"split",
"(",
"' '",
",",
"2",
")",
")",
"if",
"args",
".",
"str_search",
".",
"strip",
"(",
")",
"==",
"\"\"",
":",
"print",
"(",
"'Please insert an appropiate non-empty string.'",
")",
"else",
":",
"auto",
"=",
"AutoPy",
"(",
"*",
"insert",
"(",
"args",
")",
")",
"auto",
".",
"get_content",
"(",
")",
"auto",
".",
"select_torrent",
"(",
")",
"auto",
".",
"download_torrent",
"(",
")"
] |
Search and download torrents until the user says it so.
|
[
"Search",
"and",
"download",
"torrents",
"until",
"the",
"user",
"says",
"it",
"so",
"."
] |
32761fe18b3112e6e3754da863488b50929fcc41
|
https://github.com/ocslegna/auto_py_torrent/blob/32761fe18b3112e6e3754da863488b50929fcc41/auto_py_torrent/auto_py_torrent.py#L596-L636
|
train
|
ocslegna/auto_py_torrent
|
auto_py_torrent/auto_py_torrent.py
|
AutoPy.open_magnet
|
def open_magnet(self):
    """Hand ``self.magnet`` to the platform's default URI handler.

    The original version had four branches, two of which ('linux' and
    the final ``else``) were byte-identical and two of which
    ('win32'/'cygwin') duplicated each other; they are merged here
    with no change in behaviour.
    """
    if sys.platform.startswith(('win32', 'cygwin')):
        # Windows resolves magnet: URIs through file-type associations.
        os.startfile(self.magnet)
    elif sys.platform.startswith('darwin'):
        # macOS: 'open' delegates to the registered handler.
        subprocess.Popen(['open', self.magnet],
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    else:
        # Linux and any other POSIX-like platform: rely on xdg-open.
        subprocess.Popen(['xdg-open', self.magnet],
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
python
|
def open_magnet(self):
"""Open magnet according to os."""
if sys.platform.startswith('linux'):
subprocess.Popen(['xdg-open', self.magnet],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
elif sys.platform.startswith('win32'):
os.startfile(self.magnet)
elif sys.platform.startswith('cygwin'):
os.startfile(self.magnet)
elif sys.platform.startswith('darwin'):
subprocess.Popen(['open', self.magnet],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
else:
subprocess.Popen(['xdg-open', self.magnet],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
[
"def",
"open_magnet",
"(",
"self",
")",
":",
"if",
"sys",
".",
"platform",
".",
"startswith",
"(",
"'linux'",
")",
":",
"subprocess",
".",
"Popen",
"(",
"[",
"'xdg-open'",
",",
"self",
".",
"magnet",
"]",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
",",
"stderr",
"=",
"subprocess",
".",
"PIPE",
")",
"elif",
"sys",
".",
"platform",
".",
"startswith",
"(",
"'win32'",
")",
":",
"os",
".",
"startfile",
"(",
"self",
".",
"magnet",
")",
"elif",
"sys",
".",
"platform",
".",
"startswith",
"(",
"'cygwin'",
")",
":",
"os",
".",
"startfile",
"(",
"self",
".",
"magnet",
")",
"elif",
"sys",
".",
"platform",
".",
"startswith",
"(",
"'darwin'",
")",
":",
"subprocess",
".",
"Popen",
"(",
"[",
"'open'",
",",
"self",
".",
"magnet",
"]",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
",",
"stderr",
"=",
"subprocess",
".",
"PIPE",
")",
"else",
":",
"subprocess",
".",
"Popen",
"(",
"[",
"'xdg-open'",
",",
"self",
".",
"magnet",
"]",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
",",
"stderr",
"=",
"subprocess",
".",
"PIPE",
")"
] |
Open magnet according to os.
|
[
"Open",
"magnet",
"according",
"to",
"os",
"."
] |
32761fe18b3112e6e3754da863488b50929fcc41
|
https://github.com/ocslegna/auto_py_torrent/blob/32761fe18b3112e6e3754da863488b50929fcc41/auto_py_torrent/auto_py_torrent.py#L211-L225
|
train
|
ocslegna/auto_py_torrent
|
auto_py_torrent/auto_py_torrent.py
|
AutoPy.get_magnet
|
def get_magnet(self, url):
    """Fetch the torrent detail page at *url* and store its magnet link.

    The anchor element holding the magnet is located differently per
    site; the resulting href is saved in ``self.magnet``.  An unknown
    ``self.page`` aborts the program.
    """
    response = requests.get(url)
    detail_soup = BeautifulSoup(response.content, 'lxml')
    if self.page == 'torrent_project':
        anchor = detail_soup.find(
            'a', href=True, text=re.compile('Download'))
    elif self.page == 'the_pirate_bay':
        anchor = detail_soup.find(
            'a', href=True, text=re.compile('Get this torrent'))
    elif self.page == '1337x':
        # 1337x wraps the magnet link in a category-detail container.
        detail_div = detail_soup.find(
            'div', {'class': 'torrent-category-detail'})
        anchor = detail_div.find('a', href=re.compile('magnet'))
    elif self.page == 'isohunt':
        anchor = detail_soup.find('a', href=re.compile('magnet'))
    else:
        print('Wrong page to get magnet!')
        sys.exit(1)
    self.magnet = anchor['href']
|
python
|
def get_magnet(self, url):
"""Get magnet from torrent page. Url already got domain."""
content_most_rated = requests.get(url)
rated_soup = BeautifulSoup(content_most_rated.content, 'lxml')
if self.page == 'torrent_project':
self.magnet = rated_soup.find(
'a', href=True, text=re.compile('Download'))['href']
elif self.page == 'the_pirate_bay':
self.magnet = rated_soup.find(
'a', href=True, text=re.compile('Get this torrent'))['href']
elif self.page == '1337x':
div1337 = rated_soup.find(
'div', {'class': 'torrent-category-detail'})
self.magnet = div1337.find('a', href=re.compile('magnet'))['href']
elif self.page == 'isohunt':
self.magnet = rated_soup.find(
'a', href=re.compile('magnet'))['href']
else:
print('Wrong page to get magnet!')
sys.exit(1)
|
[
"def",
"get_magnet",
"(",
"self",
",",
"url",
")",
":",
"content_most_rated",
"=",
"requests",
".",
"get",
"(",
"url",
")",
"rated_soup",
"=",
"BeautifulSoup",
"(",
"content_most_rated",
".",
"content",
",",
"'lxml'",
")",
"if",
"self",
".",
"page",
"==",
"'torrent_project'",
":",
"self",
".",
"magnet",
"=",
"rated_soup",
".",
"find",
"(",
"'a'",
",",
"href",
"=",
"True",
",",
"text",
"=",
"re",
".",
"compile",
"(",
"'Download'",
")",
")",
"[",
"'href'",
"]",
"elif",
"self",
".",
"page",
"==",
"'the_pirate_bay'",
":",
"self",
".",
"magnet",
"=",
"rated_soup",
".",
"find",
"(",
"'a'",
",",
"href",
"=",
"True",
",",
"text",
"=",
"re",
".",
"compile",
"(",
"'Get this torrent'",
")",
")",
"[",
"'href'",
"]",
"elif",
"self",
".",
"page",
"==",
"'1337x'",
":",
"div1337",
"=",
"rated_soup",
".",
"find",
"(",
"'div'",
",",
"{",
"'class'",
":",
"'torrent-category-detail'",
"}",
")",
"self",
".",
"magnet",
"=",
"div1337",
".",
"find",
"(",
"'a'",
",",
"href",
"=",
"re",
".",
"compile",
"(",
"'magnet'",
")",
")",
"[",
"'href'",
"]",
"elif",
"self",
".",
"page",
"==",
"'isohunt'",
":",
"self",
".",
"magnet",
"=",
"rated_soup",
".",
"find",
"(",
"'a'",
",",
"href",
"=",
"re",
".",
"compile",
"(",
"'magnet'",
")",
")",
"[",
"'href'",
"]",
"else",
":",
"print",
"(",
"'Wrong page to get magnet!'",
")",
"sys",
".",
"exit",
"(",
"1",
")"
] |
Get magnet from torrent page. Url already got domain.
|
[
"Get",
"magnet",
"from",
"torrent",
"page",
".",
"Url",
"already",
"got",
"domain",
"."
] |
32761fe18b3112e6e3754da863488b50929fcc41
|
https://github.com/ocslegna/auto_py_torrent/blob/32761fe18b3112e6e3754da863488b50929fcc41/auto_py_torrent/auto_py_torrent.py#L227-L251
|
train
|
ocslegna/auto_py_torrent
|
auto_py_torrent/auto_py_torrent.py
|
AutoPy.download_torrent
|
def download_torrent(self):
    """Download the torrent selected earlier.

    In 'best_rated' mode the magnet is already stored, so it is opened
    directly.  In 'list' mode ``self.selected`` indexes into
    ``self.hrefs``: eztv/limetorrents entries are magnet links
    themselves, while the other sites link a detail page that must be
    resolved via ``get_magnet`` first.  Unexpected errors print a
    traceback and terminate with a non-zero status.
    """
    try:
        if self.back_to_menu is True:
            # User backed out of the selection screen; nothing to do.
            return
        if self.found_torrents is False:
            print('Nothing found.')
            return
        if self.mode_search == 'best_rated':
            print('Downloading..')
            self.open_magnet()
        elif self.mode_search == 'list':
            if self.selected is not None:
                # t_p, pirate and 1337x got magnet inside, else direct.
                if self.page in ['eztv', 'limetorrents']:
                    self.magnet = self.hrefs[int(self.selected)]
                    print('Downloading..')
                    self.open_magnet()
                elif self.page in ['the_pirate_bay',
                                   'torrent_project',
                                   '1337x',
                                   'isohunt']:
                    url = self.hrefs[int(self.selected)]
                    self.get_magnet(url)
                    print('Downloading..')
                    self.open_magnet()
                else:
                    print('Bad selected page.')
            else:
                print('Nothing selected.')
                sys.exit(1)
    except Exception:
        print(traceback.format_exc())
        # Bug fix: exit with failure status (was sys.exit(0)) so shells
        # and callers can tell that the download did not happen.
        sys.exit(1)
|
python
|
def download_torrent(self):
"""Download torrent.
Rated implies download the unique best rated torrent found.
Otherwise: get the magnet and download it.
"""
try:
if self.back_to_menu is True:
return
if self.found_torrents is False:
print('Nothing found.')
return
if self.mode_search == 'best_rated':
print('Downloading..')
self.open_magnet()
elif self.mode_search == 'list':
if self.selected is not None:
# t_p, pirate and 1337x got magnet inside, else direct.
if self.page in ['eztv', 'limetorrents']:
self.magnet = self.hrefs[int(self.selected)]
print('Downloading..')
self.open_magnet()
elif self.page in ['the_pirate_bay',
'torrent_project',
'1337x',
'isohunt']:
url = self.hrefs[int(self.selected)]
self.get_magnet(url)
print('Downloading..')
self.open_magnet()
else:
print('Bad selected page.')
else:
print('Nothing selected.')
sys.exit(1)
except Exception:
print(traceback.format_exc())
sys.exit(0)
|
[
"def",
"download_torrent",
"(",
"self",
")",
":",
"try",
":",
"if",
"self",
".",
"back_to_menu",
"is",
"True",
":",
"return",
"if",
"self",
".",
"found_torrents",
"is",
"False",
":",
"print",
"(",
"'Nothing found.'",
")",
"return",
"if",
"self",
".",
"mode_search",
"==",
"'best_rated'",
":",
"print",
"(",
"'Downloading..'",
")",
"self",
".",
"open_magnet",
"(",
")",
"elif",
"self",
".",
"mode_search",
"==",
"'list'",
":",
"if",
"self",
".",
"selected",
"is",
"not",
"None",
":",
"# t_p, pirate and 1337x got magnet inside, else direct.",
"if",
"self",
".",
"page",
"in",
"[",
"'eztv'",
",",
"'limetorrents'",
"]",
":",
"self",
".",
"magnet",
"=",
"self",
".",
"hrefs",
"[",
"int",
"(",
"self",
".",
"selected",
")",
"]",
"print",
"(",
"'Downloading..'",
")",
"self",
".",
"open_magnet",
"(",
")",
"elif",
"self",
".",
"page",
"in",
"[",
"'the_pirate_bay'",
",",
"'torrent_project'",
",",
"'1337x'",
",",
"'isohunt'",
"]",
":",
"url",
"=",
"self",
".",
"hrefs",
"[",
"int",
"(",
"self",
".",
"selected",
")",
"]",
"self",
".",
"get_magnet",
"(",
"url",
")",
"print",
"(",
"'Downloading..'",
")",
"self",
".",
"open_magnet",
"(",
")",
"else",
":",
"print",
"(",
"'Bad selected page.'",
")",
"else",
":",
"print",
"(",
"'Nothing selected.'",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"except",
"Exception",
":",
"print",
"(",
"traceback",
".",
"format_exc",
"(",
")",
")",
"sys",
".",
"exit",
"(",
"0",
")"
] |
Download torrent.
Rated implies download the unique best rated torrent found.
Otherwise: get the magnet and download it.
|
[
"Download",
"torrent",
"."
] |
32761fe18b3112e6e3754da863488b50929fcc41
|
https://github.com/ocslegna/auto_py_torrent/blob/32761fe18b3112e6e3754da863488b50929fcc41/auto_py_torrent/auto_py_torrent.py#L253-L290
|
train
|
ocslegna/auto_py_torrent
|
auto_py_torrent/auto_py_torrent.py
|
AutoPy.build_table
|
def build_table(self):
    """Render the scraped results in ``self.elements`` as a console table.

    Fills ``self.hrefs`` with one torrent/magnet link per result row and
    prints a tabulate 'psql' table.  Column extraction is site-specific:
    each branch below maps the per-site tuples stored in
    ``self.elements`` (one entry per table column, produced by
    ``soupify``) onto title/seeders/leechers/age/size lists.  Rows
    alternate between two colour schemes for readability.
    """
    headers = ['Title', 'Seeders', 'Leechers', 'Age', 'Size']
    titles = []
    seeders = []
    leechers = []
    ages = []
    sizes = []
    if self.page == 'torrent_project':
        # elements: [title spans, seeders, leechers, ages, sizes].
        titles = [list(span.find('a').stripped_strings)[0]
                  for span in self.elements[0]]
        seeders = [span.get_text() for span in self.elements[1]]
        leechers = [span.get_text() for span in self.elements[2]]
        ages = [span.get_text() for span in self.elements[3]]
        sizes = [span.get_text() for span in self.elements[4]]
        # Torrents
        self.hrefs = [self.domain +
                      span.find('a')['href']
                      for span in self.elements[0]]
    elif self.page == 'the_pirate_bay':
        # First column cell carries title, description font and link;
        # seeders/leechers come from the other two columns.
        # NOTE(review): this branch appends to self.hrefs instead of
        # rebuilding it — relies on hrefs being reset elsewhere; verify.
        for elem in self.elements[0]:
            title = elem.find('a', {'class': 'detLink'}).get_text()
            titles.append(title)
            font_text = elem.find(
                'font', {'class': 'detDesc'}).get_text()
            dammit = UnicodeDammit(font_text)
            # Keep the first two comma-separated fields (age, size);
            # the trailing field is dropped.
            age, size = dammit.unicode_markup.split(',')[:-1]
            ages.append(age)
            sizes.append(size)
            # Torrent
            href = self.domain + \
                elem.find('a', title=re.compile('magnet'))['href']
            self.hrefs.append(str(href))
        seeders = [elem.get_text() for elem in self.elements[1]]
        leechers = [elem.get_text() for elem in self.elements[2]]
    elif self.page == '1337x':
        titles = [elem.get_text() for elem in self.elements[0]]
        seeders = [elem.get_text() for elem in self.elements[1]]
        leechers = [elem.get_text() for elem in self.elements[2]]
        ages = [elem.get_text() for elem in self.elements[3]]
        # Size cell has extra text; keep only the part before the
        # injected '|' separator.
        sizes = [elem.get_text('|').split('|')[0]
                 for elem in self.elements[4]]
        # Torrent
        self.hrefs = [self.domain +
                      elem.find(href=re.compile('torrent'))['href']
                      for elem in self.elements[0]]
    elif self.page == 'eztv':
        # eztv lists no leecher count; the column is shown as '-'.
        titles = [elem.get_text() for elem in self.elements[0]]
        seeders = [elem.get_text() for elem in self.elements[4]]
        leechers = ['-' for elem in self.elements[4]]
        ages = [elem.get_text() for elem in self.elements[3]]
        sizes = [elem.get_text() for elem in self.elements[2]]
        # Magnets
        self.hrefs = [elem.find(href=re.compile('magnet'))['href']
                      for elem in self.elements[1]]
    elif self.page == 'limetorrents':
        titles = [elem.get_text() for elem in self.elements[0]]
        seeders = [elem.get_text() for elem in self.elements[3]]
        leechers = [elem.get_text() for elem in self.elements[4]]
        ages = [elem.get_text() for elem in self.elements[1]]
        sizes = [elem.get_text() for elem in self.elements[2]]
        # Magnets
        self.hrefs = [elem.find('a', href=re.compile('torrent'))['href']
                      for elem in self.elements[0]]
    elif self.page == 'isohunt':
        # isohunt lists no leecher count; the column is shown as '-'.
        titles = [elem.get_text() for elem in self.elements[0]]
        seeders = [elem.get_text() for elem in self.elements[5]]
        leechers = ['-' for elem in self.elements[5]]
        ages = [elem.get_text() for elem in self.elements[3]]
        sizes = [elem.get_text() for elem in self.elements[4]]
        # Torrents
        self.hrefs = [self.domain +
                      elem.find(href=re.compile('torrent_details'))['href']
                      for elem in self.elements[0]]
    else:
        print('Error page')
    # One row per collected link; titles are truncated to 75 chars and
    # every even-numbered row (1-based) gets the alternate colour set.
    self.table = [[Colors.BOLD +
                   UnicodeDammit(titles[i][:75].strip(), ["utf-8"]).unicode_markup +
                   Colors.ENDC
                   if (i + 1) % 2 == 0
                   else UnicodeDammit(
                       titles[i][:75].strip()).unicode_markup,
                   Colors.SEEDER + seeders[i].strip() + Colors.ENDC
                   if (i + 1) % 2 == 0
                   else Colors.LGREEN + seeders[i].strip() + Colors.ENDC,
                   Colors.LEECHER + leechers[i].strip() + Colors.ENDC
                   if (i + 1) % 2 == 0
                   else Colors.LRED + leechers[i].strip() + Colors.ENDC,
                   Colors.LIGHTBLUE + ages[i].strip() + Colors.ENDC
                   if (i + 1) % 2 == 0
                   else Colors.BLUE + ages[i].strip() + Colors.ENDC,
                   Colors.PINK + sizes[i].strip() + Colors.ENDC
                   if (i + 1) % 2 == 0
                   else Colors.PURPLE + sizes[i].strip() + Colors.ENDC]
                  for i in range(len(self.hrefs))]
    # showindex=True prints the row numbers the user later types to
    # pick a torrent.
    print(tabulate(self.table,
                   headers=headers,
                   tablefmt='psql',
                   numalign='right',
                   stralign='left',
                   showindex=True))
|
python
|
def build_table(self):
"""Build table."""
headers = ['Title', 'Seeders', 'Leechers', 'Age', 'Size']
titles = []
seeders = []
leechers = []
ages = []
sizes = []
if self.page == 'torrent_project':
titles = [list(span.find('a').stripped_strings)[0]
for span in self.elements[0]]
seeders = [span.get_text() for span in self.elements[1]]
leechers = [span.get_text() for span in self.elements[2]]
ages = [span.get_text() for span in self.elements[3]]
sizes = [span.get_text() for span in self.elements[4]]
# Torrents
self.hrefs = [self.domain +
span.find('a')['href']
for span in self.elements[0]]
elif self.page == 'the_pirate_bay':
for elem in self.elements[0]:
title = elem.find('a', {'class': 'detLink'}).get_text()
titles.append(title)
font_text = elem.find(
'font', {'class': 'detDesc'}).get_text()
dammit = UnicodeDammit(font_text)
age, size = dammit.unicode_markup.split(',')[:-1]
ages.append(age)
sizes.append(size)
# Torrent
href = self.domain + \
elem.find('a', title=re.compile('magnet'))['href']
self.hrefs.append(str(href))
seeders = [elem.get_text() for elem in self.elements[1]]
leechers = [elem.get_text() for elem in self.elements[2]]
elif self.page == '1337x':
titles = [elem.get_text() for elem in self.elements[0]]
seeders = [elem.get_text() for elem in self.elements[1]]
leechers = [elem.get_text() for elem in self.elements[2]]
ages = [elem.get_text() for elem in self.elements[3]]
sizes = [elem.get_text('|').split('|')[0]
for elem in self.elements[4]]
# Torrent
self.hrefs = [self.domain +
elem.find(href=re.compile('torrent'))['href']
for elem in self.elements[0]]
elif self.page == 'eztv':
titles = [elem.get_text() for elem in self.elements[0]]
seeders = [elem.get_text() for elem in self.elements[4]]
leechers = ['-' for elem in self.elements[4]]
ages = [elem.get_text() for elem in self.elements[3]]
sizes = [elem.get_text() for elem in self.elements[2]]
# Magnets
self.hrefs = [elem.find(href=re.compile('magnet'))['href']
for elem in self.elements[1]]
elif self.page == 'limetorrents':
titles = [elem.get_text() for elem in self.elements[0]]
seeders = [elem.get_text() for elem in self.elements[3]]
leechers = [elem.get_text() for elem in self.elements[4]]
ages = [elem.get_text() for elem in self.elements[1]]
sizes = [elem.get_text() for elem in self.elements[2]]
# Magnets
self.hrefs = [elem.find('a', href=re.compile('torrent'))['href']
for elem in self.elements[0]]
elif self.page == 'isohunt':
titles = [elem.get_text() for elem in self.elements[0]]
seeders = [elem.get_text() for elem in self.elements[5]]
leechers = ['-' for elem in self.elements[5]]
ages = [elem.get_text() for elem in self.elements[3]]
sizes = [elem.get_text() for elem in self.elements[4]]
# Torrents
self.hrefs = [self.domain +
elem.find(href=re.compile('torrent_details'))['href']
for elem in self.elements[0]]
else:
print('Error page')
self.table = [[Colors.BOLD +
UnicodeDammit(titles[i][:75].strip(), ["utf-8"]).unicode_markup +
Colors.ENDC
if (i + 1) % 2 == 0
else UnicodeDammit(
titles[i][:75].strip()).unicode_markup,
Colors.SEEDER + seeders[i].strip() + Colors.ENDC
if (i + 1) % 2 == 0
else Colors.LGREEN + seeders[i].strip() + Colors.ENDC,
Colors.LEECHER + leechers[i].strip() + Colors.ENDC
if (i + 1) % 2 == 0
else Colors.LRED + leechers[i].strip() + Colors.ENDC,
Colors.LIGHTBLUE + ages[i].strip() + Colors.ENDC
if (i + 1) % 2 == 0
else Colors.BLUE + ages[i].strip() + Colors.ENDC,
Colors.PINK + sizes[i].strip() + Colors.ENDC
if (i + 1) % 2 == 0
else Colors.PURPLE + sizes[i].strip() + Colors.ENDC]
for i in range(len(self.hrefs))]
print(tabulate(self.table,
headers=headers,
tablefmt='psql',
numalign='right',
stralign='left',
showindex=True))
|
[
"def",
"build_table",
"(",
"self",
")",
":",
"headers",
"=",
"[",
"'Title'",
",",
"'Seeders'",
",",
"'Leechers'",
",",
"'Age'",
",",
"'Size'",
"]",
"titles",
"=",
"[",
"]",
"seeders",
"=",
"[",
"]",
"leechers",
"=",
"[",
"]",
"ages",
"=",
"[",
"]",
"sizes",
"=",
"[",
"]",
"if",
"self",
".",
"page",
"==",
"'torrent_project'",
":",
"titles",
"=",
"[",
"list",
"(",
"span",
".",
"find",
"(",
"'a'",
")",
".",
"stripped_strings",
")",
"[",
"0",
"]",
"for",
"span",
"in",
"self",
".",
"elements",
"[",
"0",
"]",
"]",
"seeders",
"=",
"[",
"span",
".",
"get_text",
"(",
")",
"for",
"span",
"in",
"self",
".",
"elements",
"[",
"1",
"]",
"]",
"leechers",
"=",
"[",
"span",
".",
"get_text",
"(",
")",
"for",
"span",
"in",
"self",
".",
"elements",
"[",
"2",
"]",
"]",
"ages",
"=",
"[",
"span",
".",
"get_text",
"(",
")",
"for",
"span",
"in",
"self",
".",
"elements",
"[",
"3",
"]",
"]",
"sizes",
"=",
"[",
"span",
".",
"get_text",
"(",
")",
"for",
"span",
"in",
"self",
".",
"elements",
"[",
"4",
"]",
"]",
"# Torrents",
"self",
".",
"hrefs",
"=",
"[",
"self",
".",
"domain",
"+",
"span",
".",
"find",
"(",
"'a'",
")",
"[",
"'href'",
"]",
"for",
"span",
"in",
"self",
".",
"elements",
"[",
"0",
"]",
"]",
"elif",
"self",
".",
"page",
"==",
"'the_pirate_bay'",
":",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"0",
"]",
":",
"title",
"=",
"elem",
".",
"find",
"(",
"'a'",
",",
"{",
"'class'",
":",
"'detLink'",
"}",
")",
".",
"get_text",
"(",
")",
"titles",
".",
"append",
"(",
"title",
")",
"font_text",
"=",
"elem",
".",
"find",
"(",
"'font'",
",",
"{",
"'class'",
":",
"'detDesc'",
"}",
")",
".",
"get_text",
"(",
")",
"dammit",
"=",
"UnicodeDammit",
"(",
"font_text",
")",
"age",
",",
"size",
"=",
"dammit",
".",
"unicode_markup",
".",
"split",
"(",
"','",
")",
"[",
":",
"-",
"1",
"]",
"ages",
".",
"append",
"(",
"age",
")",
"sizes",
".",
"append",
"(",
"size",
")",
"# Torrent",
"href",
"=",
"self",
".",
"domain",
"+",
"elem",
".",
"find",
"(",
"'a'",
",",
"title",
"=",
"re",
".",
"compile",
"(",
"'magnet'",
")",
")",
"[",
"'href'",
"]",
"self",
".",
"hrefs",
".",
"append",
"(",
"str",
"(",
"href",
")",
")",
"seeders",
"=",
"[",
"elem",
".",
"get_text",
"(",
")",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"1",
"]",
"]",
"leechers",
"=",
"[",
"elem",
".",
"get_text",
"(",
")",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"2",
"]",
"]",
"elif",
"self",
".",
"page",
"==",
"'1337x'",
":",
"titles",
"=",
"[",
"elem",
".",
"get_text",
"(",
")",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"0",
"]",
"]",
"seeders",
"=",
"[",
"elem",
".",
"get_text",
"(",
")",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"1",
"]",
"]",
"leechers",
"=",
"[",
"elem",
".",
"get_text",
"(",
")",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"2",
"]",
"]",
"ages",
"=",
"[",
"elem",
".",
"get_text",
"(",
")",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"3",
"]",
"]",
"sizes",
"=",
"[",
"elem",
".",
"get_text",
"(",
"'|'",
")",
".",
"split",
"(",
"'|'",
")",
"[",
"0",
"]",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"4",
"]",
"]",
"# Torrent",
"self",
".",
"hrefs",
"=",
"[",
"self",
".",
"domain",
"+",
"elem",
".",
"find",
"(",
"href",
"=",
"re",
".",
"compile",
"(",
"'torrent'",
")",
")",
"[",
"'href'",
"]",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"0",
"]",
"]",
"elif",
"self",
".",
"page",
"==",
"'eztv'",
":",
"titles",
"=",
"[",
"elem",
".",
"get_text",
"(",
")",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"0",
"]",
"]",
"seeders",
"=",
"[",
"elem",
".",
"get_text",
"(",
")",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"4",
"]",
"]",
"leechers",
"=",
"[",
"'-'",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"4",
"]",
"]",
"ages",
"=",
"[",
"elem",
".",
"get_text",
"(",
")",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"3",
"]",
"]",
"sizes",
"=",
"[",
"elem",
".",
"get_text",
"(",
")",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"2",
"]",
"]",
"# Magnets",
"self",
".",
"hrefs",
"=",
"[",
"elem",
".",
"find",
"(",
"href",
"=",
"re",
".",
"compile",
"(",
"'magnet'",
")",
")",
"[",
"'href'",
"]",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"1",
"]",
"]",
"elif",
"self",
".",
"page",
"==",
"'limetorrents'",
":",
"titles",
"=",
"[",
"elem",
".",
"get_text",
"(",
")",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"0",
"]",
"]",
"seeders",
"=",
"[",
"elem",
".",
"get_text",
"(",
")",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"3",
"]",
"]",
"leechers",
"=",
"[",
"elem",
".",
"get_text",
"(",
")",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"4",
"]",
"]",
"ages",
"=",
"[",
"elem",
".",
"get_text",
"(",
")",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"1",
"]",
"]",
"sizes",
"=",
"[",
"elem",
".",
"get_text",
"(",
")",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"2",
"]",
"]",
"# Magnets",
"self",
".",
"hrefs",
"=",
"[",
"elem",
".",
"find",
"(",
"'a'",
",",
"href",
"=",
"re",
".",
"compile",
"(",
"'torrent'",
")",
")",
"[",
"'href'",
"]",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"0",
"]",
"]",
"elif",
"self",
".",
"page",
"==",
"'isohunt'",
":",
"titles",
"=",
"[",
"elem",
".",
"get_text",
"(",
")",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"0",
"]",
"]",
"seeders",
"=",
"[",
"elem",
".",
"get_text",
"(",
")",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"5",
"]",
"]",
"leechers",
"=",
"[",
"'-'",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"5",
"]",
"]",
"ages",
"=",
"[",
"elem",
".",
"get_text",
"(",
")",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"3",
"]",
"]",
"sizes",
"=",
"[",
"elem",
".",
"get_text",
"(",
")",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"4",
"]",
"]",
"# Torrents",
"self",
".",
"hrefs",
"=",
"[",
"self",
".",
"domain",
"+",
"elem",
".",
"find",
"(",
"href",
"=",
"re",
".",
"compile",
"(",
"'torrent_details'",
")",
")",
"[",
"'href'",
"]",
"for",
"elem",
"in",
"self",
".",
"elements",
"[",
"0",
"]",
"]",
"else",
":",
"print",
"(",
"'Error page'",
")",
"self",
".",
"table",
"=",
"[",
"[",
"Colors",
".",
"BOLD",
"+",
"UnicodeDammit",
"(",
"titles",
"[",
"i",
"]",
"[",
":",
"75",
"]",
".",
"strip",
"(",
")",
",",
"[",
"\"utf-8\"",
"]",
")",
".",
"unicode_markup",
"+",
"Colors",
".",
"ENDC",
"if",
"(",
"i",
"+",
"1",
")",
"%",
"2",
"==",
"0",
"else",
"UnicodeDammit",
"(",
"titles",
"[",
"i",
"]",
"[",
":",
"75",
"]",
".",
"strip",
"(",
")",
")",
".",
"unicode_markup",
",",
"Colors",
".",
"SEEDER",
"+",
"seeders",
"[",
"i",
"]",
".",
"strip",
"(",
")",
"+",
"Colors",
".",
"ENDC",
"if",
"(",
"i",
"+",
"1",
")",
"%",
"2",
"==",
"0",
"else",
"Colors",
".",
"LGREEN",
"+",
"seeders",
"[",
"i",
"]",
".",
"strip",
"(",
")",
"+",
"Colors",
".",
"ENDC",
",",
"Colors",
".",
"LEECHER",
"+",
"leechers",
"[",
"i",
"]",
".",
"strip",
"(",
")",
"+",
"Colors",
".",
"ENDC",
"if",
"(",
"i",
"+",
"1",
")",
"%",
"2",
"==",
"0",
"else",
"Colors",
".",
"LRED",
"+",
"leechers",
"[",
"i",
"]",
".",
"strip",
"(",
")",
"+",
"Colors",
".",
"ENDC",
",",
"Colors",
".",
"LIGHTBLUE",
"+",
"ages",
"[",
"i",
"]",
".",
"strip",
"(",
")",
"+",
"Colors",
".",
"ENDC",
"if",
"(",
"i",
"+",
"1",
")",
"%",
"2",
"==",
"0",
"else",
"Colors",
".",
"BLUE",
"+",
"ages",
"[",
"i",
"]",
".",
"strip",
"(",
")",
"+",
"Colors",
".",
"ENDC",
",",
"Colors",
".",
"PINK",
"+",
"sizes",
"[",
"i",
"]",
".",
"strip",
"(",
")",
"+",
"Colors",
".",
"ENDC",
"if",
"(",
"i",
"+",
"1",
")",
"%",
"2",
"==",
"0",
"else",
"Colors",
".",
"PURPLE",
"+",
"sizes",
"[",
"i",
"]",
".",
"strip",
"(",
")",
"+",
"Colors",
".",
"ENDC",
"]",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"self",
".",
"hrefs",
")",
")",
"]",
"print",
"(",
"tabulate",
"(",
"self",
".",
"table",
",",
"headers",
"=",
"headers",
",",
"tablefmt",
"=",
"'psql'",
",",
"numalign",
"=",
"'right'",
",",
"stralign",
"=",
"'left'",
",",
"showindex",
"=",
"True",
")",
")"
] |
Build table.
|
[
"Build",
"table",
"."
] |
32761fe18b3112e6e3754da863488b50929fcc41
|
https://github.com/ocslegna/auto_py_torrent/blob/32761fe18b3112e6e3754da863488b50929fcc41/auto_py_torrent/auto_py_torrent.py#L292-L408
|
train
|
ocslegna/auto_py_torrent
|
auto_py_torrent/auto_py_torrent.py
|
AutoPy.soupify
|
def soupify(self):
    """Parse the search-results page into a magnet or table elements.

    In 'best_rated' mode the single top result is resolved immediately
    (directly to a magnet, or via ``get_magnet`` for sites that only
    link a detail page).  Otherwise up to 30 result rows are collected
    and transposed with ``zip(*...)`` into ``self.elements`` — one
    tuple per table column — for ``build_table``.  The slice offsets
    ([1:], [2:], [1:-1], ...) skip each site's header/footer rows and
    non-data cells and are load-bearing per site.
    """
    soup = BeautifulSoup(self.content_page.content, 'lxml')
    if self.page == 'torrent_project':
        main = soup.find('div', {'id': 'similarfiles'})
        if self.mode_search == 'best_rated':
            rated_url = self.domain + \
                main.find(href=re.compile('torrent.html'))['href']
            self.get_magnet(rated_url)
        else:
            # First two divs are header rows; results are span columns.
            divs = main.find_all('div', limit=30)[2:]
            self.elements = list(
                zip(*[d.find_all('span', recursive=False)
                      for d in divs])) # Torrents
    elif self.page == 'the_pirate_bay':
        main = soup.find('table', {'id': 'searchResult'})
        if self.mode_search == 'best_rated':
            rated_url = self.domain + \
                main.find('a', href=re.compile('torrent'))['href']
            self.get_magnet(rated_url)
        else:
            # Skip the header row; drop each row's first (category) cell.
            trs = main.find_all('tr', limit=30)[1:]
            self.elements = list(
                zip(*[tr.find_all('td', recursive=False)[1:]
                      for tr in trs])) # Magnets
    elif self.page == '1337x':
        main = soup.find('table', {'class': 'table'})
        if self.mode_search == 'best_rated':
            rated_url = self.domain + \
                main.find('a', href=re.compile('torrent'))['href']
            self.get_magnet(rated_url)
        else:
            # Skip the header row; drop each row's last cell.
            trs = main.find_all('tr', limit=30)[1:]
            self.elements = list(
                zip(*([tr.find_all('td', recursive=False)[:-1]
                       for tr in trs]))) # Torrents
    elif self.page == 'eztv':
        # The third forum_header_border table holds the results.
        main = soup.find_all('table', {'class': 'forum_header_border'})[2]
        if self.mode_search == 'best_rated':
            # eztv links magnets directly; no detail page needed.
            self.magnet = main.find('a', href=re.compile('magnet'))['href']
        else:
            trs = main.find_all('tr', limit=30)[2:]
            self.elements = list(
                zip(*([tr.find_all('td', recursive=False)[1:-1]
                       for tr in trs]))) # Magnets
    elif self.page == 'limetorrents':
        main = soup.find('table', {'class': 'table2'})
        if self.mode_search == 'best_rated':
            self.magnet = main.find(
                'a', href=re.compile('torrent'))['href']
        else:
            trs = main.find_all('tr', limit=30)[1:]
            self.elements = list(
                zip(*([tr.find_all('td', recursive=False)[:-1]
                       for tr in trs]))) # Magnets
    elif self.page == 'isohunt':
        main = soup.find('table', {'class': 'table'})
        if self.mode_search == 'best_rated':
            rated_url = self.domain + \
                main.find('a', href=re.compile(
                    'torrent_details'))['href']
            self.get_magnet(rated_url)
        else:
            # Skip both the header and the trailing footer row.
            trs = main.find_all('tr', limit=30)[1:-1]
            self.elements = list(
                zip(*([tr.find_all('td', recursive=False)[1:-1]
                       for tr in trs]))) # Torrent
    else:
        print('Cannot soupify current page. Try again.')
|
python
|
def soupify(self):
"""Get proper torrent/magnet information.
If search_mode is rated then get torrent/magnet.
If not, get all the elements to build the table.
There are different ways for each page.
"""
soup = BeautifulSoup(self.content_page.content, 'lxml')
if self.page == 'torrent_project':
main = soup.find('div', {'id': 'similarfiles'})
if self.mode_search == 'best_rated':
rated_url = self.domain + \
main.find(href=re.compile('torrent.html'))['href']
self.get_magnet(rated_url)
else:
divs = main.find_all('div', limit=30)[2:]
self.elements = list(
zip(*[d.find_all('span', recursive=False)
for d in divs])) # Torrents
elif self.page == 'the_pirate_bay':
main = soup.find('table', {'id': 'searchResult'})
if self.mode_search == 'best_rated':
rated_url = self.domain + \
main.find('a', href=re.compile('torrent'))['href']
self.get_magnet(rated_url)
else:
trs = main.find_all('tr', limit=30)[1:]
self.elements = list(
zip(*[tr.find_all('td', recursive=False)[1:]
for tr in trs])) # Magnets
elif self.page == '1337x':
main = soup.find('table', {'class': 'table'})
if self.mode_search == 'best_rated':
rated_url = self.domain + \
main.find('a', href=re.compile('torrent'))['href']
self.get_magnet(rated_url)
else:
trs = main.find_all('tr', limit=30)[1:]
self.elements = list(
zip(*([tr.find_all('td', recursive=False)[:-1]
for tr in trs]))) # Torrents
elif self.page == 'eztv':
main = soup.find_all('table', {'class': 'forum_header_border'})[2]
if self.mode_search == 'best_rated':
self.magnet = main.find('a', href=re.compile('magnet'))['href']
else:
trs = main.find_all('tr', limit=30)[2:]
self.elements = list(
zip(*([tr.find_all('td', recursive=False)[1:-1]
for tr in trs]))) # Magnets
elif self.page == 'limetorrents':
main = soup.find('table', {'class': 'table2'})
if self.mode_search == 'best_rated':
self.magnet = main.find(
'a', href=re.compile('torrent'))['href']
else:
trs = main.find_all('tr', limit=30)[1:]
self.elements = list(
zip(*([tr.find_all('td', recursive=False)[:-1]
for tr in trs]))) # Magnets
elif self.page == 'isohunt':
main = soup.find('table', {'class': 'table'})
if self.mode_search == 'best_rated':
rated_url = self.domain + \
main.find('a', href=re.compile(
'torrent_details'))['href']
self.get_magnet(rated_url)
else:
trs = main.find_all('tr', limit=30)[1:-1]
self.elements = list(
zip(*([tr.find_all('td', recursive=False)[1:-1]
for tr in trs]))) # Torrent
else:
print('Cannot soupify current page. Try again.')
|
[
"def",
"soupify",
"(",
"self",
")",
":",
"soup",
"=",
"BeautifulSoup",
"(",
"self",
".",
"content_page",
".",
"content",
",",
"'lxml'",
")",
"if",
"self",
".",
"page",
"==",
"'torrent_project'",
":",
"main",
"=",
"soup",
".",
"find",
"(",
"'div'",
",",
"{",
"'id'",
":",
"'similarfiles'",
"}",
")",
"if",
"self",
".",
"mode_search",
"==",
"'best_rated'",
":",
"rated_url",
"=",
"self",
".",
"domain",
"+",
"main",
".",
"find",
"(",
"href",
"=",
"re",
".",
"compile",
"(",
"'torrent.html'",
")",
")",
"[",
"'href'",
"]",
"self",
".",
"get_magnet",
"(",
"rated_url",
")",
"else",
":",
"divs",
"=",
"main",
".",
"find_all",
"(",
"'div'",
",",
"limit",
"=",
"30",
")",
"[",
"2",
":",
"]",
"self",
".",
"elements",
"=",
"list",
"(",
"zip",
"(",
"*",
"[",
"d",
".",
"find_all",
"(",
"'span'",
",",
"recursive",
"=",
"False",
")",
"for",
"d",
"in",
"divs",
"]",
")",
")",
"# Torrents",
"elif",
"self",
".",
"page",
"==",
"'the_pirate_bay'",
":",
"main",
"=",
"soup",
".",
"find",
"(",
"'table'",
",",
"{",
"'id'",
":",
"'searchResult'",
"}",
")",
"if",
"self",
".",
"mode_search",
"==",
"'best_rated'",
":",
"rated_url",
"=",
"self",
".",
"domain",
"+",
"main",
".",
"find",
"(",
"'a'",
",",
"href",
"=",
"re",
".",
"compile",
"(",
"'torrent'",
")",
")",
"[",
"'href'",
"]",
"self",
".",
"get_magnet",
"(",
"rated_url",
")",
"else",
":",
"trs",
"=",
"main",
".",
"find_all",
"(",
"'tr'",
",",
"limit",
"=",
"30",
")",
"[",
"1",
":",
"]",
"self",
".",
"elements",
"=",
"list",
"(",
"zip",
"(",
"*",
"[",
"tr",
".",
"find_all",
"(",
"'td'",
",",
"recursive",
"=",
"False",
")",
"[",
"1",
":",
"]",
"for",
"tr",
"in",
"trs",
"]",
")",
")",
"# Magnets",
"elif",
"self",
".",
"page",
"==",
"'1337x'",
":",
"main",
"=",
"soup",
".",
"find",
"(",
"'table'",
",",
"{",
"'class'",
":",
"'table'",
"}",
")",
"if",
"self",
".",
"mode_search",
"==",
"'best_rated'",
":",
"rated_url",
"=",
"self",
".",
"domain",
"+",
"main",
".",
"find",
"(",
"'a'",
",",
"href",
"=",
"re",
".",
"compile",
"(",
"'torrent'",
")",
")",
"[",
"'href'",
"]",
"self",
".",
"get_magnet",
"(",
"rated_url",
")",
"else",
":",
"trs",
"=",
"main",
".",
"find_all",
"(",
"'tr'",
",",
"limit",
"=",
"30",
")",
"[",
"1",
":",
"]",
"self",
".",
"elements",
"=",
"list",
"(",
"zip",
"(",
"*",
"(",
"[",
"tr",
".",
"find_all",
"(",
"'td'",
",",
"recursive",
"=",
"False",
")",
"[",
":",
"-",
"1",
"]",
"for",
"tr",
"in",
"trs",
"]",
")",
")",
")",
"# Torrents",
"elif",
"self",
".",
"page",
"==",
"'eztv'",
":",
"main",
"=",
"soup",
".",
"find_all",
"(",
"'table'",
",",
"{",
"'class'",
":",
"'forum_header_border'",
"}",
")",
"[",
"2",
"]",
"if",
"self",
".",
"mode_search",
"==",
"'best_rated'",
":",
"self",
".",
"magnet",
"=",
"main",
".",
"find",
"(",
"'a'",
",",
"href",
"=",
"re",
".",
"compile",
"(",
"'magnet'",
")",
")",
"[",
"'href'",
"]",
"else",
":",
"trs",
"=",
"main",
".",
"find_all",
"(",
"'tr'",
",",
"limit",
"=",
"30",
")",
"[",
"2",
":",
"]",
"self",
".",
"elements",
"=",
"list",
"(",
"zip",
"(",
"*",
"(",
"[",
"tr",
".",
"find_all",
"(",
"'td'",
",",
"recursive",
"=",
"False",
")",
"[",
"1",
":",
"-",
"1",
"]",
"for",
"tr",
"in",
"trs",
"]",
")",
")",
")",
"# Magnets",
"elif",
"self",
".",
"page",
"==",
"'limetorrents'",
":",
"main",
"=",
"soup",
".",
"find",
"(",
"'table'",
",",
"{",
"'class'",
":",
"'table2'",
"}",
")",
"if",
"self",
".",
"mode_search",
"==",
"'best_rated'",
":",
"self",
".",
"magnet",
"=",
"main",
".",
"find",
"(",
"'a'",
",",
"href",
"=",
"re",
".",
"compile",
"(",
"'torrent'",
")",
")",
"[",
"'href'",
"]",
"else",
":",
"trs",
"=",
"main",
".",
"find_all",
"(",
"'tr'",
",",
"limit",
"=",
"30",
")",
"[",
"1",
":",
"]",
"self",
".",
"elements",
"=",
"list",
"(",
"zip",
"(",
"*",
"(",
"[",
"tr",
".",
"find_all",
"(",
"'td'",
",",
"recursive",
"=",
"False",
")",
"[",
":",
"-",
"1",
"]",
"for",
"tr",
"in",
"trs",
"]",
")",
")",
")",
"# Magnets",
"elif",
"self",
".",
"page",
"==",
"'isohunt'",
":",
"main",
"=",
"soup",
".",
"find",
"(",
"'table'",
",",
"{",
"'class'",
":",
"'table'",
"}",
")",
"if",
"self",
".",
"mode_search",
"==",
"'best_rated'",
":",
"rated_url",
"=",
"self",
".",
"domain",
"+",
"main",
".",
"find",
"(",
"'a'",
",",
"href",
"=",
"re",
".",
"compile",
"(",
"'torrent_details'",
")",
")",
"[",
"'href'",
"]",
"self",
".",
"get_magnet",
"(",
"rated_url",
")",
"else",
":",
"trs",
"=",
"main",
".",
"find_all",
"(",
"'tr'",
",",
"limit",
"=",
"30",
")",
"[",
"1",
":",
"-",
"1",
"]",
"self",
".",
"elements",
"=",
"list",
"(",
"zip",
"(",
"*",
"(",
"[",
"tr",
".",
"find_all",
"(",
"'td'",
",",
"recursive",
"=",
"False",
")",
"[",
"1",
":",
"-",
"1",
"]",
"for",
"tr",
"in",
"trs",
"]",
")",
")",
")",
"# Torrent",
"else",
":",
"print",
"(",
"'Cannot soupify current page. Try again.'",
")"
] |
Get proper torrent/magnet information.
If search_mode is rated then get torrent/magnet.
If not, get all the elements to build the table.
There are different ways for each page.
|
[
"Get",
"proper",
"torrent",
"/",
"magnet",
"information",
"."
] |
32761fe18b3112e6e3754da863488b50929fcc41
|
https://github.com/ocslegna/auto_py_torrent/blob/32761fe18b3112e6e3754da863488b50929fcc41/auto_py_torrent/auto_py_torrent.py#L410-L488
|
train
|
ocslegna/auto_py_torrent
|
auto_py_torrent/auto_py_torrent.py
|
AutoPy.handle_select
|
def handle_select(self):
"""Handle user's input in list mode."""
self.selected = input('>> ')
if self.selected in ['Q', 'q']:
sys.exit(1)
elif self.selected in ['B', 'b']:
self.back_to_menu = True
return True
elif is_num(self.selected):
if 0 <= int(self.selected) <= len(self.hrefs) - 1:
self.back_to_menu = False
return True
else:
print(Colors.FAIL +
'Wrong index. ' +
'Please select an appropiate one or other option.' +
Colors.ENDC)
return False
else:
print(Colors.FAIL +
'Invalid input. ' +
'Please select an appropiate one or other option.' +
Colors.ENDC)
return False
|
python
|
def handle_select(self):
"""Handle user's input in list mode."""
self.selected = input('>> ')
if self.selected in ['Q', 'q']:
sys.exit(1)
elif self.selected in ['B', 'b']:
self.back_to_menu = True
return True
elif is_num(self.selected):
if 0 <= int(self.selected) <= len(self.hrefs) - 1:
self.back_to_menu = False
return True
else:
print(Colors.FAIL +
'Wrong index. ' +
'Please select an appropiate one or other option.' +
Colors.ENDC)
return False
else:
print(Colors.FAIL +
'Invalid input. ' +
'Please select an appropiate one or other option.' +
Colors.ENDC)
return False
|
[
"def",
"handle_select",
"(",
"self",
")",
":",
"self",
".",
"selected",
"=",
"input",
"(",
"'>> '",
")",
"if",
"self",
".",
"selected",
"in",
"[",
"'Q'",
",",
"'q'",
"]",
":",
"sys",
".",
"exit",
"(",
"1",
")",
"elif",
"self",
".",
"selected",
"in",
"[",
"'B'",
",",
"'b'",
"]",
":",
"self",
".",
"back_to_menu",
"=",
"True",
"return",
"True",
"elif",
"is_num",
"(",
"self",
".",
"selected",
")",
":",
"if",
"0",
"<=",
"int",
"(",
"self",
".",
"selected",
")",
"<=",
"len",
"(",
"self",
".",
"hrefs",
")",
"-",
"1",
":",
"self",
".",
"back_to_menu",
"=",
"False",
"return",
"True",
"else",
":",
"print",
"(",
"Colors",
".",
"FAIL",
"+",
"'Wrong index. '",
"+",
"'Please select an appropiate one or other option.'",
"+",
"Colors",
".",
"ENDC",
")",
"return",
"False",
"else",
":",
"print",
"(",
"Colors",
".",
"FAIL",
"+",
"'Invalid input. '",
"+",
"'Please select an appropiate one or other option.'",
"+",
"Colors",
".",
"ENDC",
")",
"return",
"False"
] |
Handle user's input in list mode.
|
[
"Handle",
"user",
"s",
"input",
"in",
"list",
"mode",
"."
] |
32761fe18b3112e6e3754da863488b50929fcc41
|
https://github.com/ocslegna/auto_py_torrent/blob/32761fe18b3112e6e3754da863488b50929fcc41/auto_py_torrent/auto_py_torrent.py#L490-L513
|
train
|
ocslegna/auto_py_torrent
|
auto_py_torrent/auto_py_torrent.py
|
AutoPy.select_torrent
|
def select_torrent(self):
"""Select torrent.
First check if specific element/info is obtained in content_page.
Specify to user if it wants best rated torrent or select one from list.
If the user wants best rated: Directly obtain magnet/torrent.
Else: build table with all data and enable the user select the torrent.
"""
try:
self.found_torrents = not bool(self.key_search in
self.content_page.text)
if not self.found_torrents:
print('No torrents found.')
sys.exit(1)
self.soupify()
if self.mode_search == 'list':
self.build_table()
if len(self.hrefs) == 1:
print('Press "0" to download it.')
elif len(self.hrefs) >= 2:
print('\nSelect one of the following torrents. ' +
'Enter a number between: 0 and ' +
str(len(self.hrefs) - 1))
print('If you want to exit write "' +
Colors.LRED + 'Q' + Colors.ENDC + '" or "' +
Colors.LRED + 'q' + Colors.ENDC + '".')
print('If you want to go back to menu and search again write "' +
Colors.LGREEN + 'B' + Colors.ENDC + '" or "' +
Colors.LGREEN + 'b' + Colors.ENDC + '".')
while not(self.picked_choice):
self.picked_choice = self.handle_select()
except Exception:
print('ERROR select_torrent: ')
logging.error(traceback.format_exc())
sys.exit(0)
|
python
|
def select_torrent(self):
"""Select torrent.
First check if specific element/info is obtained in content_page.
Specify to user if it wants best rated torrent or select one from list.
If the user wants best rated: Directly obtain magnet/torrent.
Else: build table with all data and enable the user select the torrent.
"""
try:
self.found_torrents = not bool(self.key_search in
self.content_page.text)
if not self.found_torrents:
print('No torrents found.')
sys.exit(1)
self.soupify()
if self.mode_search == 'list':
self.build_table()
if len(self.hrefs) == 1:
print('Press "0" to download it.')
elif len(self.hrefs) >= 2:
print('\nSelect one of the following torrents. ' +
'Enter a number between: 0 and ' +
str(len(self.hrefs) - 1))
print('If you want to exit write "' +
Colors.LRED + 'Q' + Colors.ENDC + '" or "' +
Colors.LRED + 'q' + Colors.ENDC + '".')
print('If you want to go back to menu and search again write "' +
Colors.LGREEN + 'B' + Colors.ENDC + '" or "' +
Colors.LGREEN + 'b' + Colors.ENDC + '".')
while not(self.picked_choice):
self.picked_choice = self.handle_select()
except Exception:
print('ERROR select_torrent: ')
logging.error(traceback.format_exc())
sys.exit(0)
|
[
"def",
"select_torrent",
"(",
"self",
")",
":",
"try",
":",
"self",
".",
"found_torrents",
"=",
"not",
"bool",
"(",
"self",
".",
"key_search",
"in",
"self",
".",
"content_page",
".",
"text",
")",
"if",
"not",
"self",
".",
"found_torrents",
":",
"print",
"(",
"'No torrents found.'",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"self",
".",
"soupify",
"(",
")",
"if",
"self",
".",
"mode_search",
"==",
"'list'",
":",
"self",
".",
"build_table",
"(",
")",
"if",
"len",
"(",
"self",
".",
"hrefs",
")",
"==",
"1",
":",
"print",
"(",
"'Press \"0\" to download it.'",
")",
"elif",
"len",
"(",
"self",
".",
"hrefs",
")",
">=",
"2",
":",
"print",
"(",
"'\\nSelect one of the following torrents. '",
"+",
"'Enter a number between: 0 and '",
"+",
"str",
"(",
"len",
"(",
"self",
".",
"hrefs",
")",
"-",
"1",
")",
")",
"print",
"(",
"'If you want to exit write \"'",
"+",
"Colors",
".",
"LRED",
"+",
"'Q'",
"+",
"Colors",
".",
"ENDC",
"+",
"'\" or \"'",
"+",
"Colors",
".",
"LRED",
"+",
"'q'",
"+",
"Colors",
".",
"ENDC",
"+",
"'\".'",
")",
"print",
"(",
"'If you want to go back to menu and search again write \"'",
"+",
"Colors",
".",
"LGREEN",
"+",
"'B'",
"+",
"Colors",
".",
"ENDC",
"+",
"'\" or \"'",
"+",
"Colors",
".",
"LGREEN",
"+",
"'b'",
"+",
"Colors",
".",
"ENDC",
"+",
"'\".'",
")",
"while",
"not",
"(",
"self",
".",
"picked_choice",
")",
":",
"self",
".",
"picked_choice",
"=",
"self",
".",
"handle_select",
"(",
")",
"except",
"Exception",
":",
"print",
"(",
"'ERROR select_torrent: '",
")",
"logging",
".",
"error",
"(",
"traceback",
".",
"format_exc",
"(",
")",
")",
"sys",
".",
"exit",
"(",
"0",
")"
] |
Select torrent.
First check if specific element/info is obtained in content_page.
Specify to user if it wants best rated torrent or select one from list.
If the user wants best rated: Directly obtain magnet/torrent.
Else: build table with all data and enable the user select the torrent.
|
[
"Select",
"torrent",
"."
] |
32761fe18b3112e6e3754da863488b50929fcc41
|
https://github.com/ocslegna/auto_py_torrent/blob/32761fe18b3112e6e3754da863488b50929fcc41/auto_py_torrent/auto_py_torrent.py#L515-L550
|
train
|
ocslegna/auto_py_torrent
|
auto_py_torrent/auto_py_torrent.py
|
AutoPy.build_url
|
def build_url(self):
"""Build appropiate encoded URL.
This implies the same way of searching a torrent as in the page itself.
"""
url = requests.utils.requote_uri(
self.torrent_page + self.string_search)
if self.page == '1337x':
return(url + '/1/')
elif self.page == 'limetorrents':
return(url + '/')
else:
return(url)
|
python
|
def build_url(self):
"""Build appropiate encoded URL.
This implies the same way of searching a torrent as in the page itself.
"""
url = requests.utils.requote_uri(
self.torrent_page + self.string_search)
if self.page == '1337x':
return(url + '/1/')
elif self.page == 'limetorrents':
return(url + '/')
else:
return(url)
|
[
"def",
"build_url",
"(",
"self",
")",
":",
"url",
"=",
"requests",
".",
"utils",
".",
"requote_uri",
"(",
"self",
".",
"torrent_page",
"+",
"self",
".",
"string_search",
")",
"if",
"self",
".",
"page",
"==",
"'1337x'",
":",
"return",
"(",
"url",
"+",
"'/1/'",
")",
"elif",
"self",
".",
"page",
"==",
"'limetorrents'",
":",
"return",
"(",
"url",
"+",
"'/'",
")",
"else",
":",
"return",
"(",
"url",
")"
] |
Build appropiate encoded URL.
This implies the same way of searching a torrent as in the page itself.
|
[
"Build",
"appropiate",
"encoded",
"URL",
"."
] |
32761fe18b3112e6e3754da863488b50929fcc41
|
https://github.com/ocslegna/auto_py_torrent/blob/32761fe18b3112e6e3754da863488b50929fcc41/auto_py_torrent/auto_py_torrent.py#L552-L564
|
train
|
ocslegna/auto_py_torrent
|
auto_py_torrent/auto_py_torrent.py
|
AutoPy.get_content
|
def get_content(self):
"""Get content of the page through url."""
url = self.build_url()
try:
self.content_page = requests.get(url)
if not(self.content_page.status_code == requests.codes.ok):
self.content_page.raise_for_status()
except requests.exceptions.RequestException as ex:
logging.info('A requests exception has ocurred: ' + str(ex))
logging.error(traceback.format_exc())
sys.exit(0)
|
python
|
def get_content(self):
"""Get content of the page through url."""
url = self.build_url()
try:
self.content_page = requests.get(url)
if not(self.content_page.status_code == requests.codes.ok):
self.content_page.raise_for_status()
except requests.exceptions.RequestException as ex:
logging.info('A requests exception has ocurred: ' + str(ex))
logging.error(traceback.format_exc())
sys.exit(0)
|
[
"def",
"get_content",
"(",
"self",
")",
":",
"url",
"=",
"self",
".",
"build_url",
"(",
")",
"try",
":",
"self",
".",
"content_page",
"=",
"requests",
".",
"get",
"(",
"url",
")",
"if",
"not",
"(",
"self",
".",
"content_page",
".",
"status_code",
"==",
"requests",
".",
"codes",
".",
"ok",
")",
":",
"self",
".",
"content_page",
".",
"raise_for_status",
"(",
")",
"except",
"requests",
".",
"exceptions",
".",
"RequestException",
"as",
"ex",
":",
"logging",
".",
"info",
"(",
"'A requests exception has ocurred: '",
"+",
"str",
"(",
"ex",
")",
")",
"logging",
".",
"error",
"(",
"traceback",
".",
"format_exc",
"(",
")",
")",
"sys",
".",
"exit",
"(",
"0",
")"
] |
Get content of the page through url.
|
[
"Get",
"content",
"of",
"the",
"page",
"through",
"url",
"."
] |
32761fe18b3112e6e3754da863488b50929fcc41
|
https://github.com/ocslegna/auto_py_torrent/blob/32761fe18b3112e6e3754da863488b50929fcc41/auto_py_torrent/auto_py_torrent.py#L566-L576
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/bolt/io.py
|
ChunkedInputBuffer._recycle
|
def _recycle(self):
""" Reclaim buffer space before the origin.
Note: modifies buffer size
"""
origin = self._origin
if origin == 0:
return False
available = self._extent - origin
self._data[:available] = self._data[origin:self._extent]
self._extent = available
self._origin = 0
#log_debug("Recycled %d bytes" % origin)
return True
|
python
|
def _recycle(self):
""" Reclaim buffer space before the origin.
Note: modifies buffer size
"""
origin = self._origin
if origin == 0:
return False
available = self._extent - origin
self._data[:available] = self._data[origin:self._extent]
self._extent = available
self._origin = 0
#log_debug("Recycled %d bytes" % origin)
return True
|
[
"def",
"_recycle",
"(",
"self",
")",
":",
"origin",
"=",
"self",
".",
"_origin",
"if",
"origin",
"==",
"0",
":",
"return",
"False",
"available",
"=",
"self",
".",
"_extent",
"-",
"origin",
"self",
".",
"_data",
"[",
":",
"available",
"]",
"=",
"self",
".",
"_data",
"[",
"origin",
":",
"self",
".",
"_extent",
"]",
"self",
".",
"_extent",
"=",
"available",
"self",
".",
"_origin",
"=",
"0",
"#log_debug(\"Recycled %d bytes\" % origin)",
"return",
"True"
] |
Reclaim buffer space before the origin.
Note: modifies buffer size
|
[
"Reclaim",
"buffer",
"space",
"before",
"the",
"origin",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/bolt/io.py#L180-L193
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/bolt/io.py
|
ChunkedInputBuffer.frame_message
|
def frame_message(self):
""" Construct a frame around the first complete message in the buffer.
"""
if self._frame is not None:
self.discard_message()
panes = []
p = origin = self._origin
extent = self._extent
while p < extent:
available = extent - p
if available < 2:
break
chunk_size, = struct_unpack(">H", self._view[p:(p + 2)])
p += 2
if chunk_size == 0:
self._limit = p
self._frame = MessageFrame(memoryview(self._view[origin:self._limit]), panes)
return True
q = p + chunk_size
panes.append((p - origin, q - origin))
p = q
return False
|
python
|
def frame_message(self):
""" Construct a frame around the first complete message in the buffer.
"""
if self._frame is not None:
self.discard_message()
panes = []
p = origin = self._origin
extent = self._extent
while p < extent:
available = extent - p
if available < 2:
break
chunk_size, = struct_unpack(">H", self._view[p:(p + 2)])
p += 2
if chunk_size == 0:
self._limit = p
self._frame = MessageFrame(memoryview(self._view[origin:self._limit]), panes)
return True
q = p + chunk_size
panes.append((p - origin, q - origin))
p = q
return False
|
[
"def",
"frame_message",
"(",
"self",
")",
":",
"if",
"self",
".",
"_frame",
"is",
"not",
"None",
":",
"self",
".",
"discard_message",
"(",
")",
"panes",
"=",
"[",
"]",
"p",
"=",
"origin",
"=",
"self",
".",
"_origin",
"extent",
"=",
"self",
".",
"_extent",
"while",
"p",
"<",
"extent",
":",
"available",
"=",
"extent",
"-",
"p",
"if",
"available",
"<",
"2",
":",
"break",
"chunk_size",
",",
"=",
"struct_unpack",
"(",
"\">H\"",
",",
"self",
".",
"_view",
"[",
"p",
":",
"(",
"p",
"+",
"2",
")",
"]",
")",
"p",
"+=",
"2",
"if",
"chunk_size",
"==",
"0",
":",
"self",
".",
"_limit",
"=",
"p",
"self",
".",
"_frame",
"=",
"MessageFrame",
"(",
"memoryview",
"(",
"self",
".",
"_view",
"[",
"origin",
":",
"self",
".",
"_limit",
"]",
")",
",",
"panes",
")",
"return",
"True",
"q",
"=",
"p",
"+",
"chunk_size",
"panes",
".",
"append",
"(",
"(",
"p",
"-",
"origin",
",",
"q",
"-",
"origin",
")",
")",
"p",
"=",
"q",
"return",
"False"
] |
Construct a frame around the first complete message in the buffer.
|
[
"Construct",
"a",
"frame",
"around",
"the",
"first",
"complete",
"message",
"in",
"the",
"buffer",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/bolt/io.py#L198-L219
|
train
|
praw-dev/prawcore
|
prawcore/rate_limit.py
|
RateLimiter.call
|
def call(self, request_function, set_header_callback, *args, **kwargs):
"""Rate limit the call to request_function.
:param request_function: A function call that returns an HTTP response
object.
:param set_header_callback: A callback function used to set the request
headers. This callback is called after any necessary sleep time
occurs.
:param *args: The positional arguments to ``request_function``.
:param **kwargs: The keyword arguments to ``request_function``.
"""
self.delay()
kwargs["headers"] = set_header_callback()
response = request_function(*args, **kwargs)
self.update(response.headers)
return response
|
python
|
def call(self, request_function, set_header_callback, *args, **kwargs):
"""Rate limit the call to request_function.
:param request_function: A function call that returns an HTTP response
object.
:param set_header_callback: A callback function used to set the request
headers. This callback is called after any necessary sleep time
occurs.
:param *args: The positional arguments to ``request_function``.
:param **kwargs: The keyword arguments to ``request_function``.
"""
self.delay()
kwargs["headers"] = set_header_callback()
response = request_function(*args, **kwargs)
self.update(response.headers)
return response
|
[
"def",
"call",
"(",
"self",
",",
"request_function",
",",
"set_header_callback",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"delay",
"(",
")",
"kwargs",
"[",
"\"headers\"",
"]",
"=",
"set_header_callback",
"(",
")",
"response",
"=",
"request_function",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"update",
"(",
"response",
".",
"headers",
")",
"return",
"response"
] |
Rate limit the call to request_function.
:param request_function: A function call that returns an HTTP response
object.
:param set_header_callback: A callback function used to set the request
headers. This callback is called after any necessary sleep time
occurs.
:param *args: The positional arguments to ``request_function``.
:param **kwargs: The keyword arguments to ``request_function``.
|
[
"Rate",
"limit",
"the",
"call",
"to",
"request_function",
"."
] |
b16ae88a1f2bf98095ed6fe64851cb7add7ed752
|
https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/prawcore/rate_limit.py#L22-L38
|
train
|
praw-dev/prawcore
|
prawcore/rate_limit.py
|
RateLimiter.delay
|
def delay(self):
"""Sleep for an amount of time to remain under the rate limit."""
if self.next_request_timestamp is None:
return
sleep_seconds = self.next_request_timestamp - time.time()
if sleep_seconds <= 0:
return
message = "Sleeping: {:0.2f} seconds prior to" " call".format(
sleep_seconds
)
log.debug(message)
time.sleep(sleep_seconds)
|
python
|
def delay(self):
"""Sleep for an amount of time to remain under the rate limit."""
if self.next_request_timestamp is None:
return
sleep_seconds = self.next_request_timestamp - time.time()
if sleep_seconds <= 0:
return
message = "Sleeping: {:0.2f} seconds prior to" " call".format(
sleep_seconds
)
log.debug(message)
time.sleep(sleep_seconds)
|
[
"def",
"delay",
"(",
"self",
")",
":",
"if",
"self",
".",
"next_request_timestamp",
"is",
"None",
":",
"return",
"sleep_seconds",
"=",
"self",
".",
"next_request_timestamp",
"-",
"time",
".",
"time",
"(",
")",
"if",
"sleep_seconds",
"<=",
"0",
":",
"return",
"message",
"=",
"\"Sleeping: {:0.2f} seconds prior to\"",
"\" call\"",
".",
"format",
"(",
"sleep_seconds",
")",
"log",
".",
"debug",
"(",
"message",
")",
"time",
".",
"sleep",
"(",
"sleep_seconds",
")"
] |
Sleep for an amount of time to remain under the rate limit.
|
[
"Sleep",
"for",
"an",
"amount",
"of",
"time",
"to",
"remain",
"under",
"the",
"rate",
"limit",
"."
] |
b16ae88a1f2bf98095ed6fe64851cb7add7ed752
|
https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/prawcore/rate_limit.py#L40-L51
|
train
|
praw-dev/prawcore
|
prawcore/rate_limit.py
|
RateLimiter.update
|
def update(self, response_headers):
"""Update the state of the rate limiter based on the response headers.
This method should only be called following a HTTP request to reddit.
Response headers that do not contain x-ratelimit fields will be treated
as a single request. This behavior is to error on the safe-side as such
responses should trigger exceptions that indicate invalid behavior.
"""
if "x-ratelimit-remaining" not in response_headers:
if self.remaining is not None:
self.remaining -= 1
self.used += 1
return
now = time.time()
prev_remaining = self.remaining
seconds_to_reset = int(response_headers["x-ratelimit-reset"])
self.remaining = float(response_headers["x-ratelimit-remaining"])
self.used = int(response_headers["x-ratelimit-used"])
self.reset_timestamp = now + seconds_to_reset
if self.remaining <= 0:
self.next_request_timestamp = self.reset_timestamp
return
if prev_remaining is not None and prev_remaining > self.remaining:
estimated_clients = prev_remaining - self.remaining
else:
estimated_clients = 1.0
self.next_request_timestamp = min(
self.reset_timestamp,
now + (estimated_clients * seconds_to_reset / self.remaining),
)
|
python
|
def update(self, response_headers):
"""Update the state of the rate limiter based on the response headers.
This method should only be called following a HTTP request to reddit.
Response headers that do not contain x-ratelimit fields will be treated
as a single request. This behavior is to error on the safe-side as such
responses should trigger exceptions that indicate invalid behavior.
"""
if "x-ratelimit-remaining" not in response_headers:
if self.remaining is not None:
self.remaining -= 1
self.used += 1
return
now = time.time()
prev_remaining = self.remaining
seconds_to_reset = int(response_headers["x-ratelimit-reset"])
self.remaining = float(response_headers["x-ratelimit-remaining"])
self.used = int(response_headers["x-ratelimit-used"])
self.reset_timestamp = now + seconds_to_reset
if self.remaining <= 0:
self.next_request_timestamp = self.reset_timestamp
return
if prev_remaining is not None and prev_remaining > self.remaining:
estimated_clients = prev_remaining - self.remaining
else:
estimated_clients = 1.0
self.next_request_timestamp = min(
self.reset_timestamp,
now + (estimated_clients * seconds_to_reset / self.remaining),
)
|
[
"def",
"update",
"(",
"self",
",",
"response_headers",
")",
":",
"if",
"\"x-ratelimit-remaining\"",
"not",
"in",
"response_headers",
":",
"if",
"self",
".",
"remaining",
"is",
"not",
"None",
":",
"self",
".",
"remaining",
"-=",
"1",
"self",
".",
"used",
"+=",
"1",
"return",
"now",
"=",
"time",
".",
"time",
"(",
")",
"prev_remaining",
"=",
"self",
".",
"remaining",
"seconds_to_reset",
"=",
"int",
"(",
"response_headers",
"[",
"\"x-ratelimit-reset\"",
"]",
")",
"self",
".",
"remaining",
"=",
"float",
"(",
"response_headers",
"[",
"\"x-ratelimit-remaining\"",
"]",
")",
"self",
".",
"used",
"=",
"int",
"(",
"response_headers",
"[",
"\"x-ratelimit-used\"",
"]",
")",
"self",
".",
"reset_timestamp",
"=",
"now",
"+",
"seconds_to_reset",
"if",
"self",
".",
"remaining",
"<=",
"0",
":",
"self",
".",
"next_request_timestamp",
"=",
"self",
".",
"reset_timestamp",
"return",
"if",
"prev_remaining",
"is",
"not",
"None",
"and",
"prev_remaining",
">",
"self",
".",
"remaining",
":",
"estimated_clients",
"=",
"prev_remaining",
"-",
"self",
".",
"remaining",
"else",
":",
"estimated_clients",
"=",
"1.0",
"self",
".",
"next_request_timestamp",
"=",
"min",
"(",
"self",
".",
"reset_timestamp",
",",
"now",
"+",
"(",
"estimated_clients",
"*",
"seconds_to_reset",
"/",
"self",
".",
"remaining",
")",
",",
")"
] |
Update the state of the rate limiter based on the response headers.
This method should only be called following a HTTP request to reddit.
Response headers that do not contain x-ratelimit fields will be treated
as a single request. This behavior is to error on the safe-side as such
responses should trigger exceptions that indicate invalid behavior.
|
[
"Update",
"the",
"state",
"of",
"the",
"rate",
"limiter",
"based",
"on",
"the",
"response",
"headers",
"."
] |
b16ae88a1f2bf98095ed6fe64851cb7add7ed752
|
https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/prawcore/rate_limit.py#L53-L89
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/addressing.py
|
Resolver.custom_resolve
|
def custom_resolve(self):
""" If a custom resolver is defined, perform custom resolution on
the contained addresses.
:return:
"""
if not callable(self.custom_resolver):
return
new_addresses = []
for address in self.addresses:
for new_address in self.custom_resolver(address):
new_addresses.append(new_address)
self.addresses = new_addresses
|
python
|
def custom_resolve(self):
""" If a custom resolver is defined, perform custom resolution on
the contained addresses.
:return:
"""
if not callable(self.custom_resolver):
return
new_addresses = []
for address in self.addresses:
for new_address in self.custom_resolver(address):
new_addresses.append(new_address)
self.addresses = new_addresses
|
[
"def",
"custom_resolve",
"(",
"self",
")",
":",
"if",
"not",
"callable",
"(",
"self",
".",
"custom_resolver",
")",
":",
"return",
"new_addresses",
"=",
"[",
"]",
"for",
"address",
"in",
"self",
".",
"addresses",
":",
"for",
"new_address",
"in",
"self",
".",
"custom_resolver",
"(",
"address",
")",
":",
"new_addresses",
".",
"append",
"(",
"new_address",
")",
"self",
".",
"addresses",
"=",
"new_addresses"
] |
If a custom resolver is defined, perform custom resolution on
the contained addresses.
:return:
|
[
"If",
"a",
"custom",
"resolver",
"is",
"defined",
"perform",
"custom",
"resolution",
"on",
"the",
"contained",
"addresses",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/addressing.py#L109-L121
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/addressing.py
|
Resolver.dns_resolve
|
def dns_resolve(self):
""" Perform DNS resolution on the contained addresses.
:return:
"""
new_addresses = []
for address in self.addresses:
try:
info = getaddrinfo(address[0], address[1], 0, SOCK_STREAM, IPPROTO_TCP)
except gaierror:
raise AddressError("Cannot resolve address {!r}".format(address))
else:
for _, _, _, _, address in info:
if len(address) == 4 and address[3] != 0:
# skip any IPv6 addresses with a non-zero scope id
# as these appear to cause problems on some platforms
continue
new_addresses.append(address)
self.addresses = new_addresses
|
python
|
def dns_resolve(self):
""" Perform DNS resolution on the contained addresses.
:return:
"""
new_addresses = []
for address in self.addresses:
try:
info = getaddrinfo(address[0], address[1], 0, SOCK_STREAM, IPPROTO_TCP)
except gaierror:
raise AddressError("Cannot resolve address {!r}".format(address))
else:
for _, _, _, _, address in info:
if len(address) == 4 and address[3] != 0:
# skip any IPv6 addresses with a non-zero scope id
# as these appear to cause problems on some platforms
continue
new_addresses.append(address)
self.addresses = new_addresses
|
[
"def",
"dns_resolve",
"(",
"self",
")",
":",
"new_addresses",
"=",
"[",
"]",
"for",
"address",
"in",
"self",
".",
"addresses",
":",
"try",
":",
"info",
"=",
"getaddrinfo",
"(",
"address",
"[",
"0",
"]",
",",
"address",
"[",
"1",
"]",
",",
"0",
",",
"SOCK_STREAM",
",",
"IPPROTO_TCP",
")",
"except",
"gaierror",
":",
"raise",
"AddressError",
"(",
"\"Cannot resolve address {!r}\"",
".",
"format",
"(",
"address",
")",
")",
"else",
":",
"for",
"_",
",",
"_",
",",
"_",
",",
"_",
",",
"address",
"in",
"info",
":",
"if",
"len",
"(",
"address",
")",
"==",
"4",
"and",
"address",
"[",
"3",
"]",
"!=",
"0",
":",
"# skip any IPv6 addresses with a non-zero scope id",
"# as these appear to cause problems on some platforms",
"continue",
"new_addresses",
".",
"append",
"(",
"address",
")",
"self",
".",
"addresses",
"=",
"new_addresses"
] |
Perform DNS resolution on the contained addresses.
:return:
|
[
"Perform",
"DNS",
"resolution",
"on",
"the",
"contained",
"addresses",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/addressing.py#L123-L141
|
train
|
cuzzo/iw_parse
|
iw_parse.py
|
get_quality
|
def get_quality(cell):
""" Gets the quality of a network / cell.
@param string cell
A network / cell from iwlist scan.
@return string
The quality of the network.
"""
quality = matching_line(cell, "Quality=")
if quality is None:
return ""
quality = quality.split()[0].split("/")
quality = matching_line(cell, "Quality=").split()[0].split("/")
return str(int(round(float(quality[0]) / float(quality[1]) * 100)))
|
python
|
def get_quality(cell):
""" Gets the quality of a network / cell.
@param string cell
A network / cell from iwlist scan.
@return string
The quality of the network.
"""
quality = matching_line(cell, "Quality=")
if quality is None:
return ""
quality = quality.split()[0].split("/")
quality = matching_line(cell, "Quality=").split()[0].split("/")
return str(int(round(float(quality[0]) / float(quality[1]) * 100)))
|
[
"def",
"get_quality",
"(",
"cell",
")",
":",
"quality",
"=",
"matching_line",
"(",
"cell",
",",
"\"Quality=\"",
")",
"if",
"quality",
"is",
"None",
":",
"return",
"\"\"",
"quality",
"=",
"quality",
".",
"split",
"(",
")",
"[",
"0",
"]",
".",
"split",
"(",
"\"/\"",
")",
"quality",
"=",
"matching_line",
"(",
"cell",
",",
"\"Quality=\"",
")",
".",
"split",
"(",
")",
"[",
"0",
"]",
".",
"split",
"(",
"\"/\"",
")",
"return",
"str",
"(",
"int",
"(",
"round",
"(",
"float",
"(",
"quality",
"[",
"0",
"]",
")",
"/",
"float",
"(",
"quality",
"[",
"1",
"]",
")",
"*",
"100",
")",
")",
")"
] |
Gets the quality of a network / cell.
@param string cell
A network / cell from iwlist scan.
@return string
The quality of the network.
|
[
"Gets",
"the",
"quality",
"of",
"a",
"network",
"/",
"cell",
".",
"@param",
"string",
"cell",
"A",
"network",
"/",
"cell",
"from",
"iwlist",
"scan",
"."
] |
84c287dc6cfceb04ccbc0a8995f8a87323356ee5
|
https://github.com/cuzzo/iw_parse/blob/84c287dc6cfceb04ccbc0a8995f8a87323356ee5/iw_parse.py#L29-L43
|
train
|
cuzzo/iw_parse
|
iw_parse.py
|
get_signal_level
|
def get_signal_level(cell):
""" Gets the signal level of a network / cell.
@param string cell
A network / cell from iwlist scan.
@return string
The signal level of the network.
"""
signal = matching_line(cell, "Signal level=")
if signal is None:
return ""
signal = signal.split("=")[1].split("/")
if len(signal) == 2:
return str(int(round(float(signal[0]) / float(signal[1]) * 100)))
elif len(signal) == 1:
return signal[0].split(' ')[0]
else:
return ""
|
python
|
def get_signal_level(cell):
""" Gets the signal level of a network / cell.
@param string cell
A network / cell from iwlist scan.
@return string
The signal level of the network.
"""
signal = matching_line(cell, "Signal level=")
if signal is None:
return ""
signal = signal.split("=")[1].split("/")
if len(signal) == 2:
return str(int(round(float(signal[0]) / float(signal[1]) * 100)))
elif len(signal) == 1:
return signal[0].split(' ')[0]
else:
return ""
|
[
"def",
"get_signal_level",
"(",
"cell",
")",
":",
"signal",
"=",
"matching_line",
"(",
"cell",
",",
"\"Signal level=\"",
")",
"if",
"signal",
"is",
"None",
":",
"return",
"\"\"",
"signal",
"=",
"signal",
".",
"split",
"(",
"\"=\"",
")",
"[",
"1",
"]",
".",
"split",
"(",
"\"/\"",
")",
"if",
"len",
"(",
"signal",
")",
"==",
"2",
":",
"return",
"str",
"(",
"int",
"(",
"round",
"(",
"float",
"(",
"signal",
"[",
"0",
"]",
")",
"/",
"float",
"(",
"signal",
"[",
"1",
"]",
")",
"*",
"100",
")",
")",
")",
"elif",
"len",
"(",
"signal",
")",
"==",
"1",
":",
"return",
"signal",
"[",
"0",
"]",
".",
"split",
"(",
"' '",
")",
"[",
"0",
"]",
"else",
":",
"return",
"\"\""
] |
Gets the signal level of a network / cell.
@param string cell
A network / cell from iwlist scan.
@return string
The signal level of the network.
|
[
"Gets",
"the",
"signal",
"level",
"of",
"a",
"network",
"/",
"cell",
".",
"@param",
"string",
"cell",
"A",
"network",
"/",
"cell",
"from",
"iwlist",
"scan",
"."
] |
84c287dc6cfceb04ccbc0a8995f8a87323356ee5
|
https://github.com/cuzzo/iw_parse/blob/84c287dc6cfceb04ccbc0a8995f8a87323356ee5/iw_parse.py#L45-L63
|
train
|
cuzzo/iw_parse
|
iw_parse.py
|
get_noise_level
|
def get_noise_level(cell):
""" Gets the noise level of a network / cell.
@param string cell
A network / cell from iwlist scan.
@return string
The noise level of the network.
"""
noise = matching_line(cell, "Noise level=")
if noise is None:
return ""
noise = noise.split("=")[1]
return noise.split(' ')[0]
|
python
|
def get_noise_level(cell):
""" Gets the noise level of a network / cell.
@param string cell
A network / cell from iwlist scan.
@return string
The noise level of the network.
"""
noise = matching_line(cell, "Noise level=")
if noise is None:
return ""
noise = noise.split("=")[1]
return noise.split(' ')[0]
|
[
"def",
"get_noise_level",
"(",
"cell",
")",
":",
"noise",
"=",
"matching_line",
"(",
"cell",
",",
"\"Noise level=\"",
")",
"if",
"noise",
"is",
"None",
":",
"return",
"\"\"",
"noise",
"=",
"noise",
".",
"split",
"(",
"\"=\"",
")",
"[",
"1",
"]",
"return",
"noise",
".",
"split",
"(",
"' '",
")",
"[",
"0",
"]"
] |
Gets the noise level of a network / cell.
@param string cell
A network / cell from iwlist scan.
@return string
The noise level of the network.
|
[
"Gets",
"the",
"noise",
"level",
"of",
"a",
"network",
"/",
"cell",
".",
"@param",
"string",
"cell",
"A",
"network",
"/",
"cell",
"from",
"iwlist",
"scan",
"."
] |
84c287dc6cfceb04ccbc0a8995f8a87323356ee5
|
https://github.com/cuzzo/iw_parse/blob/84c287dc6cfceb04ccbc0a8995f8a87323356ee5/iw_parse.py#L65-L78
|
train
|
cuzzo/iw_parse
|
iw_parse.py
|
get_channel
|
def get_channel(cell):
""" Gets the channel of a network / cell.
@param string cell
A network / cell from iwlist scan.
@return string
The channel of the network.
"""
channel = matching_line(cell, "Channel:")
if channel:
return channel
frequency = matching_line(cell, "Frequency:")
channel = re.sub(r".*\(Channel\s(\d{1,3})\).*", r"\1", frequency)
return channel
|
python
|
def get_channel(cell):
""" Gets the channel of a network / cell.
@param string cell
A network / cell from iwlist scan.
@return string
The channel of the network.
"""
channel = matching_line(cell, "Channel:")
if channel:
return channel
frequency = matching_line(cell, "Frequency:")
channel = re.sub(r".*\(Channel\s(\d{1,3})\).*", r"\1", frequency)
return channel
|
[
"def",
"get_channel",
"(",
"cell",
")",
":",
"channel",
"=",
"matching_line",
"(",
"cell",
",",
"\"Channel:\"",
")",
"if",
"channel",
":",
"return",
"channel",
"frequency",
"=",
"matching_line",
"(",
"cell",
",",
"\"Frequency:\"",
")",
"channel",
"=",
"re",
".",
"sub",
"(",
"r\".*\\(Channel\\s(\\d{1,3})\\).*\"",
",",
"r\"\\1\"",
",",
"frequency",
")",
"return",
"channel"
] |
Gets the channel of a network / cell.
@param string cell
A network / cell from iwlist scan.
@return string
The channel of the network.
|
[
"Gets",
"the",
"channel",
"of",
"a",
"network",
"/",
"cell",
".",
"@param",
"string",
"cell",
"A",
"network",
"/",
"cell",
"from",
"iwlist",
"scan",
"."
] |
84c287dc6cfceb04ccbc0a8995f8a87323356ee5
|
https://github.com/cuzzo/iw_parse/blob/84c287dc6cfceb04ccbc0a8995f8a87323356ee5/iw_parse.py#L80-L94
|
train
|
cuzzo/iw_parse
|
iw_parse.py
|
get_encryption
|
def get_encryption(cell, emit_version=False):
""" Gets the encryption type of a network / cell.
@param string cell
A network / cell from iwlist scan.
@return string
The encryption type of the network.
"""
enc = ""
if matching_line(cell, "Encryption key:") == "off":
enc = "Open"
else:
for line in cell:
matching = match(line,"IE:")
if matching == None:
continue
wpa = match(matching,"WPA")
if wpa == None:
continue
version_matches = VERSION_RGX.search(wpa)
if len(version_matches.regs) == 1:
version = version_matches \
.group(0) \
.lower() \
.replace("version", "") \
.strip()
wpa = wpa.replace(version_matches.group(0), "").strip()
if wpa == "":
wpa = "WPA"
if emit_version:
enc = "{0} v.{1}".format(wpa, version)
else:
enc = wpa
if wpa == "WPA2":
return enc
else:
enc = wpa
if enc == "":
enc = "WEP"
return enc
|
python
|
def get_encryption(cell, emit_version=False):
""" Gets the encryption type of a network / cell.
@param string cell
A network / cell from iwlist scan.
@return string
The encryption type of the network.
"""
enc = ""
if matching_line(cell, "Encryption key:") == "off":
enc = "Open"
else:
for line in cell:
matching = match(line,"IE:")
if matching == None:
continue
wpa = match(matching,"WPA")
if wpa == None:
continue
version_matches = VERSION_RGX.search(wpa)
if len(version_matches.regs) == 1:
version = version_matches \
.group(0) \
.lower() \
.replace("version", "") \
.strip()
wpa = wpa.replace(version_matches.group(0), "").strip()
if wpa == "":
wpa = "WPA"
if emit_version:
enc = "{0} v.{1}".format(wpa, version)
else:
enc = wpa
if wpa == "WPA2":
return enc
else:
enc = wpa
if enc == "":
enc = "WEP"
return enc
|
[
"def",
"get_encryption",
"(",
"cell",
",",
"emit_version",
"=",
"False",
")",
":",
"enc",
"=",
"\"\"",
"if",
"matching_line",
"(",
"cell",
",",
"\"Encryption key:\"",
")",
"==",
"\"off\"",
":",
"enc",
"=",
"\"Open\"",
"else",
":",
"for",
"line",
"in",
"cell",
":",
"matching",
"=",
"match",
"(",
"line",
",",
"\"IE:\"",
")",
"if",
"matching",
"==",
"None",
":",
"continue",
"wpa",
"=",
"match",
"(",
"matching",
",",
"\"WPA\"",
")",
"if",
"wpa",
"==",
"None",
":",
"continue",
"version_matches",
"=",
"VERSION_RGX",
".",
"search",
"(",
"wpa",
")",
"if",
"len",
"(",
"version_matches",
".",
"regs",
")",
"==",
"1",
":",
"version",
"=",
"version_matches",
".",
"group",
"(",
"0",
")",
".",
"lower",
"(",
")",
".",
"replace",
"(",
"\"version\"",
",",
"\"\"",
")",
".",
"strip",
"(",
")",
"wpa",
"=",
"wpa",
".",
"replace",
"(",
"version_matches",
".",
"group",
"(",
"0",
")",
",",
"\"\"",
")",
".",
"strip",
"(",
")",
"if",
"wpa",
"==",
"\"\"",
":",
"wpa",
"=",
"\"WPA\"",
"if",
"emit_version",
":",
"enc",
"=",
"\"{0} v.{1}\"",
".",
"format",
"(",
"wpa",
",",
"version",
")",
"else",
":",
"enc",
"=",
"wpa",
"if",
"wpa",
"==",
"\"WPA2\"",
":",
"return",
"enc",
"else",
":",
"enc",
"=",
"wpa",
"if",
"enc",
"==",
"\"\"",
":",
"enc",
"=",
"\"WEP\"",
"return",
"enc"
] |
Gets the encryption type of a network / cell.
@param string cell
A network / cell from iwlist scan.
@return string
The encryption type of the network.
|
[
"Gets",
"the",
"encryption",
"type",
"of",
"a",
"network",
"/",
"cell",
".",
"@param",
"string",
"cell",
"A",
"network",
"/",
"cell",
"from",
"iwlist",
"scan",
"."
] |
84c287dc6cfceb04ccbc0a8995f8a87323356ee5
|
https://github.com/cuzzo/iw_parse/blob/84c287dc6cfceb04ccbc0a8995f8a87323356ee5/iw_parse.py#L110-L152
|
train
|
cuzzo/iw_parse
|
iw_parse.py
|
matching_line
|
def matching_line(lines, keyword):
""" Returns the first matching line in a list of lines.
@see match()
"""
for line in lines:
matching = match(line,keyword)
if matching != None:
return matching
return None
|
python
|
def matching_line(lines, keyword):
""" Returns the first matching line in a list of lines.
@see match()
"""
for line in lines:
matching = match(line,keyword)
if matching != None:
return matching
return None
|
[
"def",
"matching_line",
"(",
"lines",
",",
"keyword",
")",
":",
"for",
"line",
"in",
"lines",
":",
"matching",
"=",
"match",
"(",
"line",
",",
"keyword",
")",
"if",
"matching",
"!=",
"None",
":",
"return",
"matching",
"return",
"None"
] |
Returns the first matching line in a list of lines.
@see match()
|
[
"Returns",
"the",
"first",
"matching",
"line",
"in",
"a",
"list",
"of",
"lines",
"."
] |
84c287dc6cfceb04ccbc0a8995f8a87323356ee5
|
https://github.com/cuzzo/iw_parse/blob/84c287dc6cfceb04ccbc0a8995f8a87323356ee5/iw_parse.py#L202-L210
|
train
|
cuzzo/iw_parse
|
iw_parse.py
|
match
|
def match(line, keyword):
""" If the first part of line (modulo blanks) matches keyword,
returns the end of that line. Otherwise checks if keyword is
anywhere in the line and returns that section, else returns None"""
line = line.lstrip()
length = len(keyword)
if line[:length] == keyword:
return line[length:]
else:
if keyword in line:
return line[line.index(keyword):]
else:
return None
|
python
|
def match(line, keyword):
""" If the first part of line (modulo blanks) matches keyword,
returns the end of that line. Otherwise checks if keyword is
anywhere in the line and returns that section, else returns None"""
line = line.lstrip()
length = len(keyword)
if line[:length] == keyword:
return line[length:]
else:
if keyword in line:
return line[line.index(keyword):]
else:
return None
|
[
"def",
"match",
"(",
"line",
",",
"keyword",
")",
":",
"line",
"=",
"line",
".",
"lstrip",
"(",
")",
"length",
"=",
"len",
"(",
"keyword",
")",
"if",
"line",
"[",
":",
"length",
"]",
"==",
"keyword",
":",
"return",
"line",
"[",
"length",
":",
"]",
"else",
":",
"if",
"keyword",
"in",
"line",
":",
"return",
"line",
"[",
"line",
".",
"index",
"(",
"keyword",
")",
":",
"]",
"else",
":",
"return",
"None"
] |
If the first part of line (modulo blanks) matches keyword,
returns the end of that line. Otherwise checks if keyword is
anywhere in the line and returns that section, else returns None
|
[
"If",
"the",
"first",
"part",
"of",
"line",
"(",
"modulo",
"blanks",
")",
"matches",
"keyword",
"returns",
"the",
"end",
"of",
"that",
"line",
".",
"Otherwise",
"checks",
"if",
"keyword",
"is",
"anywhere",
"in",
"the",
"line",
"and",
"returns",
"that",
"section",
"else",
"returns",
"None"
] |
84c287dc6cfceb04ccbc0a8995f8a87323356ee5
|
https://github.com/cuzzo/iw_parse/blob/84c287dc6cfceb04ccbc0a8995f8a87323356ee5/iw_parse.py#L212-L225
|
train
|
cuzzo/iw_parse
|
iw_parse.py
|
parse_cell
|
def parse_cell(cell, rules):
""" Applies the rules to the bunch of text describing a cell.
@param string cell
A network / cell from iwlist scan.
@param dictionary rules
A dictionary of parse rules.
@return dictionary
parsed networks. """
parsed_cell = {}
for key in rules:
rule = rules[key]
parsed_cell.update({key: rule(cell)})
return parsed_cell
|
python
|
def parse_cell(cell, rules):
""" Applies the rules to the bunch of text describing a cell.
@param string cell
A network / cell from iwlist scan.
@param dictionary rules
A dictionary of parse rules.
@return dictionary
parsed networks. """
parsed_cell = {}
for key in rules:
rule = rules[key]
parsed_cell.update({key: rule(cell)})
return parsed_cell
|
[
"def",
"parse_cell",
"(",
"cell",
",",
"rules",
")",
":",
"parsed_cell",
"=",
"{",
"}",
"for",
"key",
"in",
"rules",
":",
"rule",
"=",
"rules",
"[",
"key",
"]",
"parsed_cell",
".",
"update",
"(",
"{",
"key",
":",
"rule",
"(",
"cell",
")",
"}",
")",
"return",
"parsed_cell"
] |
Applies the rules to the bunch of text describing a cell.
@param string cell
A network / cell from iwlist scan.
@param dictionary rules
A dictionary of parse rules.
@return dictionary
parsed networks.
|
[
"Applies",
"the",
"rules",
"to",
"the",
"bunch",
"of",
"text",
"describing",
"a",
"cell",
".",
"@param",
"string",
"cell",
"A",
"network",
"/",
"cell",
"from",
"iwlist",
"scan",
".",
"@param",
"dictionary",
"rules",
"A",
"dictionary",
"of",
"parse",
"rules",
"."
] |
84c287dc6cfceb04ccbc0a8995f8a87323356ee5
|
https://github.com/cuzzo/iw_parse/blob/84c287dc6cfceb04ccbc0a8995f8a87323356ee5/iw_parse.py#L227-L241
|
train
|
cuzzo/iw_parse
|
iw_parse.py
|
get_parsed_cells
|
def get_parsed_cells(iw_data, rules=None):
""" Parses iwlist output into a list of networks.
@param list iw_data
Output from iwlist scan.
A list of strings.
@return list
properties: Name, Address, Quality, Channel, Frequency, Encryption, Signal Level, Noise Level, Bit Rates, Mode.
"""
# Here's a dictionary of rules that will be applied to the description
# of each cell. The key will be the name of the column in the table.
# The value is a function defined above.
rules = rules or {
"Name": get_name,
"Quality": get_quality,
"Channel": get_channel,
"Frequency": get_frequency,
"Encryption": get_encryption,
"Address": get_address,
"Signal Level": get_signal_level,
"Noise Level": get_noise_level,
"Bit Rates": get_bit_rates,
"Mode": get_mode,
}
cells = [[]]
parsed_cells = []
for line in iw_data:
cell_line = match(line, "Cell ")
if cell_line != None:
cells.append([])
line = cell_line[-27:]
cells[-1].append(line.rstrip())
cells = cells[1:]
for cell in cells:
parsed_cells.append(parse_cell(cell, rules))
sort_cells(parsed_cells)
return parsed_cells
|
python
|
def get_parsed_cells(iw_data, rules=None):
""" Parses iwlist output into a list of networks.
@param list iw_data
Output from iwlist scan.
A list of strings.
@return list
properties: Name, Address, Quality, Channel, Frequency, Encryption, Signal Level, Noise Level, Bit Rates, Mode.
"""
# Here's a dictionary of rules that will be applied to the description
# of each cell. The key will be the name of the column in the table.
# The value is a function defined above.
rules = rules or {
"Name": get_name,
"Quality": get_quality,
"Channel": get_channel,
"Frequency": get_frequency,
"Encryption": get_encryption,
"Address": get_address,
"Signal Level": get_signal_level,
"Noise Level": get_noise_level,
"Bit Rates": get_bit_rates,
"Mode": get_mode,
}
cells = [[]]
parsed_cells = []
for line in iw_data:
cell_line = match(line, "Cell ")
if cell_line != None:
cells.append([])
line = cell_line[-27:]
cells[-1].append(line.rstrip())
cells = cells[1:]
for cell in cells:
parsed_cells.append(parse_cell(cell, rules))
sort_cells(parsed_cells)
return parsed_cells
|
[
"def",
"get_parsed_cells",
"(",
"iw_data",
",",
"rules",
"=",
"None",
")",
":",
"# Here's a dictionary of rules that will be applied to the description",
"# of each cell. The key will be the name of the column in the table.",
"# The value is a function defined above.",
"rules",
"=",
"rules",
"or",
"{",
"\"Name\"",
":",
"get_name",
",",
"\"Quality\"",
":",
"get_quality",
",",
"\"Channel\"",
":",
"get_channel",
",",
"\"Frequency\"",
":",
"get_frequency",
",",
"\"Encryption\"",
":",
"get_encryption",
",",
"\"Address\"",
":",
"get_address",
",",
"\"Signal Level\"",
":",
"get_signal_level",
",",
"\"Noise Level\"",
":",
"get_noise_level",
",",
"\"Bit Rates\"",
":",
"get_bit_rates",
",",
"\"Mode\"",
":",
"get_mode",
",",
"}",
"cells",
"=",
"[",
"[",
"]",
"]",
"parsed_cells",
"=",
"[",
"]",
"for",
"line",
"in",
"iw_data",
":",
"cell_line",
"=",
"match",
"(",
"line",
",",
"\"Cell \"",
")",
"if",
"cell_line",
"!=",
"None",
":",
"cells",
".",
"append",
"(",
"[",
"]",
")",
"line",
"=",
"cell_line",
"[",
"-",
"27",
":",
"]",
"cells",
"[",
"-",
"1",
"]",
".",
"append",
"(",
"line",
".",
"rstrip",
"(",
")",
")",
"cells",
"=",
"cells",
"[",
"1",
":",
"]",
"for",
"cell",
"in",
"cells",
":",
"parsed_cells",
".",
"append",
"(",
"parse_cell",
"(",
"cell",
",",
"rules",
")",
")",
"sort_cells",
"(",
"parsed_cells",
")",
"return",
"parsed_cells"
] |
Parses iwlist output into a list of networks.
@param list iw_data
Output from iwlist scan.
A list of strings.
@return list
properties: Name, Address, Quality, Channel, Frequency, Encryption, Signal Level, Noise Level, Bit Rates, Mode.
|
[
"Parses",
"iwlist",
"output",
"into",
"a",
"list",
"of",
"networks",
".",
"@param",
"list",
"iw_data",
"Output",
"from",
"iwlist",
"scan",
".",
"A",
"list",
"of",
"strings",
"."
] |
84c287dc6cfceb04ccbc0a8995f8a87323356ee5
|
https://github.com/cuzzo/iw_parse/blob/84c287dc6cfceb04ccbc0a8995f8a87323356ee5/iw_parse.py#L269-L311
|
train
|
praw-dev/prawcore
|
prawcore/sessions.py
|
Session.request
|
def request(
self, method, path, data=None, files=None, json=None, params=None
):
"""Return the json content from the resource at ``path``.
:param method: The request verb. E.g., get, post, put.
:param path: The path of the request. This path will be combined with
the ``oauth_url`` of the Requestor.
:param data: Dictionary, bytes, or file-like object to send in the body
of the request.
:param files: Dictionary, mapping ``filename`` to file-like object.
:param json: Object to be serialized to JSON in the body of the
request.
:param params: The query parameters to send with the request.
Automatically refreshes the access token if it becomes invalid and a
refresh token is available. Raises InvalidInvocation in such a case if
a refresh token is not available.
"""
params = deepcopy(params) or {}
params["raw_json"] = 1
if isinstance(data, dict):
data = deepcopy(data)
data["api_type"] = "json"
data = sorted(data.items())
url = urljoin(self._requestor.oauth_url, path)
return self._request_with_retries(
data=data,
files=files,
json=json,
method=method,
params=params,
url=url,
)
|
python
|
def request(
self, method, path, data=None, files=None, json=None, params=None
):
"""Return the json content from the resource at ``path``.
:param method: The request verb. E.g., get, post, put.
:param path: The path of the request. This path will be combined with
the ``oauth_url`` of the Requestor.
:param data: Dictionary, bytes, or file-like object to send in the body
of the request.
:param files: Dictionary, mapping ``filename`` to file-like object.
:param json: Object to be serialized to JSON in the body of the
request.
:param params: The query parameters to send with the request.
Automatically refreshes the access token if it becomes invalid and a
refresh token is available. Raises InvalidInvocation in such a case if
a refresh token is not available.
"""
params = deepcopy(params) or {}
params["raw_json"] = 1
if isinstance(data, dict):
data = deepcopy(data)
data["api_type"] = "json"
data = sorted(data.items())
url = urljoin(self._requestor.oauth_url, path)
return self._request_with_retries(
data=data,
files=files,
json=json,
method=method,
params=params,
url=url,
)
|
[
"def",
"request",
"(",
"self",
",",
"method",
",",
"path",
",",
"data",
"=",
"None",
",",
"files",
"=",
"None",
",",
"json",
"=",
"None",
",",
"params",
"=",
"None",
")",
":",
"params",
"=",
"deepcopy",
"(",
"params",
")",
"or",
"{",
"}",
"params",
"[",
"\"raw_json\"",
"]",
"=",
"1",
"if",
"isinstance",
"(",
"data",
",",
"dict",
")",
":",
"data",
"=",
"deepcopy",
"(",
"data",
")",
"data",
"[",
"\"api_type\"",
"]",
"=",
"\"json\"",
"data",
"=",
"sorted",
"(",
"data",
".",
"items",
"(",
")",
")",
"url",
"=",
"urljoin",
"(",
"self",
".",
"_requestor",
".",
"oauth_url",
",",
"path",
")",
"return",
"self",
".",
"_request_with_retries",
"(",
"data",
"=",
"data",
",",
"files",
"=",
"files",
",",
"json",
"=",
"json",
",",
"method",
"=",
"method",
",",
"params",
"=",
"params",
",",
"url",
"=",
"url",
",",
")"
] |
Return the json content from the resource at ``path``.
:param method: The request verb. E.g., get, post, put.
:param path: The path of the request. This path will be combined with
the ``oauth_url`` of the Requestor.
:param data: Dictionary, bytes, or file-like object to send in the body
of the request.
:param files: Dictionary, mapping ``filename`` to file-like object.
:param json: Object to be serialized to JSON in the body of the
request.
:param params: The query parameters to send with the request.
Automatically refreshes the access token if it becomes invalid and a
refresh token is available. Raises InvalidInvocation in such a case if
a refresh token is not available.
|
[
"Return",
"the",
"json",
"content",
"from",
"the",
"resource",
"at",
"path",
"."
] |
b16ae88a1f2bf98095ed6fe64851cb7add7ed752
|
https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/prawcore/sessions.py#L226-L260
|
train
|
praw-dev/prawcore
|
examples/script_auth_friend_list.py
|
main
|
def main():
"""Provide the program's entry point when directly executed."""
authenticator = prawcore.TrustedAuthenticator(
prawcore.Requestor("prawcore_script_auth_example"),
os.environ["PRAWCORE_CLIENT_ID"],
os.environ["PRAWCORE_CLIENT_SECRET"],
)
authorizer = prawcore.ScriptAuthorizer(
authenticator,
os.environ["PRAWCORE_USERNAME"],
os.environ["PRAWCORE_PASSWORD"],
)
authorizer.refresh()
with prawcore.session(authorizer) as session:
data = session.request("GET", "/api/v1/me/friends")
for friend in data["data"]["children"]:
print(friend["name"])
return 0
|
python
|
def main():
"""Provide the program's entry point when directly executed."""
authenticator = prawcore.TrustedAuthenticator(
prawcore.Requestor("prawcore_script_auth_example"),
os.environ["PRAWCORE_CLIENT_ID"],
os.environ["PRAWCORE_CLIENT_SECRET"],
)
authorizer = prawcore.ScriptAuthorizer(
authenticator,
os.environ["PRAWCORE_USERNAME"],
os.environ["PRAWCORE_PASSWORD"],
)
authorizer.refresh()
with prawcore.session(authorizer) as session:
data = session.request("GET", "/api/v1/me/friends")
for friend in data["data"]["children"]:
print(friend["name"])
return 0
|
[
"def",
"main",
"(",
")",
":",
"authenticator",
"=",
"prawcore",
".",
"TrustedAuthenticator",
"(",
"prawcore",
".",
"Requestor",
"(",
"\"prawcore_script_auth_example\"",
")",
",",
"os",
".",
"environ",
"[",
"\"PRAWCORE_CLIENT_ID\"",
"]",
",",
"os",
".",
"environ",
"[",
"\"PRAWCORE_CLIENT_SECRET\"",
"]",
",",
")",
"authorizer",
"=",
"prawcore",
".",
"ScriptAuthorizer",
"(",
"authenticator",
",",
"os",
".",
"environ",
"[",
"\"PRAWCORE_USERNAME\"",
"]",
",",
"os",
".",
"environ",
"[",
"\"PRAWCORE_PASSWORD\"",
"]",
",",
")",
"authorizer",
".",
"refresh",
"(",
")",
"with",
"prawcore",
".",
"session",
"(",
"authorizer",
")",
"as",
"session",
":",
"data",
"=",
"session",
".",
"request",
"(",
"\"GET\"",
",",
"\"/api/v1/me/friends\"",
")",
"for",
"friend",
"in",
"data",
"[",
"\"data\"",
"]",
"[",
"\"children\"",
"]",
":",
"print",
"(",
"friend",
"[",
"\"name\"",
"]",
")",
"return",
"0"
] |
Provide the program's entry point when directly executed.
|
[
"Provide",
"the",
"program",
"s",
"entry",
"point",
"when",
"directly",
"executed",
"."
] |
b16ae88a1f2bf98095ed6fe64851cb7add7ed752
|
https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/examples/script_auth_friend_list.py#L15-L35
|
train
|
praw-dev/prawcore
|
examples/caching_requestor.py
|
main
|
def main():
"""Provide the program's entry point when directly executed."""
if len(sys.argv) != 2:
print("Usage: {} USERNAME".format(sys.argv[0]))
return 1
caching_requestor = prawcore.Requestor(
"prawcore_device_id_auth_example", session=CachingSession()
)
authenticator = prawcore.TrustedAuthenticator(
caching_requestor,
os.environ["PRAWCORE_CLIENT_ID"],
os.environ["PRAWCORE_CLIENT_SECRET"],
)
authorizer = prawcore.ReadOnlyAuthorizer(authenticator)
authorizer.refresh()
user = sys.argv[1]
with prawcore.session(authorizer) as session:
data1 = session.request("GET", "/api/v1/user/{}/trophies".format(user))
with prawcore.session(authorizer) as session:
data2 = session.request("GET", "/api/v1/user/{}/trophies".format(user))
for trophy in data1["data"]["trophies"]:
description = trophy["data"]["description"]
print(
"Original:",
trophy["data"]["name"]
+ (" ({})".format(description) if description else ""),
)
for trophy in data2["data"]["trophies"]:
description = trophy["data"]["description"]
print(
"Cached:",
trophy["data"]["name"]
+ (" ({})".format(description) if description else ""),
)
print(
"----\nCached == Original:",
data2["data"]["trophies"] == data2["data"]["trophies"],
)
return 0
|
python
|
def main():
"""Provide the program's entry point when directly executed."""
if len(sys.argv) != 2:
print("Usage: {} USERNAME".format(sys.argv[0]))
return 1
caching_requestor = prawcore.Requestor(
"prawcore_device_id_auth_example", session=CachingSession()
)
authenticator = prawcore.TrustedAuthenticator(
caching_requestor,
os.environ["PRAWCORE_CLIENT_ID"],
os.environ["PRAWCORE_CLIENT_SECRET"],
)
authorizer = prawcore.ReadOnlyAuthorizer(authenticator)
authorizer.refresh()
user = sys.argv[1]
with prawcore.session(authorizer) as session:
data1 = session.request("GET", "/api/v1/user/{}/trophies".format(user))
with prawcore.session(authorizer) as session:
data2 = session.request("GET", "/api/v1/user/{}/trophies".format(user))
for trophy in data1["data"]["trophies"]:
description = trophy["data"]["description"]
print(
"Original:",
trophy["data"]["name"]
+ (" ({})".format(description) if description else ""),
)
for trophy in data2["data"]["trophies"]:
description = trophy["data"]["description"]
print(
"Cached:",
trophy["data"]["name"]
+ (" ({})".format(description) if description else ""),
)
print(
"----\nCached == Original:",
data2["data"]["trophies"] == data2["data"]["trophies"],
)
return 0
|
[
"def",
"main",
"(",
")",
":",
"if",
"len",
"(",
"sys",
".",
"argv",
")",
"!=",
"2",
":",
"print",
"(",
"\"Usage: {} USERNAME\"",
".",
"format",
"(",
"sys",
".",
"argv",
"[",
"0",
"]",
")",
")",
"return",
"1",
"caching_requestor",
"=",
"prawcore",
".",
"Requestor",
"(",
"\"prawcore_device_id_auth_example\"",
",",
"session",
"=",
"CachingSession",
"(",
")",
")",
"authenticator",
"=",
"prawcore",
".",
"TrustedAuthenticator",
"(",
"caching_requestor",
",",
"os",
".",
"environ",
"[",
"\"PRAWCORE_CLIENT_ID\"",
"]",
",",
"os",
".",
"environ",
"[",
"\"PRAWCORE_CLIENT_SECRET\"",
"]",
",",
")",
"authorizer",
"=",
"prawcore",
".",
"ReadOnlyAuthorizer",
"(",
"authenticator",
")",
"authorizer",
".",
"refresh",
"(",
")",
"user",
"=",
"sys",
".",
"argv",
"[",
"1",
"]",
"with",
"prawcore",
".",
"session",
"(",
"authorizer",
")",
"as",
"session",
":",
"data1",
"=",
"session",
".",
"request",
"(",
"\"GET\"",
",",
"\"/api/v1/user/{}/trophies\"",
".",
"format",
"(",
"user",
")",
")",
"with",
"prawcore",
".",
"session",
"(",
"authorizer",
")",
"as",
"session",
":",
"data2",
"=",
"session",
".",
"request",
"(",
"\"GET\"",
",",
"\"/api/v1/user/{}/trophies\"",
".",
"format",
"(",
"user",
")",
")",
"for",
"trophy",
"in",
"data1",
"[",
"\"data\"",
"]",
"[",
"\"trophies\"",
"]",
":",
"description",
"=",
"trophy",
"[",
"\"data\"",
"]",
"[",
"\"description\"",
"]",
"print",
"(",
"\"Original:\"",
",",
"trophy",
"[",
"\"data\"",
"]",
"[",
"\"name\"",
"]",
"+",
"(",
"\" ({})\"",
".",
"format",
"(",
"description",
")",
"if",
"description",
"else",
"\"\"",
")",
",",
")",
"for",
"trophy",
"in",
"data2",
"[",
"\"data\"",
"]",
"[",
"\"trophies\"",
"]",
":",
"description",
"=",
"trophy",
"[",
"\"data\"",
"]",
"[",
"\"description\"",
"]",
"print",
"(",
"\"Cached:\"",
",",
"trophy",
"[",
"\"data\"",
"]",
"[",
"\"name\"",
"]",
"+",
"(",
"\" ({})\"",
".",
"format",
"(",
"description",
")",
"if",
"description",
"else",
"\"\"",
")",
",",
")",
"print",
"(",
"\"----\\nCached == Original:\"",
",",
"data2",
"[",
"\"data\"",
"]",
"[",
"\"trophies\"",
"]",
"==",
"data2",
"[",
"\"data\"",
"]",
"[",
"\"trophies\"",
"]",
",",
")",
"return",
"0"
] |
Provide the program's entry point when directly executed.
|
[
"Provide",
"the",
"program",
"s",
"entry",
"point",
"when",
"directly",
"executed",
"."
] |
b16ae88a1f2bf98095ed6fe64851cb7add7ed752
|
https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/examples/caching_requestor.py#L39-L83
|
train
|
praw-dev/prawcore
|
examples/caching_requestor.py
|
CachingSession.request
|
def request(self, method, url, params=None, **kwargs):
"""Perform a request, or return a cached response if available."""
params_key = tuple(params.items()) if params else ()
if method.upper() == "GET":
if (url, params_key) in self.get_cache:
print("Returning cached response for:", method, url, params)
return self.get_cache[(url, params_key)]
result = super().request(method, url, params, **kwargs)
if method.upper() == "GET":
self.get_cache[(url, params_key)] = result
print("Adding entry to the cache:", method, url, params)
return result
|
python
|
def request(self, method, url, params=None, **kwargs):
"""Perform a request, or return a cached response if available."""
params_key = tuple(params.items()) if params else ()
if method.upper() == "GET":
if (url, params_key) in self.get_cache:
print("Returning cached response for:", method, url, params)
return self.get_cache[(url, params_key)]
result = super().request(method, url, params, **kwargs)
if method.upper() == "GET":
self.get_cache[(url, params_key)] = result
print("Adding entry to the cache:", method, url, params)
return result
|
[
"def",
"request",
"(",
"self",
",",
"method",
",",
"url",
",",
"params",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"params_key",
"=",
"tuple",
"(",
"params",
".",
"items",
"(",
")",
")",
"if",
"params",
"else",
"(",
")",
"if",
"method",
".",
"upper",
"(",
")",
"==",
"\"GET\"",
":",
"if",
"(",
"url",
",",
"params_key",
")",
"in",
"self",
".",
"get_cache",
":",
"print",
"(",
"\"Returning cached response for:\"",
",",
"method",
",",
"url",
",",
"params",
")",
"return",
"self",
".",
"get_cache",
"[",
"(",
"url",
",",
"params_key",
")",
"]",
"result",
"=",
"super",
"(",
")",
".",
"request",
"(",
"method",
",",
"url",
",",
"params",
",",
"*",
"*",
"kwargs",
")",
"if",
"method",
".",
"upper",
"(",
")",
"==",
"\"GET\"",
":",
"self",
".",
"get_cache",
"[",
"(",
"url",
",",
"params_key",
")",
"]",
"=",
"result",
"print",
"(",
"\"Adding entry to the cache:\"",
",",
"method",
",",
"url",
",",
"params",
")",
"return",
"result"
] |
Perform a request, or return a cached response if available.
|
[
"Perform",
"a",
"request",
"or",
"return",
"a",
"cached",
"response",
"if",
"available",
"."
] |
b16ae88a1f2bf98095ed6fe64851cb7add7ed752
|
https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/examples/caching_requestor.py#L25-L36
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/routing.py
|
RoutingTable.parse_routing_info
|
def parse_routing_info(cls, records):
""" Parse the records returned from a getServers call and
return a new RoutingTable instance.
"""
if len(records) != 1:
raise RoutingProtocolError("Expected exactly one record")
record = records[0]
routers = []
readers = []
writers = []
try:
servers = record["servers"]
for server in servers:
role = server["role"]
addresses = []
for address in server["addresses"]:
addresses.append(SocketAddress.parse(address, DEFAULT_PORT))
if role == "ROUTE":
routers.extend(addresses)
elif role == "READ":
readers.extend(addresses)
elif role == "WRITE":
writers.extend(addresses)
ttl = record["ttl"]
except (KeyError, TypeError):
raise RoutingProtocolError("Cannot parse routing info")
else:
return cls(routers, readers, writers, ttl)
|
python
|
def parse_routing_info(cls, records):
""" Parse the records returned from a getServers call and
return a new RoutingTable instance.
"""
if len(records) != 1:
raise RoutingProtocolError("Expected exactly one record")
record = records[0]
routers = []
readers = []
writers = []
try:
servers = record["servers"]
for server in servers:
role = server["role"]
addresses = []
for address in server["addresses"]:
addresses.append(SocketAddress.parse(address, DEFAULT_PORT))
if role == "ROUTE":
routers.extend(addresses)
elif role == "READ":
readers.extend(addresses)
elif role == "WRITE":
writers.extend(addresses)
ttl = record["ttl"]
except (KeyError, TypeError):
raise RoutingProtocolError("Cannot parse routing info")
else:
return cls(routers, readers, writers, ttl)
|
[
"def",
"parse_routing_info",
"(",
"cls",
",",
"records",
")",
":",
"if",
"len",
"(",
"records",
")",
"!=",
"1",
":",
"raise",
"RoutingProtocolError",
"(",
"\"Expected exactly one record\"",
")",
"record",
"=",
"records",
"[",
"0",
"]",
"routers",
"=",
"[",
"]",
"readers",
"=",
"[",
"]",
"writers",
"=",
"[",
"]",
"try",
":",
"servers",
"=",
"record",
"[",
"\"servers\"",
"]",
"for",
"server",
"in",
"servers",
":",
"role",
"=",
"server",
"[",
"\"role\"",
"]",
"addresses",
"=",
"[",
"]",
"for",
"address",
"in",
"server",
"[",
"\"addresses\"",
"]",
":",
"addresses",
".",
"append",
"(",
"SocketAddress",
".",
"parse",
"(",
"address",
",",
"DEFAULT_PORT",
")",
")",
"if",
"role",
"==",
"\"ROUTE\"",
":",
"routers",
".",
"extend",
"(",
"addresses",
")",
"elif",
"role",
"==",
"\"READ\"",
":",
"readers",
".",
"extend",
"(",
"addresses",
")",
"elif",
"role",
"==",
"\"WRITE\"",
":",
"writers",
".",
"extend",
"(",
"addresses",
")",
"ttl",
"=",
"record",
"[",
"\"ttl\"",
"]",
"except",
"(",
"KeyError",
",",
"TypeError",
")",
":",
"raise",
"RoutingProtocolError",
"(",
"\"Cannot parse routing info\"",
")",
"else",
":",
"return",
"cls",
"(",
"routers",
",",
"readers",
",",
"writers",
",",
"ttl",
")"
] |
Parse the records returned from a getServers call and
return a new RoutingTable instance.
|
[
"Parse",
"the",
"records",
"returned",
"from",
"a",
"getServers",
"call",
"and",
"return",
"a",
"new",
"RoutingTable",
"instance",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/routing.py#L97-L124
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/routing.py
|
RoutingTable.is_fresh
|
def is_fresh(self, access_mode):
""" Indicator for whether routing information is still usable.
"""
log_debug("[#0000] C: <ROUTING> Checking table freshness for %r", access_mode)
expired = self.last_updated_time + self.ttl <= self.timer()
has_server_for_mode = bool(access_mode == READ_ACCESS and self.readers) or bool(access_mode == WRITE_ACCESS and self.writers)
log_debug("[#0000] C: <ROUTING> Table expired=%r", expired)
log_debug("[#0000] C: <ROUTING> Table routers=%r", self.routers)
log_debug("[#0000] C: <ROUTING> Table has_server_for_mode=%r", has_server_for_mode)
return not expired and self.routers and has_server_for_mode
|
python
|
def is_fresh(self, access_mode):
""" Indicator for whether routing information is still usable.
"""
log_debug("[#0000] C: <ROUTING> Checking table freshness for %r", access_mode)
expired = self.last_updated_time + self.ttl <= self.timer()
has_server_for_mode = bool(access_mode == READ_ACCESS and self.readers) or bool(access_mode == WRITE_ACCESS and self.writers)
log_debug("[#0000] C: <ROUTING> Table expired=%r", expired)
log_debug("[#0000] C: <ROUTING> Table routers=%r", self.routers)
log_debug("[#0000] C: <ROUTING> Table has_server_for_mode=%r", has_server_for_mode)
return not expired and self.routers and has_server_for_mode
|
[
"def",
"is_fresh",
"(",
"self",
",",
"access_mode",
")",
":",
"log_debug",
"(",
"\"[#0000] C: <ROUTING> Checking table freshness for %r\"",
",",
"access_mode",
")",
"expired",
"=",
"self",
".",
"last_updated_time",
"+",
"self",
".",
"ttl",
"<=",
"self",
".",
"timer",
"(",
")",
"has_server_for_mode",
"=",
"bool",
"(",
"access_mode",
"==",
"READ_ACCESS",
"and",
"self",
".",
"readers",
")",
"or",
"bool",
"(",
"access_mode",
"==",
"WRITE_ACCESS",
"and",
"self",
".",
"writers",
")",
"log_debug",
"(",
"\"[#0000] C: <ROUTING> Table expired=%r\"",
",",
"expired",
")",
"log_debug",
"(",
"\"[#0000] C: <ROUTING> Table routers=%r\"",
",",
"self",
".",
"routers",
")",
"log_debug",
"(",
"\"[#0000] C: <ROUTING> Table has_server_for_mode=%r\"",
",",
"has_server_for_mode",
")",
"return",
"not",
"expired",
"and",
"self",
".",
"routers",
"and",
"has_server_for_mode"
] |
Indicator for whether routing information is still usable.
|
[
"Indicator",
"for",
"whether",
"routing",
"information",
"is",
"still",
"usable",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/routing.py#L142-L151
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/routing.py
|
RoutingTable.update
|
def update(self, new_routing_table):
""" Update the current routing table with new routing information
from a replacement table.
"""
self.routers.replace(new_routing_table.routers)
self.readers.replace(new_routing_table.readers)
self.writers.replace(new_routing_table.writers)
self.last_updated_time = self.timer()
self.ttl = new_routing_table.ttl
log_debug("[#0000] S: <ROUTING> table=%r", self)
|
python
|
def update(self, new_routing_table):
""" Update the current routing table with new routing information
from a replacement table.
"""
self.routers.replace(new_routing_table.routers)
self.readers.replace(new_routing_table.readers)
self.writers.replace(new_routing_table.writers)
self.last_updated_time = self.timer()
self.ttl = new_routing_table.ttl
log_debug("[#0000] S: <ROUTING> table=%r", self)
|
[
"def",
"update",
"(",
"self",
",",
"new_routing_table",
")",
":",
"self",
".",
"routers",
".",
"replace",
"(",
"new_routing_table",
".",
"routers",
")",
"self",
".",
"readers",
".",
"replace",
"(",
"new_routing_table",
".",
"readers",
")",
"self",
".",
"writers",
".",
"replace",
"(",
"new_routing_table",
".",
"writers",
")",
"self",
".",
"last_updated_time",
"=",
"self",
".",
"timer",
"(",
")",
"self",
".",
"ttl",
"=",
"new_routing_table",
".",
"ttl",
"log_debug",
"(",
"\"[#0000] S: <ROUTING> table=%r\"",
",",
"self",
")"
] |
Update the current routing table with new routing information
from a replacement table.
|
[
"Update",
"the",
"current",
"routing",
"table",
"with",
"new",
"routing",
"information",
"from",
"a",
"replacement",
"table",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/routing.py#L153-L162
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/routing.py
|
RoutingConnectionPool.fetch_routing_info
|
def fetch_routing_info(self, address):
""" Fetch raw routing info from a given router address.
:param address: router address
:return: list of routing records or
None if no connection could be established
:raise ServiceUnavailable: if the server does not support routing or
if routing support is broken
"""
metadata = {}
records = []
def fail(md):
if md.get("code") == "Neo.ClientError.Procedure.ProcedureNotFound":
raise RoutingProtocolError("Server {!r} does not support routing".format(address))
else:
raise RoutingProtocolError("Routing support broken on server {!r}".format(address))
try:
with self.acquire_direct(address) as cx:
_, _, server_version = (cx.server.agent or "").partition("/")
# TODO 2.0: remove old routing procedure
if server_version and Version.parse(server_version) >= Version((3, 2)):
log_debug("[#%04X] C: <ROUTING> query=%r", cx.local_port, self.routing_context or {})
cx.run("CALL dbms.cluster.routing.getRoutingTable({context})",
{"context": self.routing_context}, on_success=metadata.update, on_failure=fail)
else:
log_debug("[#%04X] C: <ROUTING> query={}", cx.local_port)
cx.run("CALL dbms.cluster.routing.getServers", {}, on_success=metadata.update, on_failure=fail)
cx.pull_all(on_success=metadata.update, on_records=records.extend)
cx.sync()
routing_info = [dict(zip(metadata.get("fields", ()), values)) for values in records]
log_debug("[#%04X] S: <ROUTING> info=%r", cx.local_port, routing_info)
return routing_info
except RoutingProtocolError as error:
raise ServiceUnavailable(*error.args)
except ServiceUnavailable:
self.deactivate(address)
return None
|
python
|
def fetch_routing_info(self, address):
""" Fetch raw routing info from a given router address.
:param address: router address
:return: list of routing records or
None if no connection could be established
:raise ServiceUnavailable: if the server does not support routing or
if routing support is broken
"""
metadata = {}
records = []
def fail(md):
if md.get("code") == "Neo.ClientError.Procedure.ProcedureNotFound":
raise RoutingProtocolError("Server {!r} does not support routing".format(address))
else:
raise RoutingProtocolError("Routing support broken on server {!r}".format(address))
try:
with self.acquire_direct(address) as cx:
_, _, server_version = (cx.server.agent or "").partition("/")
# TODO 2.0: remove old routing procedure
if server_version and Version.parse(server_version) >= Version((3, 2)):
log_debug("[#%04X] C: <ROUTING> query=%r", cx.local_port, self.routing_context or {})
cx.run("CALL dbms.cluster.routing.getRoutingTable({context})",
{"context": self.routing_context}, on_success=metadata.update, on_failure=fail)
else:
log_debug("[#%04X] C: <ROUTING> query={}", cx.local_port)
cx.run("CALL dbms.cluster.routing.getServers", {}, on_success=metadata.update, on_failure=fail)
cx.pull_all(on_success=metadata.update, on_records=records.extend)
cx.sync()
routing_info = [dict(zip(metadata.get("fields", ()), values)) for values in records]
log_debug("[#%04X] S: <ROUTING> info=%r", cx.local_port, routing_info)
return routing_info
except RoutingProtocolError as error:
raise ServiceUnavailable(*error.args)
except ServiceUnavailable:
self.deactivate(address)
return None
|
[
"def",
"fetch_routing_info",
"(",
"self",
",",
"address",
")",
":",
"metadata",
"=",
"{",
"}",
"records",
"=",
"[",
"]",
"def",
"fail",
"(",
"md",
")",
":",
"if",
"md",
".",
"get",
"(",
"\"code\"",
")",
"==",
"\"Neo.ClientError.Procedure.ProcedureNotFound\"",
":",
"raise",
"RoutingProtocolError",
"(",
"\"Server {!r} does not support routing\"",
".",
"format",
"(",
"address",
")",
")",
"else",
":",
"raise",
"RoutingProtocolError",
"(",
"\"Routing support broken on server {!r}\"",
".",
"format",
"(",
"address",
")",
")",
"try",
":",
"with",
"self",
".",
"acquire_direct",
"(",
"address",
")",
"as",
"cx",
":",
"_",
",",
"_",
",",
"server_version",
"=",
"(",
"cx",
".",
"server",
".",
"agent",
"or",
"\"\"",
")",
".",
"partition",
"(",
"\"/\"",
")",
"# TODO 2.0: remove old routing procedure",
"if",
"server_version",
"and",
"Version",
".",
"parse",
"(",
"server_version",
")",
">=",
"Version",
"(",
"(",
"3",
",",
"2",
")",
")",
":",
"log_debug",
"(",
"\"[#%04X] C: <ROUTING> query=%r\"",
",",
"cx",
".",
"local_port",
",",
"self",
".",
"routing_context",
"or",
"{",
"}",
")",
"cx",
".",
"run",
"(",
"\"CALL dbms.cluster.routing.getRoutingTable({context})\"",
",",
"{",
"\"context\"",
":",
"self",
".",
"routing_context",
"}",
",",
"on_success",
"=",
"metadata",
".",
"update",
",",
"on_failure",
"=",
"fail",
")",
"else",
":",
"log_debug",
"(",
"\"[#%04X] C: <ROUTING> query={}\"",
",",
"cx",
".",
"local_port",
")",
"cx",
".",
"run",
"(",
"\"CALL dbms.cluster.routing.getServers\"",
",",
"{",
"}",
",",
"on_success",
"=",
"metadata",
".",
"update",
",",
"on_failure",
"=",
"fail",
")",
"cx",
".",
"pull_all",
"(",
"on_success",
"=",
"metadata",
".",
"update",
",",
"on_records",
"=",
"records",
".",
"extend",
")",
"cx",
".",
"sync",
"(",
")",
"routing_info",
"=",
"[",
"dict",
"(",
"zip",
"(",
"metadata",
".",
"get",
"(",
"\"fields\"",
",",
"(",
")",
")",
",",
"values",
")",
")",
"for",
"values",
"in",
"records",
"]",
"log_debug",
"(",
"\"[#%04X] S: <ROUTING> info=%r\"",
",",
"cx",
".",
"local_port",
",",
"routing_info",
")",
"return",
"routing_info",
"except",
"RoutingProtocolError",
"as",
"error",
":",
"raise",
"ServiceUnavailable",
"(",
"*",
"error",
".",
"args",
")",
"except",
"ServiceUnavailable",
":",
"self",
".",
"deactivate",
"(",
"address",
")",
"return",
"None"
] |
Fetch raw routing info from a given router address.
:param address: router address
:return: list of routing records or
None if no connection could be established
:raise ServiceUnavailable: if the server does not support routing or
if routing support is broken
|
[
"Fetch",
"raw",
"routing",
"info",
"from",
"a",
"given",
"router",
"address",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/routing.py#L222-L260
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/routing.py
|
RoutingConnectionPool.fetch_routing_table
|
def fetch_routing_table(self, address):
""" Fetch a routing table from a given router address.
:param address: router address
:return: a new RoutingTable instance or None if the given router is
currently unable to provide routing information
:raise ServiceUnavailable: if no writers are available
:raise ProtocolError: if the routing information received is unusable
"""
new_routing_info = self.fetch_routing_info(address)
if new_routing_info is None:
return None
# Parse routing info and count the number of each type of server
new_routing_table = RoutingTable.parse_routing_info(new_routing_info)
num_routers = len(new_routing_table.routers)
num_readers = len(new_routing_table.readers)
num_writers = len(new_routing_table.writers)
# No writers are available. This likely indicates a temporary state,
# such as leader switching, so we should not signal an error.
# When no writers available, then we flag we are reading in absence of writer
self.missing_writer = (num_writers == 0)
# No routers
if num_routers == 0:
raise RoutingProtocolError("No routing servers returned from server %r" % (address,))
# No readers
if num_readers == 0:
raise RoutingProtocolError("No read servers returned from server %r" % (address,))
# At least one of each is fine, so return this table
return new_routing_table
|
python
|
def fetch_routing_table(self, address):
""" Fetch a routing table from a given router address.
:param address: router address
:return: a new RoutingTable instance or None if the given router is
currently unable to provide routing information
:raise ServiceUnavailable: if no writers are available
:raise ProtocolError: if the routing information received is unusable
"""
new_routing_info = self.fetch_routing_info(address)
if new_routing_info is None:
return None
# Parse routing info and count the number of each type of server
new_routing_table = RoutingTable.parse_routing_info(new_routing_info)
num_routers = len(new_routing_table.routers)
num_readers = len(new_routing_table.readers)
num_writers = len(new_routing_table.writers)
# No writers are available. This likely indicates a temporary state,
# such as leader switching, so we should not signal an error.
# When no writers available, then we flag we are reading in absence of writer
self.missing_writer = (num_writers == 0)
# No routers
if num_routers == 0:
raise RoutingProtocolError("No routing servers returned from server %r" % (address,))
# No readers
if num_readers == 0:
raise RoutingProtocolError("No read servers returned from server %r" % (address,))
# At least one of each is fine, so return this table
return new_routing_table
|
[
"def",
"fetch_routing_table",
"(",
"self",
",",
"address",
")",
":",
"new_routing_info",
"=",
"self",
".",
"fetch_routing_info",
"(",
"address",
")",
"if",
"new_routing_info",
"is",
"None",
":",
"return",
"None",
"# Parse routing info and count the number of each type of server",
"new_routing_table",
"=",
"RoutingTable",
".",
"parse_routing_info",
"(",
"new_routing_info",
")",
"num_routers",
"=",
"len",
"(",
"new_routing_table",
".",
"routers",
")",
"num_readers",
"=",
"len",
"(",
"new_routing_table",
".",
"readers",
")",
"num_writers",
"=",
"len",
"(",
"new_routing_table",
".",
"writers",
")",
"# No writers are available. This likely indicates a temporary state,",
"# such as leader switching, so we should not signal an error.",
"# When no writers available, then we flag we are reading in absence of writer",
"self",
".",
"missing_writer",
"=",
"(",
"num_writers",
"==",
"0",
")",
"# No routers",
"if",
"num_routers",
"==",
"0",
":",
"raise",
"RoutingProtocolError",
"(",
"\"No routing servers returned from server %r\"",
"%",
"(",
"address",
",",
")",
")",
"# No readers",
"if",
"num_readers",
"==",
"0",
":",
"raise",
"RoutingProtocolError",
"(",
"\"No read servers returned from server %r\"",
"%",
"(",
"address",
",",
")",
")",
"# At least one of each is fine, so return this table",
"return",
"new_routing_table"
] |
Fetch a routing table from a given router address.
:param address: router address
:return: a new RoutingTable instance or None if the given router is
currently unable to provide routing information
:raise ServiceUnavailable: if no writers are available
:raise ProtocolError: if the routing information received is unusable
|
[
"Fetch",
"a",
"routing",
"table",
"from",
"a",
"given",
"router",
"address",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/routing.py#L262-L295
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/routing.py
|
RoutingConnectionPool.update_routing_table_from
|
def update_routing_table_from(self, *routers):
""" Try to update routing tables with the given routers.
:return: True if the routing table is successfully updated, otherwise False
"""
for router in routers:
new_routing_table = self.fetch_routing_table(router)
if new_routing_table is not None:
self.routing_table.update(new_routing_table)
return True
return False
|
python
|
def update_routing_table_from(self, *routers):
""" Try to update routing tables with the given routers.
:return: True if the routing table is successfully updated, otherwise False
"""
for router in routers:
new_routing_table = self.fetch_routing_table(router)
if new_routing_table is not None:
self.routing_table.update(new_routing_table)
return True
return False
|
[
"def",
"update_routing_table_from",
"(",
"self",
",",
"*",
"routers",
")",
":",
"for",
"router",
"in",
"routers",
":",
"new_routing_table",
"=",
"self",
".",
"fetch_routing_table",
"(",
"router",
")",
"if",
"new_routing_table",
"is",
"not",
"None",
":",
"self",
".",
"routing_table",
".",
"update",
"(",
"new_routing_table",
")",
"return",
"True",
"return",
"False"
] |
Try to update routing tables with the given routers.
:return: True if the routing table is successfully updated, otherwise False
|
[
"Try",
"to",
"update",
"routing",
"tables",
"with",
"the",
"given",
"routers",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/routing.py#L297-L307
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/routing.py
|
RoutingConnectionPool.update_routing_table
|
def update_routing_table(self):
""" Update the routing table from the first router able to provide
valid routing information.
"""
# copied because it can be modified
existing_routers = list(self.routing_table.routers)
has_tried_initial_routers = False
if self.missing_writer:
has_tried_initial_routers = True
if self.update_routing_table_from(self.initial_address):
return
if self.update_routing_table_from(*existing_routers):
return
if not has_tried_initial_routers and self.initial_address not in existing_routers:
if self.update_routing_table_from(self.initial_address):
return
# None of the routers have been successful, so just fail
raise ServiceUnavailable("Unable to retrieve routing information")
|
python
|
def update_routing_table(self):
""" Update the routing table from the first router able to provide
valid routing information.
"""
# copied because it can be modified
existing_routers = list(self.routing_table.routers)
has_tried_initial_routers = False
if self.missing_writer:
has_tried_initial_routers = True
if self.update_routing_table_from(self.initial_address):
return
if self.update_routing_table_from(*existing_routers):
return
if not has_tried_initial_routers and self.initial_address not in existing_routers:
if self.update_routing_table_from(self.initial_address):
return
# None of the routers have been successful, so just fail
raise ServiceUnavailable("Unable to retrieve routing information")
|
[
"def",
"update_routing_table",
"(",
"self",
")",
":",
"# copied because it can be modified",
"existing_routers",
"=",
"list",
"(",
"self",
".",
"routing_table",
".",
"routers",
")",
"has_tried_initial_routers",
"=",
"False",
"if",
"self",
".",
"missing_writer",
":",
"has_tried_initial_routers",
"=",
"True",
"if",
"self",
".",
"update_routing_table_from",
"(",
"self",
".",
"initial_address",
")",
":",
"return",
"if",
"self",
".",
"update_routing_table_from",
"(",
"*",
"existing_routers",
")",
":",
"return",
"if",
"not",
"has_tried_initial_routers",
"and",
"self",
".",
"initial_address",
"not",
"in",
"existing_routers",
":",
"if",
"self",
".",
"update_routing_table_from",
"(",
"self",
".",
"initial_address",
")",
":",
"return",
"# None of the routers have been successful, so just fail",
"raise",
"ServiceUnavailable",
"(",
"\"Unable to retrieve routing information\"",
")"
] |
Update the routing table from the first router able to provide
valid routing information.
|
[
"Update",
"the",
"routing",
"table",
"from",
"the",
"first",
"router",
"able",
"to",
"provide",
"valid",
"routing",
"information",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/routing.py#L309-L330
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/routing.py
|
RoutingConnectionPool.ensure_routing_table_is_fresh
|
def ensure_routing_table_is_fresh(self, access_mode):
""" Update the routing table if stale.
This method performs two freshness checks, before and after acquiring
the refresh lock. If the routing table is already fresh on entry, the
method exits immediately; otherwise, the refresh lock is acquired and
the second freshness check that follows determines whether an update
is still required.
This method is thread-safe.
:return: `True` if an update was required, `False` otherwise.
"""
if self.routing_table.is_fresh(access_mode):
return False
with self.refresh_lock:
if self.routing_table.is_fresh(access_mode):
if access_mode == READ_ACCESS:
# if reader is fresh but writers is not fresh, then we are reading in absence of writer
self.missing_writer = not self.routing_table.is_fresh(WRITE_ACCESS)
return False
self.update_routing_table()
self.update_connection_pool()
return True
|
python
|
def ensure_routing_table_is_fresh(self, access_mode):
""" Update the routing table if stale.
This method performs two freshness checks, before and after acquiring
the refresh lock. If the routing table is already fresh on entry, the
method exits immediately; otherwise, the refresh lock is acquired and
the second freshness check that follows determines whether an update
is still required.
This method is thread-safe.
:return: `True` if an update was required, `False` otherwise.
"""
if self.routing_table.is_fresh(access_mode):
return False
with self.refresh_lock:
if self.routing_table.is_fresh(access_mode):
if access_mode == READ_ACCESS:
# if reader is fresh but writers is not fresh, then we are reading in absence of writer
self.missing_writer = not self.routing_table.is_fresh(WRITE_ACCESS)
return False
self.update_routing_table()
self.update_connection_pool()
return True
|
[
"def",
"ensure_routing_table_is_fresh",
"(",
"self",
",",
"access_mode",
")",
":",
"if",
"self",
".",
"routing_table",
".",
"is_fresh",
"(",
"access_mode",
")",
":",
"return",
"False",
"with",
"self",
".",
"refresh_lock",
":",
"if",
"self",
".",
"routing_table",
".",
"is_fresh",
"(",
"access_mode",
")",
":",
"if",
"access_mode",
"==",
"READ_ACCESS",
":",
"# if reader is fresh but writers is not fresh, then we are reading in absence of writer",
"self",
".",
"missing_writer",
"=",
"not",
"self",
".",
"routing_table",
".",
"is_fresh",
"(",
"WRITE_ACCESS",
")",
"return",
"False",
"self",
".",
"update_routing_table",
"(",
")",
"self",
".",
"update_connection_pool",
"(",
")",
"return",
"True"
] |
Update the routing table if stale.
This method performs two freshness checks, before and after acquiring
the refresh lock. If the routing table is already fresh on entry, the
method exits immediately; otherwise, the refresh lock is acquired and
the second freshness check that follows determines whether an update
is still required.
This method is thread-safe.
:return: `True` if an update was required, `False` otherwise.
|
[
"Update",
"the",
"routing",
"table",
"if",
"stale",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/routing.py#L338-L361
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/routing.py
|
RoutingConnectionPool.deactivate
|
def deactivate(self, address):
""" Deactivate an address from the connection pool,
if present, remove from the routing table and also closing
all idle connections to that address.
"""
log_debug("[#0000] C: <ROUTING> Deactivating address %r", address)
# We use `discard` instead of `remove` here since the former
# will not fail if the address has already been removed.
self.routing_table.routers.discard(address)
self.routing_table.readers.discard(address)
self.routing_table.writers.discard(address)
log_debug("[#0000] C: <ROUTING> table=%r", self.routing_table)
super(RoutingConnectionPool, self).deactivate(address)
|
python
|
def deactivate(self, address):
""" Deactivate an address from the connection pool,
if present, remove from the routing table and also closing
all idle connections to that address.
"""
log_debug("[#0000] C: <ROUTING> Deactivating address %r", address)
# We use `discard` instead of `remove` here since the former
# will not fail if the address has already been removed.
self.routing_table.routers.discard(address)
self.routing_table.readers.discard(address)
self.routing_table.writers.discard(address)
log_debug("[#0000] C: <ROUTING> table=%r", self.routing_table)
super(RoutingConnectionPool, self).deactivate(address)
|
[
"def",
"deactivate",
"(",
"self",
",",
"address",
")",
":",
"log_debug",
"(",
"\"[#0000] C: <ROUTING> Deactivating address %r\"",
",",
"address",
")",
"# We use `discard` instead of `remove` here since the former",
"# will not fail if the address has already been removed.",
"self",
".",
"routing_table",
".",
"routers",
".",
"discard",
"(",
"address",
")",
"self",
".",
"routing_table",
".",
"readers",
".",
"discard",
"(",
"address",
")",
"self",
".",
"routing_table",
".",
"writers",
".",
"discard",
"(",
"address",
")",
"log_debug",
"(",
"\"[#0000] C: <ROUTING> table=%r\"",
",",
"self",
".",
"routing_table",
")",
"super",
"(",
"RoutingConnectionPool",
",",
"self",
")",
".",
"deactivate",
"(",
"address",
")"
] |
Deactivate an address from the connection pool,
if present, remove from the routing table and also closing
all idle connections to that address.
|
[
"Deactivate",
"an",
"address",
"from",
"the",
"connection",
"pool",
"if",
"present",
"remove",
"from",
"the",
"routing",
"table",
"and",
"also",
"closing",
"all",
"idle",
"connections",
"to",
"that",
"address",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/routing.py#L389-L401
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/routing.py
|
RoutingConnectionPool.remove_writer
|
def remove_writer(self, address):
""" Remove a writer address from the routing table, if present.
"""
log_debug("[#0000] C: <ROUTING> Removing writer %r", address)
self.routing_table.writers.discard(address)
log_debug("[#0000] C: <ROUTING> table=%r", self.routing_table)
|
python
|
def remove_writer(self, address):
""" Remove a writer address from the routing table, if present.
"""
log_debug("[#0000] C: <ROUTING> Removing writer %r", address)
self.routing_table.writers.discard(address)
log_debug("[#0000] C: <ROUTING> table=%r", self.routing_table)
|
[
"def",
"remove_writer",
"(",
"self",
",",
"address",
")",
":",
"log_debug",
"(",
"\"[#0000] C: <ROUTING> Removing writer %r\"",
",",
"address",
")",
"self",
".",
"routing_table",
".",
"writers",
".",
"discard",
"(",
"address",
")",
"log_debug",
"(",
"\"[#0000] C: <ROUTING> table=%r\"",
",",
"self",
".",
"routing_table",
")"
] |
Remove a writer address from the routing table, if present.
|
[
"Remove",
"a",
"writer",
"address",
"from",
"the",
"routing",
"table",
"if",
"present",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/routing.py#L403-L408
|
train
|
neo4j-drivers/neobolt
|
neobolt/impl/python/routing.py
|
RoutingConnectionPool.handle
|
def handle(self, error, connection):
""" Handle any cleanup or similar activity related to an error
occurring on a pooled connection.
"""
error_class = error.__class__
if error_class in (ConnectionExpired, ServiceUnavailable, DatabaseUnavailableError):
self.deactivate(connection.address)
elif error_class in (NotALeaderError, ForbiddenOnReadOnlyDatabaseError):
self.remove_writer(connection.address)
|
python
|
def handle(self, error, connection):
""" Handle any cleanup or similar activity related to an error
occurring on a pooled connection.
"""
error_class = error.__class__
if error_class in (ConnectionExpired, ServiceUnavailable, DatabaseUnavailableError):
self.deactivate(connection.address)
elif error_class in (NotALeaderError, ForbiddenOnReadOnlyDatabaseError):
self.remove_writer(connection.address)
|
[
"def",
"handle",
"(",
"self",
",",
"error",
",",
"connection",
")",
":",
"error_class",
"=",
"error",
".",
"__class__",
"if",
"error_class",
"in",
"(",
"ConnectionExpired",
",",
"ServiceUnavailable",
",",
"DatabaseUnavailableError",
")",
":",
"self",
".",
"deactivate",
"(",
"connection",
".",
"address",
")",
"elif",
"error_class",
"in",
"(",
"NotALeaderError",
",",
"ForbiddenOnReadOnlyDatabaseError",
")",
":",
"self",
".",
"remove_writer",
"(",
"connection",
".",
"address",
")"
] |
Handle any cleanup or similar activity related to an error
occurring on a pooled connection.
|
[
"Handle",
"any",
"cleanup",
"or",
"similar",
"activity",
"related",
"to",
"an",
"error",
"occurring",
"on",
"a",
"pooled",
"connection",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/impl/python/routing.py#L410-L418
|
train
|
neo4j-drivers/neobolt
|
neobolt/types/spatial.py
|
point_type
|
def point_type(name, fields, srid_map):
""" Dynamically create a Point subclass.
"""
def srid(self):
try:
return srid_map[len(self)]
except KeyError:
return None
attributes = {"srid": property(srid)}
for index, subclass_field in enumerate(fields):
def accessor(self, i=index, f=subclass_field):
try:
return self[i]
except IndexError:
raise AttributeError(f)
for field_alias in {subclass_field, "xyz"[index]}:
attributes[field_alias] = property(accessor)
cls = type(name, (Point,), attributes)
with __srid_table_lock:
for dim, srid in srid_map.items():
__srid_table[srid] = (cls, dim)
return cls
|
python
|
def point_type(name, fields, srid_map):
""" Dynamically create a Point subclass.
"""
def srid(self):
try:
return srid_map[len(self)]
except KeyError:
return None
attributes = {"srid": property(srid)}
for index, subclass_field in enumerate(fields):
def accessor(self, i=index, f=subclass_field):
try:
return self[i]
except IndexError:
raise AttributeError(f)
for field_alias in {subclass_field, "xyz"[index]}:
attributes[field_alias] = property(accessor)
cls = type(name, (Point,), attributes)
with __srid_table_lock:
for dim, srid in srid_map.items():
__srid_table[srid] = (cls, dim)
return cls
|
[
"def",
"point_type",
"(",
"name",
",",
"fields",
",",
"srid_map",
")",
":",
"def",
"srid",
"(",
"self",
")",
":",
"try",
":",
"return",
"srid_map",
"[",
"len",
"(",
"self",
")",
"]",
"except",
"KeyError",
":",
"return",
"None",
"attributes",
"=",
"{",
"\"srid\"",
":",
"property",
"(",
"srid",
")",
"}",
"for",
"index",
",",
"subclass_field",
"in",
"enumerate",
"(",
"fields",
")",
":",
"def",
"accessor",
"(",
"self",
",",
"i",
"=",
"index",
",",
"f",
"=",
"subclass_field",
")",
":",
"try",
":",
"return",
"self",
"[",
"i",
"]",
"except",
"IndexError",
":",
"raise",
"AttributeError",
"(",
"f",
")",
"for",
"field_alias",
"in",
"{",
"subclass_field",
",",
"\"xyz\"",
"[",
"index",
"]",
"}",
":",
"attributes",
"[",
"field_alias",
"]",
"=",
"property",
"(",
"accessor",
")",
"cls",
"=",
"type",
"(",
"name",
",",
"(",
"Point",
",",
")",
",",
"attributes",
")",
"with",
"__srid_table_lock",
":",
"for",
"dim",
",",
"srid",
"in",
"srid_map",
".",
"items",
"(",
")",
":",
"__srid_table",
"[",
"srid",
"]",
"=",
"(",
"cls",
",",
"dim",
")",
"return",
"cls"
] |
Dynamically create a Point subclass.
|
[
"Dynamically",
"create",
"a",
"Point",
"subclass",
"."
] |
724569d76e85777c4f5e30e8d0a18116bda4d8cd
|
https://github.com/neo4j-drivers/neobolt/blob/724569d76e85777c4f5e30e8d0a18116bda4d8cd/neobolt/types/spatial.py#L72-L101
|
train
|
praw-dev/prawcore
|
examples/read_only_auth_trophies.py
|
main
|
def main():
"""Provide the program's entry point when directly executed."""
if len(sys.argv) != 2:
print("Usage: {} USERNAME".format(sys.argv[0]))
return 1
authenticator = prawcore.TrustedAuthenticator(
prawcore.Requestor("prawcore_read_only_example"),
os.environ["PRAWCORE_CLIENT_ID"],
os.environ["PRAWCORE_CLIENT_SECRET"],
)
authorizer = prawcore.ReadOnlyAuthorizer(authenticator)
authorizer.refresh()
user = sys.argv[1]
with prawcore.session(authorizer) as session:
data = session.request("GET", "/api/v1/user/{}/trophies".format(user))
for trophy in data["data"]["trophies"]:
description = trophy["data"]["description"]
print(
trophy["data"]["name"]
+ (" ({})".format(description) if description else "")
)
return 0
|
python
|
def main():
"""Provide the program's entry point when directly executed."""
if len(sys.argv) != 2:
print("Usage: {} USERNAME".format(sys.argv[0]))
return 1
authenticator = prawcore.TrustedAuthenticator(
prawcore.Requestor("prawcore_read_only_example"),
os.environ["PRAWCORE_CLIENT_ID"],
os.environ["PRAWCORE_CLIENT_SECRET"],
)
authorizer = prawcore.ReadOnlyAuthorizer(authenticator)
authorizer.refresh()
user = sys.argv[1]
with prawcore.session(authorizer) as session:
data = session.request("GET", "/api/v1/user/{}/trophies".format(user))
for trophy in data["data"]["trophies"]:
description = trophy["data"]["description"]
print(
trophy["data"]["name"]
+ (" ({})".format(description) if description else "")
)
return 0
|
[
"def",
"main",
"(",
")",
":",
"if",
"len",
"(",
"sys",
".",
"argv",
")",
"!=",
"2",
":",
"print",
"(",
"\"Usage: {} USERNAME\"",
".",
"format",
"(",
"sys",
".",
"argv",
"[",
"0",
"]",
")",
")",
"return",
"1",
"authenticator",
"=",
"prawcore",
".",
"TrustedAuthenticator",
"(",
"prawcore",
".",
"Requestor",
"(",
"\"prawcore_read_only_example\"",
")",
",",
"os",
".",
"environ",
"[",
"\"PRAWCORE_CLIENT_ID\"",
"]",
",",
"os",
".",
"environ",
"[",
"\"PRAWCORE_CLIENT_SECRET\"",
"]",
",",
")",
"authorizer",
"=",
"prawcore",
".",
"ReadOnlyAuthorizer",
"(",
"authenticator",
")",
"authorizer",
".",
"refresh",
"(",
")",
"user",
"=",
"sys",
".",
"argv",
"[",
"1",
"]",
"with",
"prawcore",
".",
"session",
"(",
"authorizer",
")",
"as",
"session",
":",
"data",
"=",
"session",
".",
"request",
"(",
"\"GET\"",
",",
"\"/api/v1/user/{}/trophies\"",
".",
"format",
"(",
"user",
")",
")",
"for",
"trophy",
"in",
"data",
"[",
"\"data\"",
"]",
"[",
"\"trophies\"",
"]",
":",
"description",
"=",
"trophy",
"[",
"\"data\"",
"]",
"[",
"\"description\"",
"]",
"print",
"(",
"trophy",
"[",
"\"data\"",
"]",
"[",
"\"name\"",
"]",
"+",
"(",
"\" ({})\"",
".",
"format",
"(",
"description",
")",
"if",
"description",
"else",
"\"\"",
")",
")",
"return",
"0"
] |
Provide the program's entry point when directly executed.
|
[
"Provide",
"the",
"program",
"s",
"entry",
"point",
"when",
"directly",
"executed",
"."
] |
b16ae88a1f2bf98095ed6fe64851cb7add7ed752
|
https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/examples/read_only_auth_trophies.py#L14-L39
|
train
|
chaoss/grimoirelab-sigils
|
src/migration/to_kibana5.py
|
main
|
def main():
"""Read a directory containing json files for Kibana panels,
beautify them and replace size value in aggregations as specified
through corresponding params params.
"""
args = parse_args()
configure_logging(args.debug)
src_path = args.src_path
dest_path = args.dest_path
old_str1 = '\\"size\\":' + args.old_size
old_str2 = '\\"size\\": ' + args.old_size
new_str = '\\"size\\":' + args.new_size
logging.info('Input path: %s', src_path)
logging.info('Output path: %s', dest_path)
logging.info('old str: %s', old_str1)
logging.info('old str: %s', old_str2)
logging.info('new str: %s', new_str)
if os.path.abspath(src_path) == os.path.abspath(dest_path):
logging.error('source and destination directiories must be different')
sys.exit(1)
# Iterate over input files
json_files = [f for f in os.listdir(src_path) if f.endswith('.json')]
for filename in json_files:
in_file_path = os.path.join(src_path, filename)
in_file_path = os.path.join(src_path, filename)
out_file_path = os.path.join(dest_path, filename)
logging.info('INPUT FILE: %s',in_file_path)
logging.info('OUTPUT FILE: %s',out_file_path)
# First beautify input
pretty = utils.beautify(filename=in_file_path)
# Iterate the beautified json string line by line
pretty_replaced = utils.replace(pretty, old_str1, new_str)
pretty_replaced = utils.replace(pretty_replaced, old_str2, new_str)
with open(out_file_path, 'w') as output_file:
output_file.write(pretty_replaced)
logging.info('This is the end.')
|
python
|
def main():
"""Read a directory containing json files for Kibana panels,
beautify them and replace size value in aggregations as specified
through corresponding params params.
"""
args = parse_args()
configure_logging(args.debug)
src_path = args.src_path
dest_path = args.dest_path
old_str1 = '\\"size\\":' + args.old_size
old_str2 = '\\"size\\": ' + args.old_size
new_str = '\\"size\\":' + args.new_size
logging.info('Input path: %s', src_path)
logging.info('Output path: %s', dest_path)
logging.info('old str: %s', old_str1)
logging.info('old str: %s', old_str2)
logging.info('new str: %s', new_str)
if os.path.abspath(src_path) == os.path.abspath(dest_path):
logging.error('source and destination directiories must be different')
sys.exit(1)
# Iterate over input files
json_files = [f for f in os.listdir(src_path) if f.endswith('.json')]
for filename in json_files:
in_file_path = os.path.join(src_path, filename)
in_file_path = os.path.join(src_path, filename)
out_file_path = os.path.join(dest_path, filename)
logging.info('INPUT FILE: %s',in_file_path)
logging.info('OUTPUT FILE: %s',out_file_path)
# First beautify input
pretty = utils.beautify(filename=in_file_path)
# Iterate the beautified json string line by line
pretty_replaced = utils.replace(pretty, old_str1, new_str)
pretty_replaced = utils.replace(pretty_replaced, old_str2, new_str)
with open(out_file_path, 'w') as output_file:
output_file.write(pretty_replaced)
logging.info('This is the end.')
|
[
"def",
"main",
"(",
")",
":",
"args",
"=",
"parse_args",
"(",
")",
"configure_logging",
"(",
"args",
".",
"debug",
")",
"src_path",
"=",
"args",
".",
"src_path",
"dest_path",
"=",
"args",
".",
"dest_path",
"old_str1",
"=",
"'\\\\\"size\\\\\":'",
"+",
"args",
".",
"old_size",
"old_str2",
"=",
"'\\\\\"size\\\\\": '",
"+",
"args",
".",
"old_size",
"new_str",
"=",
"'\\\\\"size\\\\\":'",
"+",
"args",
".",
"new_size",
"logging",
".",
"info",
"(",
"'Input path: %s'",
",",
"src_path",
")",
"logging",
".",
"info",
"(",
"'Output path: %s'",
",",
"dest_path",
")",
"logging",
".",
"info",
"(",
"'old str: %s'",
",",
"old_str1",
")",
"logging",
".",
"info",
"(",
"'old str: %s'",
",",
"old_str2",
")",
"logging",
".",
"info",
"(",
"'new str: %s'",
",",
"new_str",
")",
"if",
"os",
".",
"path",
".",
"abspath",
"(",
"src_path",
")",
"==",
"os",
".",
"path",
".",
"abspath",
"(",
"dest_path",
")",
":",
"logging",
".",
"error",
"(",
"'source and destination directiories must be different'",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"# Iterate over input files",
"json_files",
"=",
"[",
"f",
"for",
"f",
"in",
"os",
".",
"listdir",
"(",
"src_path",
")",
"if",
"f",
".",
"endswith",
"(",
"'.json'",
")",
"]",
"for",
"filename",
"in",
"json_files",
":",
"in_file_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"src_path",
",",
"filename",
")",
"in_file_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"src_path",
",",
"filename",
")",
"out_file_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dest_path",
",",
"filename",
")",
"logging",
".",
"info",
"(",
"'INPUT FILE: %s'",
",",
"in_file_path",
")",
"logging",
".",
"info",
"(",
"'OUTPUT FILE: %s'",
",",
"out_file_path",
")",
"# First beautify input",
"pretty",
"=",
"utils",
".",
"beautify",
"(",
"filename",
"=",
"in_file_path",
")",
"# Iterate the beautified json string line by line",
"pretty_replaced",
"=",
"utils",
".",
"replace",
"(",
"pretty",
",",
"old_str1",
",",
"new_str",
")",
"pretty_replaced",
"=",
"utils",
".",
"replace",
"(",
"pretty_replaced",
",",
"old_str2",
",",
"new_str",
")",
"with",
"open",
"(",
"out_file_path",
",",
"'w'",
")",
"as",
"output_file",
":",
"output_file",
".",
"write",
"(",
"pretty_replaced",
")",
"logging",
".",
"info",
"(",
"'This is the end.'",
")"
] |
Read a directory containing json files for Kibana panels,
beautify them and replace size value in aggregations as specified
through corresponding params params.
|
[
"Read",
"a",
"directory",
"containing",
"json",
"files",
"for",
"Kibana",
"panels",
"beautify",
"them",
"and",
"replace",
"size",
"value",
"in",
"aggregations",
"as",
"specified",
"through",
"corresponding",
"params",
"params",
"."
] |
33d395195acb316287143a535a2c6e4009bf0528
|
https://github.com/chaoss/grimoirelab-sigils/blob/33d395195acb316287143a535a2c6e4009bf0528/src/migration/to_kibana5.py#L40-L84
|
train
|
chaoss/grimoirelab-sigils
|
src/migration/to_kibana5.py
|
parse_args
|
def parse_args():
"""Parse arguments from the command line"""
parser = argparse.ArgumentParser(description=TO_KIBANA5_DESC_MSG)
parser.add_argument('-s', '--source', dest='src_path', \
required=True, help='source directory')
parser.add_argument('-d', '--dest', dest='dest_path', \
required=True, help='destination directory')
parser.add_argument('-o', '--old-size', dest='old_size', \
default='0', help='aggregation old size')
parser.add_argument('-n', '--new-size', dest='new_size', \
default='1000', help='aggregation new size')
parser.add_argument('-g', '--debug', dest='debug',
action='store_true')
return parser.parse_args()
|
python
|
def parse_args():
"""Parse arguments from the command line"""
parser = argparse.ArgumentParser(description=TO_KIBANA5_DESC_MSG)
parser.add_argument('-s', '--source', dest='src_path', \
required=True, help='source directory')
parser.add_argument('-d', '--dest', dest='dest_path', \
required=True, help='destination directory')
parser.add_argument('-o', '--old-size', dest='old_size', \
default='0', help='aggregation old size')
parser.add_argument('-n', '--new-size', dest='new_size', \
default='1000', help='aggregation new size')
parser.add_argument('-g', '--debug', dest='debug',
action='store_true')
return parser.parse_args()
|
[
"def",
"parse_args",
"(",
")",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"description",
"=",
"TO_KIBANA5_DESC_MSG",
")",
"parser",
".",
"add_argument",
"(",
"'-s'",
",",
"'--source'",
",",
"dest",
"=",
"'src_path'",
",",
"required",
"=",
"True",
",",
"help",
"=",
"'source directory'",
")",
"parser",
".",
"add_argument",
"(",
"'-d'",
",",
"'--dest'",
",",
"dest",
"=",
"'dest_path'",
",",
"required",
"=",
"True",
",",
"help",
"=",
"'destination directory'",
")",
"parser",
".",
"add_argument",
"(",
"'-o'",
",",
"'--old-size'",
",",
"dest",
"=",
"'old_size'",
",",
"default",
"=",
"'0'",
",",
"help",
"=",
"'aggregation old size'",
")",
"parser",
".",
"add_argument",
"(",
"'-n'",
",",
"'--new-size'",
",",
"dest",
"=",
"'new_size'",
",",
"default",
"=",
"'1000'",
",",
"help",
"=",
"'aggregation new size'",
")",
"parser",
".",
"add_argument",
"(",
"'-g'",
",",
"'--debug'",
",",
"dest",
"=",
"'debug'",
",",
"action",
"=",
"'store_true'",
")",
"return",
"parser",
".",
"parse_args",
"(",
")"
] |
Parse arguments from the command line
|
[
"Parse",
"arguments",
"from",
"the",
"command",
"line"
] |
33d395195acb316287143a535a2c6e4009bf0528
|
https://github.com/chaoss/grimoirelab-sigils/blob/33d395195acb316287143a535a2c6e4009bf0528/src/migration/to_kibana5.py#L86-L104
|
train
|
chaoss/grimoirelab-sigils
|
src/migration/to_kibana5.py
|
configure_logging
|
def configure_logging(debug=False):
"""Configure logging
The function configures log messages. By default, log messages
are sent to stderr. Set the parameter `debug` to activate the
debug mode.
:param debug: set the debug mode
"""
if not debug:
logging.basicConfig(level=logging.INFO,
format=LOG_FORMAT)
else:
logging.basicConfig(level=logging.DEBUG,
format=DEBUG_LOG_FORMAT)
|
python
|
def configure_logging(debug=False):
"""Configure logging
The function configures log messages. By default, log messages
are sent to stderr. Set the parameter `debug` to activate the
debug mode.
:param debug: set the debug mode
"""
if not debug:
logging.basicConfig(level=logging.INFO,
format=LOG_FORMAT)
else:
logging.basicConfig(level=logging.DEBUG,
format=DEBUG_LOG_FORMAT)
|
[
"def",
"configure_logging",
"(",
"debug",
"=",
"False",
")",
":",
"if",
"not",
"debug",
":",
"logging",
".",
"basicConfig",
"(",
"level",
"=",
"logging",
".",
"INFO",
",",
"format",
"=",
"LOG_FORMAT",
")",
"else",
":",
"logging",
".",
"basicConfig",
"(",
"level",
"=",
"logging",
".",
"DEBUG",
",",
"format",
"=",
"DEBUG_LOG_FORMAT",
")"
] |
Configure logging
The function configures log messages. By default, log messages
are sent to stderr. Set the parameter `debug` to activate the
debug mode.
:param debug: set the debug mode
|
[
"Configure",
"logging",
"The",
"function",
"configures",
"log",
"messages",
".",
"By",
"default",
"log",
"messages",
"are",
"sent",
"to",
"stderr",
".",
"Set",
"the",
"parameter",
"debug",
"to",
"activate",
"the",
"debug",
"mode",
".",
":",
"param",
"debug",
":",
"set",
"the",
"debug",
"mode"
] |
33d395195acb316287143a535a2c6e4009bf0528
|
https://github.com/chaoss/grimoirelab-sigils/blob/33d395195acb316287143a535a2c6e4009bf0528/src/migration/to_kibana5.py#L107-L119
|
train
|
MartinThoma/memtop
|
memtop/__init__.py
|
signal_handler
|
def signal_handler(signal_name, frame):
"""Quit signal handler."""
sys.stdout.flush()
print("\nSIGINT in frame signal received. Quitting...")
sys.stdout.flush()
sys.exit(0)
|
python
|
def signal_handler(signal_name, frame):
"""Quit signal handler."""
sys.stdout.flush()
print("\nSIGINT in frame signal received. Quitting...")
sys.stdout.flush()
sys.exit(0)
|
[
"def",
"signal_handler",
"(",
"signal_name",
",",
"frame",
")",
":",
"sys",
".",
"stdout",
".",
"flush",
"(",
")",
"print",
"(",
"\"\\nSIGINT in frame signal received. Quitting...\"",
")",
"sys",
".",
"stdout",
".",
"flush",
"(",
")",
"sys",
".",
"exit",
"(",
"0",
")"
] |
Quit signal handler.
|
[
"Quit",
"signal",
"handler",
"."
] |
504d251f1951922db84883c2e660ba7e754d1546
|
https://github.com/MartinThoma/memtop/blob/504d251f1951922db84883c2e660ba7e754d1546/memtop/__init__.py#L68-L73
|
train
|
MartinThoma/memtop
|
memtop/__init__.py
|
graph_format
|
def graph_format(new_mem, old_mem, is_firstiteration=True):
"""Show changes graphically in memory consumption"""
if is_firstiteration:
output = " n/a "
elif new_mem - old_mem > 50000000:
output = " +++++"
elif new_mem - old_mem > 20000000:
output = " ++++ "
elif new_mem - old_mem > 5000000:
output = " +++ "
elif new_mem - old_mem > 1000000:
output = " ++ "
elif new_mem - old_mem > 50000:
output = " + "
elif old_mem - new_mem > 10000000:
output = "--- "
elif old_mem - new_mem > 2000000:
output = " -- "
elif old_mem - new_mem > 100000:
output = " - "
else:
output = " "
return output
|
python
|
def graph_format(new_mem, old_mem, is_firstiteration=True):
"""Show changes graphically in memory consumption"""
if is_firstiteration:
output = " n/a "
elif new_mem - old_mem > 50000000:
output = " +++++"
elif new_mem - old_mem > 20000000:
output = " ++++ "
elif new_mem - old_mem > 5000000:
output = " +++ "
elif new_mem - old_mem > 1000000:
output = " ++ "
elif new_mem - old_mem > 50000:
output = " + "
elif old_mem - new_mem > 10000000:
output = "--- "
elif old_mem - new_mem > 2000000:
output = " -- "
elif old_mem - new_mem > 100000:
output = " - "
else:
output = " "
return output
|
[
"def",
"graph_format",
"(",
"new_mem",
",",
"old_mem",
",",
"is_firstiteration",
"=",
"True",
")",
":",
"if",
"is_firstiteration",
":",
"output",
"=",
"\" n/a \"",
"elif",
"new_mem",
"-",
"old_mem",
">",
"50000000",
":",
"output",
"=",
"\" +++++\"",
"elif",
"new_mem",
"-",
"old_mem",
">",
"20000000",
":",
"output",
"=",
"\" ++++ \"",
"elif",
"new_mem",
"-",
"old_mem",
">",
"5000000",
":",
"output",
"=",
"\" +++ \"",
"elif",
"new_mem",
"-",
"old_mem",
">",
"1000000",
":",
"output",
"=",
"\" ++ \"",
"elif",
"new_mem",
"-",
"old_mem",
">",
"50000",
":",
"output",
"=",
"\" + \"",
"elif",
"old_mem",
"-",
"new_mem",
">",
"10000000",
":",
"output",
"=",
"\"--- \"",
"elif",
"old_mem",
"-",
"new_mem",
">",
"2000000",
":",
"output",
"=",
"\" -- \"",
"elif",
"old_mem",
"-",
"new_mem",
">",
"100000",
":",
"output",
"=",
"\" - \"",
"else",
":",
"output",
"=",
"\" \"",
"return",
"output"
] |
Show changes graphically in memory consumption
|
[
"Show",
"changes",
"graphically",
"in",
"memory",
"consumption"
] |
504d251f1951922db84883c2e660ba7e754d1546
|
https://github.com/MartinThoma/memtop/blob/504d251f1951922db84883c2e660ba7e754d1546/memtop/__init__.py#L93-L115
|
train
|
MartinThoma/memtop
|
memtop/__init__.py
|
get_cur_mem_use
|
def get_cur_mem_use():
"""return utilization of memory"""
# http://lwn.net/Articles/28345/
lines = open("/proc/meminfo", 'r').readlines()
emptySpace = re.compile('[ ]+')
for line in lines:
if "MemTotal" in line:
memtotal = float(emptySpace.split(line)[1])
if "SwapFree" in line:
swapfree = float(emptySpace.split(line)[1])
if "SwapTotal" in line:
swaptotal = float(emptySpace.split(line)[1])
if "MemFree" in line:
memfree = float(emptySpace.split(line)[1])
if "Cached" in line and not "SwapCached" in line:
cached = float(emptySpace.split(line)[1])
ramoccup = 1.0 - (memfree + cached) / memtotal
if swaptotal == 0:
swapoccup = 0
else:
swapoccup = 1.0 - swapfree / swaptotal
strramoccup = str(round(ramoccup * 100.0, 1))
strswapoccup = str(round(swapoccup * 100.0, 1))
return float(memtotal), strramoccup, strswapoccup
|
python
|
def get_cur_mem_use():
"""return utilization of memory"""
# http://lwn.net/Articles/28345/
lines = open("/proc/meminfo", 'r').readlines()
emptySpace = re.compile('[ ]+')
for line in lines:
if "MemTotal" in line:
memtotal = float(emptySpace.split(line)[1])
if "SwapFree" in line:
swapfree = float(emptySpace.split(line)[1])
if "SwapTotal" in line:
swaptotal = float(emptySpace.split(line)[1])
if "MemFree" in line:
memfree = float(emptySpace.split(line)[1])
if "Cached" in line and not "SwapCached" in line:
cached = float(emptySpace.split(line)[1])
ramoccup = 1.0 - (memfree + cached) / memtotal
if swaptotal == 0:
swapoccup = 0
else:
swapoccup = 1.0 - swapfree / swaptotal
strramoccup = str(round(ramoccup * 100.0, 1))
strswapoccup = str(round(swapoccup * 100.0, 1))
return float(memtotal), strramoccup, strswapoccup
|
[
"def",
"get_cur_mem_use",
"(",
")",
":",
"# http://lwn.net/Articles/28345/",
"lines",
"=",
"open",
"(",
"\"/proc/meminfo\"",
",",
"'r'",
")",
".",
"readlines",
"(",
")",
"emptySpace",
"=",
"re",
".",
"compile",
"(",
"'[ ]+'",
")",
"for",
"line",
"in",
"lines",
":",
"if",
"\"MemTotal\"",
"in",
"line",
":",
"memtotal",
"=",
"float",
"(",
"emptySpace",
".",
"split",
"(",
"line",
")",
"[",
"1",
"]",
")",
"if",
"\"SwapFree\"",
"in",
"line",
":",
"swapfree",
"=",
"float",
"(",
"emptySpace",
".",
"split",
"(",
"line",
")",
"[",
"1",
"]",
")",
"if",
"\"SwapTotal\"",
"in",
"line",
":",
"swaptotal",
"=",
"float",
"(",
"emptySpace",
".",
"split",
"(",
"line",
")",
"[",
"1",
"]",
")",
"if",
"\"MemFree\"",
"in",
"line",
":",
"memfree",
"=",
"float",
"(",
"emptySpace",
".",
"split",
"(",
"line",
")",
"[",
"1",
"]",
")",
"if",
"\"Cached\"",
"in",
"line",
"and",
"not",
"\"SwapCached\"",
"in",
"line",
":",
"cached",
"=",
"float",
"(",
"emptySpace",
".",
"split",
"(",
"line",
")",
"[",
"1",
"]",
")",
"ramoccup",
"=",
"1.0",
"-",
"(",
"memfree",
"+",
"cached",
")",
"/",
"memtotal",
"if",
"swaptotal",
"==",
"0",
":",
"swapoccup",
"=",
"0",
"else",
":",
"swapoccup",
"=",
"1.0",
"-",
"swapfree",
"/",
"swaptotal",
"strramoccup",
"=",
"str",
"(",
"round",
"(",
"ramoccup",
"*",
"100.0",
",",
"1",
")",
")",
"strswapoccup",
"=",
"str",
"(",
"round",
"(",
"swapoccup",
"*",
"100.0",
",",
"1",
")",
")",
"return",
"float",
"(",
"memtotal",
")",
",",
"strramoccup",
",",
"strswapoccup"
] |
return utilization of memory
|
[
"return",
"utilization",
"of",
"memory"
] |
504d251f1951922db84883c2e660ba7e754d1546
|
https://github.com/MartinThoma/memtop/blob/504d251f1951922db84883c2e660ba7e754d1546/memtop/__init__.py#L118-L144
|
train
|
MartinThoma/memtop
|
memtop/__init__.py
|
check_py_version
|
def check_py_version():
"""Check if a propper Python version is used."""
try:
if sys.version_info >= (2, 7):
return
except:
pass
print(" ")
print(" ERROR - memtop needs python version at least 2.7")
print(("Chances are that you can install newer version from your "
"repositories, or even that you have some newer version "
"installed yet."))
print("(one way to find out which versions are installed is to try "
"following: 'which python2.7' , 'which python3' and so...)")
print(" ")
sys.exit(-1)
|
python
|
def check_py_version():
"""Check if a propper Python version is used."""
try:
if sys.version_info >= (2, 7):
return
except:
pass
print(" ")
print(" ERROR - memtop needs python version at least 2.7")
print(("Chances are that you can install newer version from your "
"repositories, or even that you have some newer version "
"installed yet."))
print("(one way to find out which versions are installed is to try "
"following: 'which python2.7' , 'which python3' and so...)")
print(" ")
sys.exit(-1)
|
[
"def",
"check_py_version",
"(",
")",
":",
"try",
":",
"if",
"sys",
".",
"version_info",
">=",
"(",
"2",
",",
"7",
")",
":",
"return",
"except",
":",
"pass",
"print",
"(",
"\" \"",
")",
"print",
"(",
"\" ERROR - memtop needs python version at least 2.7\"",
")",
"print",
"(",
"(",
"\"Chances are that you can install newer version from your \"",
"\"repositories, or even that you have some newer version \"",
"\"installed yet.\"",
")",
")",
"print",
"(",
"\"(one way to find out which versions are installed is to try \"",
"\"following: 'which python2.7' , 'which python3' and so...)\"",
")",
"print",
"(",
"\" \"",
")",
"sys",
".",
"exit",
"(",
"-",
"1",
")"
] |
Check if a propper Python version is used.
|
[
"Check",
"if",
"a",
"propper",
"Python",
"version",
"is",
"used",
"."
] |
504d251f1951922db84883c2e660ba7e754d1546
|
https://github.com/MartinThoma/memtop/blob/504d251f1951922db84883c2e660ba7e754d1546/memtop/__init__.py#L229-L244
|
train
|
sfischer13/python-prompt
|
prompt/__init__.py
|
character
|
def character(prompt=None, empty=False):
"""Prompt a single character.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
str or None
A str if the user entered a single-character, non-empty string.
None if the user pressed only Enter and ``empty`` was True.
"""
s = _prompt_input(prompt)
if empty and not s:
return None
elif len(s) == 1:
return s
else:
return character(prompt=prompt, empty=empty)
|
python
|
def character(prompt=None, empty=False):
"""Prompt a single character.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
str or None
A str if the user entered a single-character, non-empty string.
None if the user pressed only Enter and ``empty`` was True.
"""
s = _prompt_input(prompt)
if empty and not s:
return None
elif len(s) == 1:
return s
else:
return character(prompt=prompt, empty=empty)
|
[
"def",
"character",
"(",
"prompt",
"=",
"None",
",",
"empty",
"=",
"False",
")",
":",
"s",
"=",
"_prompt_input",
"(",
"prompt",
")",
"if",
"empty",
"and",
"not",
"s",
":",
"return",
"None",
"elif",
"len",
"(",
"s",
")",
"==",
"1",
":",
"return",
"s",
"else",
":",
"return",
"character",
"(",
"prompt",
"=",
"prompt",
",",
"empty",
"=",
"empty",
")"
] |
Prompt a single character.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
str or None
A str if the user entered a single-character, non-empty string.
None if the user pressed only Enter and ``empty`` was True.
|
[
"Prompt",
"a",
"single",
"character",
"."
] |
d2acf5db64a9e45247c7abf1d67c2eb7db87bb48
|
https://github.com/sfischer13/python-prompt/blob/d2acf5db64a9e45247c7abf1d67c2eb7db87bb48/prompt/__init__.py#L46-L69
|
train
|
sfischer13/python-prompt
|
prompt/__init__.py
|
email
|
def email(prompt=None, empty=False, mode="simple"):
"""Prompt an email address.
This check is based on a simple regular expression and does not verify
whether an email actually exists.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
mode : {'simple'}, optional
'simple' will use a simple regular expression.
No other mode is implemented yet.
Returns
-------
str or None
A str if the user entered a likely email address.
None if the user pressed only Enter and ``empty`` was True.
"""
if mode == "simple":
s = _prompt_input(prompt)
if empty and not s:
return None
else:
if RE_EMAIL_SIMPLE.match(s):
return s
else:
return email(prompt=prompt, empty=empty, mode=mode)
else:
raise ValueError
|
python
|
def email(prompt=None, empty=False, mode="simple"):
"""Prompt an email address.
This check is based on a simple regular expression and does not verify
whether an email actually exists.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
mode : {'simple'}, optional
'simple' will use a simple regular expression.
No other mode is implemented yet.
Returns
-------
str or None
A str if the user entered a likely email address.
None if the user pressed only Enter and ``empty`` was True.
"""
if mode == "simple":
s = _prompt_input(prompt)
if empty and not s:
return None
else:
if RE_EMAIL_SIMPLE.match(s):
return s
else:
return email(prompt=prompt, empty=empty, mode=mode)
else:
raise ValueError
|
[
"def",
"email",
"(",
"prompt",
"=",
"None",
",",
"empty",
"=",
"False",
",",
"mode",
"=",
"\"simple\"",
")",
":",
"if",
"mode",
"==",
"\"simple\"",
":",
"s",
"=",
"_prompt_input",
"(",
"prompt",
")",
"if",
"empty",
"and",
"not",
"s",
":",
"return",
"None",
"else",
":",
"if",
"RE_EMAIL_SIMPLE",
".",
"match",
"(",
"s",
")",
":",
"return",
"s",
"else",
":",
"return",
"email",
"(",
"prompt",
"=",
"prompt",
",",
"empty",
"=",
"empty",
",",
"mode",
"=",
"mode",
")",
"else",
":",
"raise",
"ValueError"
] |
Prompt an email address.
This check is based on a simple regular expression and does not verify
whether an email actually exists.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
mode : {'simple'}, optional
'simple' will use a simple regular expression.
No other mode is implemented yet.
Returns
-------
str or None
A str if the user entered a likely email address.
None if the user pressed only Enter and ``empty`` was True.
|
[
"Prompt",
"an",
"email",
"address",
"."
] |
d2acf5db64a9e45247c7abf1d67c2eb7db87bb48
|
https://github.com/sfischer13/python-prompt/blob/d2acf5db64a9e45247c7abf1d67c2eb7db87bb48/prompt/__init__.py#L72-L105
|
train
|
sfischer13/python-prompt
|
prompt/__init__.py
|
integer
|
def integer(prompt=None, empty=False):
"""Prompt an integer.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
int or None
An int if the user entered a valid integer.
None if the user pressed only Enter and ``empty`` was True.
"""
s = _prompt_input(prompt)
if empty and not s:
return None
else:
try:
return int(s)
except ValueError:
return integer(prompt=prompt, empty=empty)
|
python
|
def integer(prompt=None, empty=False):
"""Prompt an integer.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
int or None
An int if the user entered a valid integer.
None if the user pressed only Enter and ``empty`` was True.
"""
s = _prompt_input(prompt)
if empty and not s:
return None
else:
try:
return int(s)
except ValueError:
return integer(prompt=prompt, empty=empty)
|
[
"def",
"integer",
"(",
"prompt",
"=",
"None",
",",
"empty",
"=",
"False",
")",
":",
"s",
"=",
"_prompt_input",
"(",
"prompt",
")",
"if",
"empty",
"and",
"not",
"s",
":",
"return",
"None",
"else",
":",
"try",
":",
"return",
"int",
"(",
"s",
")",
"except",
"ValueError",
":",
"return",
"integer",
"(",
"prompt",
"=",
"prompt",
",",
"empty",
"=",
"empty",
")"
] |
Prompt an integer.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
int or None
An int if the user entered a valid integer.
None if the user pressed only Enter and ``empty`` was True.
|
[
"Prompt",
"an",
"integer",
"."
] |
d2acf5db64a9e45247c7abf1d67c2eb7db87bb48
|
https://github.com/sfischer13/python-prompt/blob/d2acf5db64a9e45247c7abf1d67c2eb7db87bb48/prompt/__init__.py#L108-L132
|
train
|
sfischer13/python-prompt
|
prompt/__init__.py
|
real
|
def real(prompt=None, empty=False):
"""Prompt a real number.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
float or None
A float if the user entered a valid real number.
None if the user pressed only Enter and ``empty`` was True.
"""
s = _prompt_input(prompt)
if empty and not s:
return None
else:
try:
return float(s)
except ValueError:
return real(prompt=prompt, empty=empty)
|
python
|
def real(prompt=None, empty=False):
"""Prompt a real number.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
float or None
A float if the user entered a valid real number.
None if the user pressed only Enter and ``empty`` was True.
"""
s = _prompt_input(prompt)
if empty and not s:
return None
else:
try:
return float(s)
except ValueError:
return real(prompt=prompt, empty=empty)
|
[
"def",
"real",
"(",
"prompt",
"=",
"None",
",",
"empty",
"=",
"False",
")",
":",
"s",
"=",
"_prompt_input",
"(",
"prompt",
")",
"if",
"empty",
"and",
"not",
"s",
":",
"return",
"None",
"else",
":",
"try",
":",
"return",
"float",
"(",
"s",
")",
"except",
"ValueError",
":",
"return",
"real",
"(",
"prompt",
"=",
"prompt",
",",
"empty",
"=",
"empty",
")"
] |
Prompt a real number.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
float or None
A float if the user entered a valid real number.
None if the user pressed only Enter and ``empty`` was True.
|
[
"Prompt",
"a",
"real",
"number",
"."
] |
d2acf5db64a9e45247c7abf1d67c2eb7db87bb48
|
https://github.com/sfischer13/python-prompt/blob/d2acf5db64a9e45247c7abf1d67c2eb7db87bb48/prompt/__init__.py#L135-L159
|
train
|
sfischer13/python-prompt
|
prompt/__init__.py
|
regex
|
def regex(pattern, prompt=None, empty=False, flags=0):
"""Prompt a string that matches a regular expression.
Parameters
----------
pattern : str
A regular expression that must be matched.
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
flags : int, optional
Flags that will be passed to ``re.match``.
Returns
-------
Match or None
A match object if the user entered a matching string.
None if the user pressed only Enter and ``empty`` was True.
See Also
--------
re.match
"""
s = _prompt_input(prompt)
if empty and not s:
return None
else:
m = re.match(pattern, s, flags=flags)
if m:
return m
else:
return regex(pattern, prompt=prompt, empty=empty, flags=flags)
|
python
|
def regex(pattern, prompt=None, empty=False, flags=0):
"""Prompt a string that matches a regular expression.
Parameters
----------
pattern : str
A regular expression that must be matched.
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
flags : int, optional
Flags that will be passed to ``re.match``.
Returns
-------
Match or None
A match object if the user entered a matching string.
None if the user pressed only Enter and ``empty`` was True.
See Also
--------
re.match
"""
s = _prompt_input(prompt)
if empty and not s:
return None
else:
m = re.match(pattern, s, flags=flags)
if m:
return m
else:
return regex(pattern, prompt=prompt, empty=empty, flags=flags)
|
[
"def",
"regex",
"(",
"pattern",
",",
"prompt",
"=",
"None",
",",
"empty",
"=",
"False",
",",
"flags",
"=",
"0",
")",
":",
"s",
"=",
"_prompt_input",
"(",
"prompt",
")",
"if",
"empty",
"and",
"not",
"s",
":",
"return",
"None",
"else",
":",
"m",
"=",
"re",
".",
"match",
"(",
"pattern",
",",
"s",
",",
"flags",
"=",
"flags",
")",
"if",
"m",
":",
"return",
"m",
"else",
":",
"return",
"regex",
"(",
"pattern",
",",
"prompt",
"=",
"prompt",
",",
"empty",
"=",
"empty",
",",
"flags",
"=",
"flags",
")"
] |
Prompt a string that matches a regular expression.
Parameters
----------
pattern : str
A regular expression that must be matched.
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
flags : int, optional
Flags that will be passed to ``re.match``.
Returns
-------
Match or None
A match object if the user entered a matching string.
None if the user pressed only Enter and ``empty`` was True.
See Also
--------
re.match
|
[
"Prompt",
"a",
"string",
"that",
"matches",
"a",
"regular",
"expression",
"."
] |
d2acf5db64a9e45247c7abf1d67c2eb7db87bb48
|
https://github.com/sfischer13/python-prompt/blob/d2acf5db64a9e45247c7abf1d67c2eb7db87bb48/prompt/__init__.py#L162-L195
|
train
|
sfischer13/python-prompt
|
prompt/__init__.py
|
secret
|
def secret(prompt=None, empty=False):
"""Prompt a string without echoing.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
str or None
A str if the user entered a non-empty string.
None if the user pressed only Enter and ``empty`` was True.
Raises
------
getpass.GetPassWarning
If echo free input is unavailable.
See Also
--------
getpass.getpass
"""
if prompt is None:
prompt = PROMPT
s = getpass.getpass(prompt=prompt)
if empty and not s:
return None
else:
if s:
return s
else:
return secret(prompt=prompt, empty=empty)
|
python
|
def secret(prompt=None, empty=False):
"""Prompt a string without echoing.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
str or None
A str if the user entered a non-empty string.
None if the user pressed only Enter and ``empty`` was True.
Raises
------
getpass.GetPassWarning
If echo free input is unavailable.
See Also
--------
getpass.getpass
"""
if prompt is None:
prompt = PROMPT
s = getpass.getpass(prompt=prompt)
if empty and not s:
return None
else:
if s:
return s
else:
return secret(prompt=prompt, empty=empty)
|
[
"def",
"secret",
"(",
"prompt",
"=",
"None",
",",
"empty",
"=",
"False",
")",
":",
"if",
"prompt",
"is",
"None",
":",
"prompt",
"=",
"PROMPT",
"s",
"=",
"getpass",
".",
"getpass",
"(",
"prompt",
"=",
"prompt",
")",
"if",
"empty",
"and",
"not",
"s",
":",
"return",
"None",
"else",
":",
"if",
"s",
":",
"return",
"s",
"else",
":",
"return",
"secret",
"(",
"prompt",
"=",
"prompt",
",",
"empty",
"=",
"empty",
")"
] |
Prompt a string without echoing.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
str or None
A str if the user entered a non-empty string.
None if the user pressed only Enter and ``empty`` was True.
Raises
------
getpass.GetPassWarning
If echo free input is unavailable.
See Also
--------
getpass.getpass
|
[
"Prompt",
"a",
"string",
"without",
"echoing",
"."
] |
d2acf5db64a9e45247c7abf1d67c2eb7db87bb48
|
https://github.com/sfischer13/python-prompt/blob/d2acf5db64a9e45247c7abf1d67c2eb7db87bb48/prompt/__init__.py#L198-L233
|
train
|
sfischer13/python-prompt
|
prompt/__init__.py
|
string
|
def string(prompt=None, empty=False):
"""Prompt a string.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
str or None
A str if the user entered a non-empty string.
None if the user pressed only Enter and ``empty`` was True.
"""
s = _prompt_input(prompt)
if empty and not s:
return None
else:
if s:
return s
else:
return string(prompt=prompt, empty=empty)
|
python
|
def string(prompt=None, empty=False):
"""Prompt a string.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
str or None
A str if the user entered a non-empty string.
None if the user pressed only Enter and ``empty`` was True.
"""
s = _prompt_input(prompt)
if empty and not s:
return None
else:
if s:
return s
else:
return string(prompt=prompt, empty=empty)
|
[
"def",
"string",
"(",
"prompt",
"=",
"None",
",",
"empty",
"=",
"False",
")",
":",
"s",
"=",
"_prompt_input",
"(",
"prompt",
")",
"if",
"empty",
"and",
"not",
"s",
":",
"return",
"None",
"else",
":",
"if",
"s",
":",
"return",
"s",
"else",
":",
"return",
"string",
"(",
"prompt",
"=",
"prompt",
",",
"empty",
"=",
"empty",
")"
] |
Prompt a string.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
str or None
A str if the user entered a non-empty string.
None if the user pressed only Enter and ``empty`` was True.
|
[
"Prompt",
"a",
"string",
"."
] |
d2acf5db64a9e45247c7abf1d67c2eb7db87bb48
|
https://github.com/sfischer13/python-prompt/blob/d2acf5db64a9e45247c7abf1d67c2eb7db87bb48/prompt/__init__.py#L236-L260
|
train
|
iurisilvio/Flask-SQLAlchemy-Cache
|
flask_sqlalchemy_cache/core.py
|
CachingQuery._get_cache_plus_key
|
def _get_cache_plus_key(self):
"""Return a cache region plus key."""
key = getattr(self, '_cache_key', self.key_from_query())
return self._cache.cache, key
|
python
|
def _get_cache_plus_key(self):
"""Return a cache region plus key."""
key = getattr(self, '_cache_key', self.key_from_query())
return self._cache.cache, key
|
[
"def",
"_get_cache_plus_key",
"(",
"self",
")",
":",
"key",
"=",
"getattr",
"(",
"self",
",",
"'_cache_key'",
",",
"self",
".",
"key_from_query",
"(",
")",
")",
"return",
"self",
".",
"_cache",
".",
"cache",
",",
"key"
] |
Return a cache region plus key.
|
[
"Return",
"a",
"cache",
"region",
"plus",
"key",
"."
] |
d29023c8fc09fd5a6a0ae24d18eee2de88215ab0
|
https://github.com/iurisilvio/Flask-SQLAlchemy-Cache/blob/d29023c8fc09fd5a6a0ae24d18eee2de88215ab0/flask_sqlalchemy_cache/core.py#L52-L55
|
train
|
iurisilvio/Flask-SQLAlchemy-Cache
|
flask_sqlalchemy_cache/core.py
|
CachingQuery.get_value
|
def get_value(self, merge=True, createfunc=None,
expiration_time=None, ignore_expiration=False):
"""
Return the value from the cache for this query.
"""
cache, cache_key = self._get_cache_plus_key()
# ignore_expiration means, if the value is in the cache
# but is expired, return it anyway. This doesn't make sense
# with createfunc, which says, if the value is expired, generate
# a new value.
assert not ignore_expiration or not createfunc, \
"Can't ignore expiration and also provide createfunc"
if ignore_expiration or not createfunc:
cached_value = cache.get(cache_key,
expiration_time=expiration_time,
ignore_expiration=ignore_expiration)
else:
cached_value = cache.get(cache_key)
if not cached_value:
cached_value = createfunc()
cache.set(cache_key, cached_value, timeout=expiration_time)
if cached_value and merge:
cached_value = self.merge_result(cached_value, load=False)
return cached_value
|
python
|
def get_value(self, merge=True, createfunc=None,
expiration_time=None, ignore_expiration=False):
"""
Return the value from the cache for this query.
"""
cache, cache_key = self._get_cache_plus_key()
# ignore_expiration means, if the value is in the cache
# but is expired, return it anyway. This doesn't make sense
# with createfunc, which says, if the value is expired, generate
# a new value.
assert not ignore_expiration or not createfunc, \
"Can't ignore expiration and also provide createfunc"
if ignore_expiration or not createfunc:
cached_value = cache.get(cache_key,
expiration_time=expiration_time,
ignore_expiration=ignore_expiration)
else:
cached_value = cache.get(cache_key)
if not cached_value:
cached_value = createfunc()
cache.set(cache_key, cached_value, timeout=expiration_time)
if cached_value and merge:
cached_value = self.merge_result(cached_value, load=False)
return cached_value
|
[
"def",
"get_value",
"(",
"self",
",",
"merge",
"=",
"True",
",",
"createfunc",
"=",
"None",
",",
"expiration_time",
"=",
"None",
",",
"ignore_expiration",
"=",
"False",
")",
":",
"cache",
",",
"cache_key",
"=",
"self",
".",
"_get_cache_plus_key",
"(",
")",
"# ignore_expiration means, if the value is in the cache",
"# but is expired, return it anyway. This doesn't make sense",
"# with createfunc, which says, if the value is expired, generate",
"# a new value.",
"assert",
"not",
"ignore_expiration",
"or",
"not",
"createfunc",
",",
"\"Can't ignore expiration and also provide createfunc\"",
"if",
"ignore_expiration",
"or",
"not",
"createfunc",
":",
"cached_value",
"=",
"cache",
".",
"get",
"(",
"cache_key",
",",
"expiration_time",
"=",
"expiration_time",
",",
"ignore_expiration",
"=",
"ignore_expiration",
")",
"else",
":",
"cached_value",
"=",
"cache",
".",
"get",
"(",
"cache_key",
")",
"if",
"not",
"cached_value",
":",
"cached_value",
"=",
"createfunc",
"(",
")",
"cache",
".",
"set",
"(",
"cache_key",
",",
"cached_value",
",",
"timeout",
"=",
"expiration_time",
")",
"if",
"cached_value",
"and",
"merge",
":",
"cached_value",
"=",
"self",
".",
"merge_result",
"(",
"cached_value",
",",
"load",
"=",
"False",
")",
"return",
"cached_value"
] |
Return the value from the cache for this query.
|
[
"Return",
"the",
"value",
"from",
"the",
"cache",
"for",
"this",
"query",
"."
] |
d29023c8fc09fd5a6a0ae24d18eee2de88215ab0
|
https://github.com/iurisilvio/Flask-SQLAlchemy-Cache/blob/d29023c8fc09fd5a6a0ae24d18eee2de88215ab0/flask_sqlalchemy_cache/core.py#L62-L89
|
train
|
iurisilvio/Flask-SQLAlchemy-Cache
|
flask_sqlalchemy_cache/core.py
|
CachingQuery.set_value
|
def set_value(self, value):
"""Set the value in the cache for this query."""
cache, cache_key = self._get_cache_plus_key()
cache.set(cache_key, value)
|
python
|
def set_value(self, value):
"""Set the value in the cache for this query."""
cache, cache_key = self._get_cache_plus_key()
cache.set(cache_key, value)
|
[
"def",
"set_value",
"(",
"self",
",",
"value",
")",
":",
"cache",
",",
"cache_key",
"=",
"self",
".",
"_get_cache_plus_key",
"(",
")",
"cache",
".",
"set",
"(",
"cache_key",
",",
"value",
")"
] |
Set the value in the cache for this query.
|
[
"Set",
"the",
"value",
"in",
"the",
"cache",
"for",
"this",
"query",
"."
] |
d29023c8fc09fd5a6a0ae24d18eee2de88215ab0
|
https://github.com/iurisilvio/Flask-SQLAlchemy-Cache/blob/d29023c8fc09fd5a6a0ae24d18eee2de88215ab0/flask_sqlalchemy_cache/core.py#L91-L94
|
train
|
iurisilvio/Flask-SQLAlchemy-Cache
|
flask_sqlalchemy_cache/core.py
|
CachingQuery.key_from_query
|
def key_from_query(self, qualifier=None):
"""
Given a Query, create a cache key.
There are many approaches to this; here we use the simplest, which is
to create an md5 hash of the text of the SQL statement, combined with
stringified versions of all the bound parameters within it.
There's a bit of a performance hit with compiling out "query.statement"
here; other approaches include setting up an explicit cache key with a
particular Query, then combining that with the bound parameter values.
"""
stmt = self.with_labels().statement
compiled = stmt.compile()
params = compiled.params
values = [str(compiled)]
for k in sorted(params):
values.append(repr(params[k]))
key = u" ".join(values)
return md5(key.encode('utf8')).hexdigest()
|
python
|
def key_from_query(self, qualifier=None):
"""
Given a Query, create a cache key.
There are many approaches to this; here we use the simplest, which is
to create an md5 hash of the text of the SQL statement, combined with
stringified versions of all the bound parameters within it.
There's a bit of a performance hit with compiling out "query.statement"
here; other approaches include setting up an explicit cache key with a
particular Query, then combining that with the bound parameter values.
"""
stmt = self.with_labels().statement
compiled = stmt.compile()
params = compiled.params
values = [str(compiled)]
for k in sorted(params):
values.append(repr(params[k]))
key = u" ".join(values)
return md5(key.encode('utf8')).hexdigest()
|
[
"def",
"key_from_query",
"(",
"self",
",",
"qualifier",
"=",
"None",
")",
":",
"stmt",
"=",
"self",
".",
"with_labels",
"(",
")",
".",
"statement",
"compiled",
"=",
"stmt",
".",
"compile",
"(",
")",
"params",
"=",
"compiled",
".",
"params",
"values",
"=",
"[",
"str",
"(",
"compiled",
")",
"]",
"for",
"k",
"in",
"sorted",
"(",
"params",
")",
":",
"values",
".",
"append",
"(",
"repr",
"(",
"params",
"[",
"k",
"]",
")",
")",
"key",
"=",
"u\" \"",
".",
"join",
"(",
"values",
")",
"return",
"md5",
"(",
"key",
".",
"encode",
"(",
"'utf8'",
")",
")",
".",
"hexdigest",
"(",
")"
] |
Given a Query, create a cache key.
There are many approaches to this; here we use the simplest, which is
to create an md5 hash of the text of the SQL statement, combined with
stringified versions of all the bound parameters within it.
There's a bit of a performance hit with compiling out "query.statement"
here; other approaches include setting up an explicit cache key with a
particular Query, then combining that with the bound parameter values.
|
[
"Given",
"a",
"Query",
"create",
"a",
"cache",
"key",
"."
] |
d29023c8fc09fd5a6a0ae24d18eee2de88215ab0
|
https://github.com/iurisilvio/Flask-SQLAlchemy-Cache/blob/d29023c8fc09fd5a6a0ae24d18eee2de88215ab0/flask_sqlalchemy_cache/core.py#L96-L116
|
train
|
iurisilvio/Flask-SQLAlchemy-Cache
|
flask_sqlalchemy_cache/core.py
|
RelationshipCache.process_query_conditionally
|
def process_query_conditionally(self, query):
"""
Process a Query that is used within a lazy loader.
(the process_query_conditionally() method is a SQLAlchemy
hook invoked only within lazyload.)
"""
if query._current_path:
mapper, prop = query._current_path[-2:]
for cls in mapper.class_.__mro__:
k = (cls, prop.key)
relationship_option = self._relationship_options.get(k)
if relationship_option:
query._cache = relationship_option
break
|
python
|
def process_query_conditionally(self, query):
"""
Process a Query that is used within a lazy loader.
(the process_query_conditionally() method is a SQLAlchemy
hook invoked only within lazyload.)
"""
if query._current_path:
mapper, prop = query._current_path[-2:]
for cls in mapper.class_.__mro__:
k = (cls, prop.key)
relationship_option = self._relationship_options.get(k)
if relationship_option:
query._cache = relationship_option
break
|
[
"def",
"process_query_conditionally",
"(",
"self",
",",
"query",
")",
":",
"if",
"query",
".",
"_current_path",
":",
"mapper",
",",
"prop",
"=",
"query",
".",
"_current_path",
"[",
"-",
"2",
":",
"]",
"for",
"cls",
"in",
"mapper",
".",
"class_",
".",
"__mro__",
":",
"k",
"=",
"(",
"cls",
",",
"prop",
".",
"key",
")",
"relationship_option",
"=",
"self",
".",
"_relationship_options",
".",
"get",
"(",
"k",
")",
"if",
"relationship_option",
":",
"query",
".",
"_cache",
"=",
"relationship_option",
"break"
] |
Process a Query that is used within a lazy loader.
(the process_query_conditionally() method is a SQLAlchemy
hook invoked only within lazyload.)
|
[
"Process",
"a",
"Query",
"that",
"is",
"used",
"within",
"a",
"lazy",
"loader",
"."
] |
d29023c8fc09fd5a6a0ae24d18eee2de88215ab0
|
https://github.com/iurisilvio/Flask-SQLAlchemy-Cache/blob/d29023c8fc09fd5a6a0ae24d18eee2de88215ab0/flask_sqlalchemy_cache/core.py#L179-L193
|
train
|
jakevdp/supersmoother
|
supersmoother/smoother.py
|
Smoother.fit
|
def fit(self, t, y, dy=1, presorted=False):
"""Fit the smoother
Parameters
----------
t : array_like
time locations of the points to smooth
y : array_like
y locations of the points to smooth
dy : array_like or float (default = 1)
Errors in the y values
presorted : bool (default = False)
If True, then t is assumed to be sorted.
Returns
-------
self : Smoother instance
"""
self.t, self.y, self.dy = self._validate_inputs(t, y, dy, presorted)
self._fit(self.t, self.y, self.dy)
return self
|
python
|
def fit(self, t, y, dy=1, presorted=False):
"""Fit the smoother
Parameters
----------
t : array_like
time locations of the points to smooth
y : array_like
y locations of the points to smooth
dy : array_like or float (default = 1)
Errors in the y values
presorted : bool (default = False)
If True, then t is assumed to be sorted.
Returns
-------
self : Smoother instance
"""
self.t, self.y, self.dy = self._validate_inputs(t, y, dy, presorted)
self._fit(self.t, self.y, self.dy)
return self
|
[
"def",
"fit",
"(",
"self",
",",
"t",
",",
"y",
",",
"dy",
"=",
"1",
",",
"presorted",
"=",
"False",
")",
":",
"self",
".",
"t",
",",
"self",
".",
"y",
",",
"self",
".",
"dy",
"=",
"self",
".",
"_validate_inputs",
"(",
"t",
",",
"y",
",",
"dy",
",",
"presorted",
")",
"self",
".",
"_fit",
"(",
"self",
".",
"t",
",",
"self",
".",
"y",
",",
"self",
".",
"dy",
")",
"return",
"self"
] |
Fit the smoother
Parameters
----------
t : array_like
time locations of the points to smooth
y : array_like
y locations of the points to smooth
dy : array_like or float (default = 1)
Errors in the y values
presorted : bool (default = False)
If True, then t is assumed to be sorted.
Returns
-------
self : Smoother instance
|
[
"Fit",
"the",
"smoother"
] |
0c96cf13dcd6f9006d3c0421f9cd6e18abe27a2f
|
https://github.com/jakevdp/supersmoother/blob/0c96cf13dcd6f9006d3c0421f9cd6e18abe27a2f/supersmoother/smoother.py#L13-L33
|
train
|
jakevdp/supersmoother
|
supersmoother/smoother.py
|
Smoother.predict
|
def predict(self, t):
"""Predict the smoothed function value at time t
Parameters
----------
t : array_like
Times at which to predict the result
Returns
-------
y : ndarray
Smoothed values at time t
"""
t = np.asarray(t)
return self._predict(np.ravel(t)).reshape(t.shape)
|
python
|
def predict(self, t):
"""Predict the smoothed function value at time t
Parameters
----------
t : array_like
Times at which to predict the result
Returns
-------
y : ndarray
Smoothed values at time t
"""
t = np.asarray(t)
return self._predict(np.ravel(t)).reshape(t.shape)
|
[
"def",
"predict",
"(",
"self",
",",
"t",
")",
":",
"t",
"=",
"np",
".",
"asarray",
"(",
"t",
")",
"return",
"self",
".",
"_predict",
"(",
"np",
".",
"ravel",
"(",
"t",
")",
")",
".",
"reshape",
"(",
"t",
".",
"shape",
")"
] |
Predict the smoothed function value at time t
Parameters
----------
t : array_like
Times at which to predict the result
Returns
-------
y : ndarray
Smoothed values at time t
|
[
"Predict",
"the",
"smoothed",
"function",
"value",
"at",
"time",
"t"
] |
0c96cf13dcd6f9006d3c0421f9cd6e18abe27a2f
|
https://github.com/jakevdp/supersmoother/blob/0c96cf13dcd6f9006d3c0421f9cd6e18abe27a2f/supersmoother/smoother.py#L35-L49
|
train
|
jakevdp/supersmoother
|
supersmoother/smoother.py
|
Smoother.cv_residuals
|
def cv_residuals(self, cv=True):
"""Return the residuals of the cross-validation for the fit data"""
vals = self.cv_values(cv)
return (self.y - vals) / self.dy
|
python
|
def cv_residuals(self, cv=True):
"""Return the residuals of the cross-validation for the fit data"""
vals = self.cv_values(cv)
return (self.y - vals) / self.dy
|
[
"def",
"cv_residuals",
"(",
"self",
",",
"cv",
"=",
"True",
")",
":",
"vals",
"=",
"self",
".",
"cv_values",
"(",
"cv",
")",
"return",
"(",
"self",
".",
"y",
"-",
"vals",
")",
"/",
"self",
".",
"dy"
] |
Return the residuals of the cross-validation for the fit data
|
[
"Return",
"the",
"residuals",
"of",
"the",
"cross",
"-",
"validation",
"for",
"the",
"fit",
"data"
] |
0c96cf13dcd6f9006d3c0421f9cd6e18abe27a2f
|
https://github.com/jakevdp/supersmoother/blob/0c96cf13dcd6f9006d3c0421f9cd6e18abe27a2f/supersmoother/smoother.py#L55-L58
|
train
|
jakevdp/supersmoother
|
supersmoother/smoother.py
|
Smoother.cv_error
|
def cv_error(self, cv=True, skip_endpoints=True):
"""Return the sum of cross-validation residuals for the input data"""
resids = self.cv_residuals(cv)
if skip_endpoints:
resids = resids[1:-1]
return np.mean(abs(resids))
|
python
|
def cv_error(self, cv=True, skip_endpoints=True):
"""Return the sum of cross-validation residuals for the input data"""
resids = self.cv_residuals(cv)
if skip_endpoints:
resids = resids[1:-1]
return np.mean(abs(resids))
|
[
"def",
"cv_error",
"(",
"self",
",",
"cv",
"=",
"True",
",",
"skip_endpoints",
"=",
"True",
")",
":",
"resids",
"=",
"self",
".",
"cv_residuals",
"(",
"cv",
")",
"if",
"skip_endpoints",
":",
"resids",
"=",
"resids",
"[",
"1",
":",
"-",
"1",
"]",
"return",
"np",
".",
"mean",
"(",
"abs",
"(",
"resids",
")",
")"
] |
Return the sum of cross-validation residuals for the input data
|
[
"Return",
"the",
"sum",
"of",
"cross",
"-",
"validation",
"residuals",
"for",
"the",
"input",
"data"
] |
0c96cf13dcd6f9006d3c0421f9cd6e18abe27a2f
|
https://github.com/jakevdp/supersmoother/blob/0c96cf13dcd6f9006d3c0421f9cd6e18abe27a2f/supersmoother/smoother.py#L60-L65
|
train
|
casebeer/audiogen
|
audiogen/noise.py
|
arcfour
|
def arcfour(key, csbN=1):
'''Return a generator for the ARCFOUR/RC4 pseudorandom keystream for the
key provided. Keys should be byte strings or sequences of ints.'''
if isinstance(key, str):
key = [ord(c) for c in key]
s = range(256)
j = 0
for n in range(csbN):
for i in range(256):
j = (j + s[i] + key[i % len(key)]) % 256
t = s[i]
s[i] = s[j]
s[j] = t
i = 0
j = 0
while True:
i = (i + 1) % 256
j = (j + s[i]) % 256
t = s[i]
s[i] = s[j]
s[j] = t
yield s[(s[i] + s[j]) % 256]
|
python
|
def arcfour(key, csbN=1):
'''Return a generator for the ARCFOUR/RC4 pseudorandom keystream for the
key provided. Keys should be byte strings or sequences of ints.'''
if isinstance(key, str):
key = [ord(c) for c in key]
s = range(256)
j = 0
for n in range(csbN):
for i in range(256):
j = (j + s[i] + key[i % len(key)]) % 256
t = s[i]
s[i] = s[j]
s[j] = t
i = 0
j = 0
while True:
i = (i + 1) % 256
j = (j + s[i]) % 256
t = s[i]
s[i] = s[j]
s[j] = t
yield s[(s[i] + s[j]) % 256]
|
[
"def",
"arcfour",
"(",
"key",
",",
"csbN",
"=",
"1",
")",
":",
"if",
"isinstance",
"(",
"key",
",",
"str",
")",
":",
"key",
"=",
"[",
"ord",
"(",
"c",
")",
"for",
"c",
"in",
"key",
"]",
"s",
"=",
"range",
"(",
"256",
")",
"j",
"=",
"0",
"for",
"n",
"in",
"range",
"(",
"csbN",
")",
":",
"for",
"i",
"in",
"range",
"(",
"256",
")",
":",
"j",
"=",
"(",
"j",
"+",
"s",
"[",
"i",
"]",
"+",
"key",
"[",
"i",
"%",
"len",
"(",
"key",
")",
"]",
")",
"%",
"256",
"t",
"=",
"s",
"[",
"i",
"]",
"s",
"[",
"i",
"]",
"=",
"s",
"[",
"j",
"]",
"s",
"[",
"j",
"]",
"=",
"t",
"i",
"=",
"0",
"j",
"=",
"0",
"while",
"True",
":",
"i",
"=",
"(",
"i",
"+",
"1",
")",
"%",
"256",
"j",
"=",
"(",
"j",
"+",
"s",
"[",
"i",
"]",
")",
"%",
"256",
"t",
"=",
"s",
"[",
"i",
"]",
"s",
"[",
"i",
"]",
"=",
"s",
"[",
"j",
"]",
"s",
"[",
"j",
"]",
"=",
"t",
"yield",
"s",
"[",
"(",
"s",
"[",
"i",
"]",
"+",
"s",
"[",
"j",
"]",
")",
"%",
"256",
"]"
] |
Return a generator for the ARCFOUR/RC4 pseudorandom keystream for the
key provided. Keys should be byte strings or sequences of ints.
|
[
"Return",
"a",
"generator",
"for",
"the",
"ARCFOUR",
"/",
"RC4",
"pseudorandom",
"keystream",
"for",
"the",
"key",
"provided",
".",
"Keys",
"should",
"be",
"byte",
"strings",
"or",
"sequences",
"of",
"ints",
"."
] |
184dee2ca32c2bb4315a0f18e62288728fcd7881
|
https://github.com/casebeer/audiogen/blob/184dee2ca32c2bb4315a0f18e62288728fcd7881/audiogen/noise.py#L5-L26
|
train
|
casebeer/audiogen
|
audiogen/noise.py
|
arcfour_drop
|
def arcfour_drop(key, n=3072):
'''Return a generator for the RC4-drop pseudorandom keystream given by
the key and number of bytes to drop passed as arguments. Dropped bytes
default to the more conservative 3072, NOT the SCAN default of 768.'''
af = arcfour(key)
[af.next() for c in range(n)]
return af
|
python
|
def arcfour_drop(key, n=3072):
'''Return a generator for the RC4-drop pseudorandom keystream given by
the key and number of bytes to drop passed as arguments. Dropped bytes
default to the more conservative 3072, NOT the SCAN default of 768.'''
af = arcfour(key)
[af.next() for c in range(n)]
return af
|
[
"def",
"arcfour_drop",
"(",
"key",
",",
"n",
"=",
"3072",
")",
":",
"af",
"=",
"arcfour",
"(",
"key",
")",
"[",
"af",
".",
"next",
"(",
")",
"for",
"c",
"in",
"range",
"(",
"n",
")",
"]",
"return",
"af"
] |
Return a generator for the RC4-drop pseudorandom keystream given by
the key and number of bytes to drop passed as arguments. Dropped bytes
default to the more conservative 3072, NOT the SCAN default of 768.
|
[
"Return",
"a",
"generator",
"for",
"the",
"RC4",
"-",
"drop",
"pseudorandom",
"keystream",
"given",
"by",
"the",
"key",
"and",
"number",
"of",
"bytes",
"to",
"drop",
"passed",
"as",
"arguments",
".",
"Dropped",
"bytes",
"default",
"to",
"the",
"more",
"conservative",
"3072",
"NOT",
"the",
"SCAN",
"default",
"of",
"768",
"."
] |
184dee2ca32c2bb4315a0f18e62288728fcd7881
|
https://github.com/casebeer/audiogen/blob/184dee2ca32c2bb4315a0f18e62288728fcd7881/audiogen/noise.py#L28-L34
|
train
|
zeroSteiner/AdvancedHTTPServer
|
advancedhttpserver.py
|
resolve_ssl_protocol_version
|
def resolve_ssl_protocol_version(version=None):
"""
Look up an SSL protocol version by name. If *version* is not specified, then
the strongest protocol available will be returned.
:param str version: The name of the version to look up.
:return: A protocol constant from the :py:mod:`ssl` module.
:rtype: int
"""
if version is None:
protocol_preference = ('TLSv1_2', 'TLSv1_1', 'TLSv1', 'SSLv3', 'SSLv23', 'SSLv2')
for protocol in protocol_preference:
if hasattr(ssl, 'PROTOCOL_' + protocol):
return getattr(ssl, 'PROTOCOL_' + protocol)
raise RuntimeError('could not find a suitable ssl PROTOCOL_ version constant')
elif isinstance(version, str):
if not hasattr(ssl, 'PROTOCOL_' + version):
raise ValueError('invalid ssl protocol version: ' + version)
return getattr(ssl, 'PROTOCOL_' + version)
raise TypeError("ssl_version() argument 1 must be str, not {0}".format(type(version).__name__))
|
python
|
def resolve_ssl_protocol_version(version=None):
"""
Look up an SSL protocol version by name. If *version* is not specified, then
the strongest protocol available will be returned.
:param str version: The name of the version to look up.
:return: A protocol constant from the :py:mod:`ssl` module.
:rtype: int
"""
if version is None:
protocol_preference = ('TLSv1_2', 'TLSv1_1', 'TLSv1', 'SSLv3', 'SSLv23', 'SSLv2')
for protocol in protocol_preference:
if hasattr(ssl, 'PROTOCOL_' + protocol):
return getattr(ssl, 'PROTOCOL_' + protocol)
raise RuntimeError('could not find a suitable ssl PROTOCOL_ version constant')
elif isinstance(version, str):
if not hasattr(ssl, 'PROTOCOL_' + version):
raise ValueError('invalid ssl protocol version: ' + version)
return getattr(ssl, 'PROTOCOL_' + version)
raise TypeError("ssl_version() argument 1 must be str, not {0}".format(type(version).__name__))
|
[
"def",
"resolve_ssl_protocol_version",
"(",
"version",
"=",
"None",
")",
":",
"if",
"version",
"is",
"None",
":",
"protocol_preference",
"=",
"(",
"'TLSv1_2'",
",",
"'TLSv1_1'",
",",
"'TLSv1'",
",",
"'SSLv3'",
",",
"'SSLv23'",
",",
"'SSLv2'",
")",
"for",
"protocol",
"in",
"protocol_preference",
":",
"if",
"hasattr",
"(",
"ssl",
",",
"'PROTOCOL_'",
"+",
"protocol",
")",
":",
"return",
"getattr",
"(",
"ssl",
",",
"'PROTOCOL_'",
"+",
"protocol",
")",
"raise",
"RuntimeError",
"(",
"'could not find a suitable ssl PROTOCOL_ version constant'",
")",
"elif",
"isinstance",
"(",
"version",
",",
"str",
")",
":",
"if",
"not",
"hasattr",
"(",
"ssl",
",",
"'PROTOCOL_'",
"+",
"version",
")",
":",
"raise",
"ValueError",
"(",
"'invalid ssl protocol version: '",
"+",
"version",
")",
"return",
"getattr",
"(",
"ssl",
",",
"'PROTOCOL_'",
"+",
"version",
")",
"raise",
"TypeError",
"(",
"\"ssl_version() argument 1 must be str, not {0}\"",
".",
"format",
"(",
"type",
"(",
"version",
")",
".",
"__name__",
")",
")"
] |
Look up an SSL protocol version by name. If *version* is not specified, then
the strongest protocol available will be returned.
:param str version: The name of the version to look up.
:return: A protocol constant from the :py:mod:`ssl` module.
:rtype: int
|
[
"Look",
"up",
"an",
"SSL",
"protocol",
"version",
"by",
"name",
".",
"If",
"*",
"version",
"*",
"is",
"not",
"specified",
"then",
"the",
"strongest",
"protocol",
"available",
"will",
"be",
"returned",
"."
] |
8c53cf7e1ddbf7ae9f573c82c5fe5f6992db7b5a
|
https://github.com/zeroSteiner/AdvancedHTTPServer/blob/8c53cf7e1ddbf7ae9f573c82c5fe5f6992db7b5a/advancedhttpserver.py#L215-L234
|
train
|
zeroSteiner/AdvancedHTTPServer
|
advancedhttpserver.py
|
build_server_from_argparser
|
def build_server_from_argparser(description=None, server_klass=None, handler_klass=None):
"""
Build a server from command line arguments. If a ServerClass or
HandlerClass is specified, then the object must inherit from the
corresponding AdvancedHTTPServer base class.
:param str description: Description string to be passed to the argument parser.
:param server_klass: Alternative server class to use.
:type server_klass: :py:class:`.AdvancedHTTPServer`
:param handler_klass: Alternative handler class to use.
:type handler_klass: :py:class:`.RequestHandler`
:return: A configured server instance.
:rtype: :py:class:`.AdvancedHTTPServer`
"""
import argparse
def _argp_dir_type(arg):
if not os.path.isdir(arg):
raise argparse.ArgumentTypeError("{0} is not a valid directory".format(repr(arg)))
return arg
def _argp_port_type(arg):
if not arg.isdigit():
raise argparse.ArgumentTypeError("{0} is not a valid port".format(repr(arg)))
arg = int(arg)
if arg < 0 or arg > 65535:
raise argparse.ArgumentTypeError("{0} is not a valid port".format(repr(arg)))
return arg
description = (description or 'HTTP Server')
server_klass = (server_klass or AdvancedHTTPServer)
handler_klass = (handler_klass or RequestHandler)
parser = argparse.ArgumentParser(conflict_handler='resolve', description=description, fromfile_prefix_chars='@')
parser.epilog = 'When a config file is specified with --config only the --log, --log-file and --password options will be used.'
parser.add_argument('-c', '--conf', dest='config', type=argparse.FileType('r'), help='read settings from a config file')
parser.add_argument('-i', '--ip', dest='ip', default='0.0.0.0', help='the ip address to serve on')
parser.add_argument('-L', '--log', dest='loglvl', choices=('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'), default='INFO', help='set the logging level')
parser.add_argument('-p', '--port', dest='port', default=8080, type=_argp_port_type, help='port to serve on')
parser.add_argument('-v', '--version', action='version', version=parser.prog + ' Version: ' + __version__)
parser.add_argument('-w', '--web-root', dest='web_root', default='.', type=_argp_dir_type, help='path to the web root directory')
parser.add_argument('--log-file', dest='log_file', help='log information to a file')
parser.add_argument('--no-threads', dest='use_threads', action='store_false', default=True, help='disable threading')
parser.add_argument('--password', dest='password', help='password to use for basic authentication')
ssl_group = parser.add_argument_group('ssl options')
ssl_group.add_argument('--ssl-cert', dest='ssl_cert', help='the ssl cert to use')
ssl_group.add_argument('--ssl-key', dest='ssl_key', help='the ssl key to use')
ssl_group.add_argument('--ssl-version', dest='ssl_version', choices=[p[9:] for p in dir(ssl) if p.startswith('PROTOCOL_')], help='the version of ssl to use')
arguments = parser.parse_args()
logging.getLogger('').setLevel(logging.DEBUG)
console_log_handler = logging.StreamHandler()
console_log_handler.setLevel(getattr(logging, arguments.loglvl))
console_log_handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)-8s %(message)s"))
logging.getLogger('').addHandler(console_log_handler)
if arguments.log_file:
main_file_handler = logging.handlers.RotatingFileHandler(arguments.log_file, maxBytes=262144, backupCount=5)
main_file_handler.setLevel(logging.DEBUG)
main_file_handler.setFormatter(logging.Formatter("%(asctime)s %(name)-30s %(levelname)-10s %(message)s"))
logging.getLogger('').setLevel(logging.DEBUG)
logging.getLogger('').addHandler(main_file_handler)
if arguments.config:
config = ConfigParser()
config.readfp(arguments.config)
server = build_server_from_config(
config,
'server',
server_klass=server_klass,
handler_klass=handler_klass
)
else:
server = server_klass(
handler_klass,
address=(arguments.ip, arguments.port),
use_threads=arguments.use_threads,
ssl_certfile=arguments.ssl_cert,
ssl_keyfile=arguments.ssl_key,
ssl_version=arguments.ssl_version
)
server.serve_files_root = arguments.web_root
if arguments.password:
server.auth_add_creds('', arguments.password)
return server
|
python
|
def build_server_from_argparser(description=None, server_klass=None, handler_klass=None):
"""
Build a server from command line arguments. If a ServerClass or
HandlerClass is specified, then the object must inherit from the
corresponding AdvancedHTTPServer base class.
:param str description: Description string to be passed to the argument parser.
:param server_klass: Alternative server class to use.
:type server_klass: :py:class:`.AdvancedHTTPServer`
:param handler_klass: Alternative handler class to use.
:type handler_klass: :py:class:`.RequestHandler`
:return: A configured server instance.
:rtype: :py:class:`.AdvancedHTTPServer`
"""
import argparse
def _argp_dir_type(arg):
if not os.path.isdir(arg):
raise argparse.ArgumentTypeError("{0} is not a valid directory".format(repr(arg)))
return arg
def _argp_port_type(arg):
if not arg.isdigit():
raise argparse.ArgumentTypeError("{0} is not a valid port".format(repr(arg)))
arg = int(arg)
if arg < 0 or arg > 65535:
raise argparse.ArgumentTypeError("{0} is not a valid port".format(repr(arg)))
return arg
description = (description or 'HTTP Server')
server_klass = (server_klass or AdvancedHTTPServer)
handler_klass = (handler_klass or RequestHandler)
parser = argparse.ArgumentParser(conflict_handler='resolve', description=description, fromfile_prefix_chars='@')
parser.epilog = 'When a config file is specified with --config only the --log, --log-file and --password options will be used.'
parser.add_argument('-c', '--conf', dest='config', type=argparse.FileType('r'), help='read settings from a config file')
parser.add_argument('-i', '--ip', dest='ip', default='0.0.0.0', help='the ip address to serve on')
parser.add_argument('-L', '--log', dest='loglvl', choices=('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'), default='INFO', help='set the logging level')
parser.add_argument('-p', '--port', dest='port', default=8080, type=_argp_port_type, help='port to serve on')
parser.add_argument('-v', '--version', action='version', version=parser.prog + ' Version: ' + __version__)
parser.add_argument('-w', '--web-root', dest='web_root', default='.', type=_argp_dir_type, help='path to the web root directory')
parser.add_argument('--log-file', dest='log_file', help='log information to a file')
parser.add_argument('--no-threads', dest='use_threads', action='store_false', default=True, help='disable threading')
parser.add_argument('--password', dest='password', help='password to use for basic authentication')
ssl_group = parser.add_argument_group('ssl options')
ssl_group.add_argument('--ssl-cert', dest='ssl_cert', help='the ssl cert to use')
ssl_group.add_argument('--ssl-key', dest='ssl_key', help='the ssl key to use')
ssl_group.add_argument('--ssl-version', dest='ssl_version', choices=[p[9:] for p in dir(ssl) if p.startswith('PROTOCOL_')], help='the version of ssl to use')
arguments = parser.parse_args()
logging.getLogger('').setLevel(logging.DEBUG)
console_log_handler = logging.StreamHandler()
console_log_handler.setLevel(getattr(logging, arguments.loglvl))
console_log_handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)-8s %(message)s"))
logging.getLogger('').addHandler(console_log_handler)
if arguments.log_file:
main_file_handler = logging.handlers.RotatingFileHandler(arguments.log_file, maxBytes=262144, backupCount=5)
main_file_handler.setLevel(logging.DEBUG)
main_file_handler.setFormatter(logging.Formatter("%(asctime)s %(name)-30s %(levelname)-10s %(message)s"))
logging.getLogger('').setLevel(logging.DEBUG)
logging.getLogger('').addHandler(main_file_handler)
if arguments.config:
config = ConfigParser()
config.readfp(arguments.config)
server = build_server_from_config(
config,
'server',
server_klass=server_klass,
handler_klass=handler_klass
)
else:
server = server_klass(
handler_klass,
address=(arguments.ip, arguments.port),
use_threads=arguments.use_threads,
ssl_certfile=arguments.ssl_cert,
ssl_keyfile=arguments.ssl_key,
ssl_version=arguments.ssl_version
)
server.serve_files_root = arguments.web_root
if arguments.password:
server.auth_add_creds('', arguments.password)
return server
|
[
"def",
"build_server_from_argparser",
"(",
"description",
"=",
"None",
",",
"server_klass",
"=",
"None",
",",
"handler_klass",
"=",
"None",
")",
":",
"import",
"argparse",
"def",
"_argp_dir_type",
"(",
"arg",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"arg",
")",
":",
"raise",
"argparse",
".",
"ArgumentTypeError",
"(",
"\"{0} is not a valid directory\"",
".",
"format",
"(",
"repr",
"(",
"arg",
")",
")",
")",
"return",
"arg",
"def",
"_argp_port_type",
"(",
"arg",
")",
":",
"if",
"not",
"arg",
".",
"isdigit",
"(",
")",
":",
"raise",
"argparse",
".",
"ArgumentTypeError",
"(",
"\"{0} is not a valid port\"",
".",
"format",
"(",
"repr",
"(",
"arg",
")",
")",
")",
"arg",
"=",
"int",
"(",
"arg",
")",
"if",
"arg",
"<",
"0",
"or",
"arg",
">",
"65535",
":",
"raise",
"argparse",
".",
"ArgumentTypeError",
"(",
"\"{0} is not a valid port\"",
".",
"format",
"(",
"repr",
"(",
"arg",
")",
")",
")",
"return",
"arg",
"description",
"=",
"(",
"description",
"or",
"'HTTP Server'",
")",
"server_klass",
"=",
"(",
"server_klass",
"or",
"AdvancedHTTPServer",
")",
"handler_klass",
"=",
"(",
"handler_klass",
"or",
"RequestHandler",
")",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"conflict_handler",
"=",
"'resolve'",
",",
"description",
"=",
"description",
",",
"fromfile_prefix_chars",
"=",
"'@'",
")",
"parser",
".",
"epilog",
"=",
"'When a config file is specified with --config only the --log, --log-file and --password options will be used.'",
"parser",
".",
"add_argument",
"(",
"'-c'",
",",
"'--conf'",
",",
"dest",
"=",
"'config'",
",",
"type",
"=",
"argparse",
".",
"FileType",
"(",
"'r'",
")",
",",
"help",
"=",
"'read settings from a config file'",
")",
"parser",
".",
"add_argument",
"(",
"'-i'",
",",
"'--ip'",
",",
"dest",
"=",
"'ip'",
",",
"default",
"=",
"'0.0.0.0'",
",",
"help",
"=",
"'the ip address to serve on'",
")",
"parser",
".",
"add_argument",
"(",
"'-L'",
",",
"'--log'",
",",
"dest",
"=",
"'loglvl'",
",",
"choices",
"=",
"(",
"'DEBUG'",
",",
"'INFO'",
",",
"'WARNING'",
",",
"'ERROR'",
",",
"'CRITICAL'",
")",
",",
"default",
"=",
"'INFO'",
",",
"help",
"=",
"'set the logging level'",
")",
"parser",
".",
"add_argument",
"(",
"'-p'",
",",
"'--port'",
",",
"dest",
"=",
"'port'",
",",
"default",
"=",
"8080",
",",
"type",
"=",
"_argp_port_type",
",",
"help",
"=",
"'port to serve on'",
")",
"parser",
".",
"add_argument",
"(",
"'-v'",
",",
"'--version'",
",",
"action",
"=",
"'version'",
",",
"version",
"=",
"parser",
".",
"prog",
"+",
"' Version: '",
"+",
"__version__",
")",
"parser",
".",
"add_argument",
"(",
"'-w'",
",",
"'--web-root'",
",",
"dest",
"=",
"'web_root'",
",",
"default",
"=",
"'.'",
",",
"type",
"=",
"_argp_dir_type",
",",
"help",
"=",
"'path to the web root directory'",
")",
"parser",
".",
"add_argument",
"(",
"'--log-file'",
",",
"dest",
"=",
"'log_file'",
",",
"help",
"=",
"'log information to a file'",
")",
"parser",
".",
"add_argument",
"(",
"'--no-threads'",
",",
"dest",
"=",
"'use_threads'",
",",
"action",
"=",
"'store_false'",
",",
"default",
"=",
"True",
",",
"help",
"=",
"'disable threading'",
")",
"parser",
".",
"add_argument",
"(",
"'--password'",
",",
"dest",
"=",
"'password'",
",",
"help",
"=",
"'password to use for basic authentication'",
")",
"ssl_group",
"=",
"parser",
".",
"add_argument_group",
"(",
"'ssl options'",
")",
"ssl_group",
".",
"add_argument",
"(",
"'--ssl-cert'",
",",
"dest",
"=",
"'ssl_cert'",
",",
"help",
"=",
"'the ssl cert to use'",
")",
"ssl_group",
".",
"add_argument",
"(",
"'--ssl-key'",
",",
"dest",
"=",
"'ssl_key'",
",",
"help",
"=",
"'the ssl key to use'",
")",
"ssl_group",
".",
"add_argument",
"(",
"'--ssl-version'",
",",
"dest",
"=",
"'ssl_version'",
",",
"choices",
"=",
"[",
"p",
"[",
"9",
":",
"]",
"for",
"p",
"in",
"dir",
"(",
"ssl",
")",
"if",
"p",
".",
"startswith",
"(",
"'PROTOCOL_'",
")",
"]",
",",
"help",
"=",
"'the version of ssl to use'",
")",
"arguments",
"=",
"parser",
".",
"parse_args",
"(",
")",
"logging",
".",
"getLogger",
"(",
"''",
")",
".",
"setLevel",
"(",
"logging",
".",
"DEBUG",
")",
"console_log_handler",
"=",
"logging",
".",
"StreamHandler",
"(",
")",
"console_log_handler",
".",
"setLevel",
"(",
"getattr",
"(",
"logging",
",",
"arguments",
".",
"loglvl",
")",
")",
"console_log_handler",
".",
"setFormatter",
"(",
"logging",
".",
"Formatter",
"(",
"\"%(asctime)s %(levelname)-8s %(message)s\"",
")",
")",
"logging",
".",
"getLogger",
"(",
"''",
")",
".",
"addHandler",
"(",
"console_log_handler",
")",
"if",
"arguments",
".",
"log_file",
":",
"main_file_handler",
"=",
"logging",
".",
"handlers",
".",
"RotatingFileHandler",
"(",
"arguments",
".",
"log_file",
",",
"maxBytes",
"=",
"262144",
",",
"backupCount",
"=",
"5",
")",
"main_file_handler",
".",
"setLevel",
"(",
"logging",
".",
"DEBUG",
")",
"main_file_handler",
".",
"setFormatter",
"(",
"logging",
".",
"Formatter",
"(",
"\"%(asctime)s %(name)-30s %(levelname)-10s %(message)s\"",
")",
")",
"logging",
".",
"getLogger",
"(",
"''",
")",
".",
"setLevel",
"(",
"logging",
".",
"DEBUG",
")",
"logging",
".",
"getLogger",
"(",
"''",
")",
".",
"addHandler",
"(",
"main_file_handler",
")",
"if",
"arguments",
".",
"config",
":",
"config",
"=",
"ConfigParser",
"(",
")",
"config",
".",
"readfp",
"(",
"arguments",
".",
"config",
")",
"server",
"=",
"build_server_from_config",
"(",
"config",
",",
"'server'",
",",
"server_klass",
"=",
"server_klass",
",",
"handler_klass",
"=",
"handler_klass",
")",
"else",
":",
"server",
"=",
"server_klass",
"(",
"handler_klass",
",",
"address",
"=",
"(",
"arguments",
".",
"ip",
",",
"arguments",
".",
"port",
")",
",",
"use_threads",
"=",
"arguments",
".",
"use_threads",
",",
"ssl_certfile",
"=",
"arguments",
".",
"ssl_cert",
",",
"ssl_keyfile",
"=",
"arguments",
".",
"ssl_key",
",",
"ssl_version",
"=",
"arguments",
".",
"ssl_version",
")",
"server",
".",
"serve_files_root",
"=",
"arguments",
".",
"web_root",
"if",
"arguments",
".",
"password",
":",
"server",
".",
"auth_add_creds",
"(",
"''",
",",
"arguments",
".",
"password",
")",
"return",
"server"
] |
Build a server from command line arguments. If a ServerClass or
HandlerClass is specified, then the object must inherit from the
corresponding AdvancedHTTPServer base class.
:param str description: Description string to be passed to the argument parser.
:param server_klass: Alternative server class to use.
:type server_klass: :py:class:`.AdvancedHTTPServer`
:param handler_klass: Alternative handler class to use.
:type handler_klass: :py:class:`.RequestHandler`
:return: A configured server instance.
:rtype: :py:class:`.AdvancedHTTPServer`
|
[
"Build",
"a",
"server",
"from",
"command",
"line",
"arguments",
".",
"If",
"a",
"ServerClass",
"or",
"HandlerClass",
"is",
"specified",
"then",
"the",
"object",
"must",
"inherit",
"from",
"the",
"corresponding",
"AdvancedHTTPServer",
"base",
"class",
"."
] |
8c53cf7e1ddbf7ae9f573c82c5fe5f6992db7b5a
|
https://github.com/zeroSteiner/AdvancedHTTPServer/blob/8c53cf7e1ddbf7ae9f573c82c5fe5f6992db7b5a/advancedhttpserver.py#L236-L321
|
train
|
zeroSteiner/AdvancedHTTPServer
|
advancedhttpserver.py
|
build_server_from_config
|
def build_server_from_config(config, section_name, server_klass=None, handler_klass=None):
"""
Build a server from a provided :py:class:`configparser.ConfigParser`
instance. If a ServerClass or HandlerClass is specified, then the
object must inherit from the corresponding AdvancedHTTPServer base
class.
:param config: Configuration to retrieve settings from.
:type config: :py:class:`configparser.ConfigParser`
:param str section_name: The section name of the configuration to use.
:param server_klass: Alternative server class to use.
:type server_klass: :py:class:`.AdvancedHTTPServer`
:param handler_klass: Alternative handler class to use.
:type handler_klass: :py:class:`.RequestHandler`
:return: A configured server instance.
:rtype: :py:class:`.AdvancedHTTPServer`
"""
server_klass = (server_klass or AdvancedHTTPServer)
handler_klass = (handler_klass or RequestHandler)
port = config.getint(section_name, 'port')
web_root = None
if config.has_option(section_name, 'web_root'):
web_root = config.get(section_name, 'web_root')
if config.has_option(section_name, 'ip'):
ip = config.get(section_name, 'ip')
else:
ip = '0.0.0.0'
ssl_certfile = None
if config.has_option(section_name, 'ssl_cert'):
ssl_certfile = config.get(section_name, 'ssl_cert')
ssl_keyfile = None
if config.has_option(section_name, 'ssl_key'):
ssl_keyfile = config.get(section_name, 'ssl_key')
ssl_version = None
if config.has_option(section_name, 'ssl_version'):
ssl_version = config.get(section_name, 'ssl_version')
server = server_klass(
handler_klass,
address=(ip, port),
ssl_certfile=ssl_certfile,
ssl_keyfile=ssl_keyfile,
ssl_version=ssl_version
)
if config.has_option(section_name, 'password_type'):
password_type = config.get(section_name, 'password_type')
else:
password_type = 'md5'
if config.has_option(section_name, 'password'):
password = config.get(section_name, 'password')
if config.has_option(section_name, 'username'):
username = config.get(section_name, 'username')
else:
username = ''
server.auth_add_creds(username, password, pwtype=password_type)
cred_idx = 0
while config.has_option(section_name, 'password' + str(cred_idx)):
password = config.get(section_name, 'password' + str(cred_idx))
if not config.has_option(section_name, 'username' + str(cred_idx)):
break
username = config.get(section_name, 'username' + str(cred_idx))
server.auth_add_creds(username, password, pwtype=password_type)
cred_idx += 1
if web_root is None:
server.serve_files = False
else:
server.serve_files = True
server.serve_files_root = web_root
if config.has_option(section_name, 'list_directories'):
server.serve_files_list_directories = config.getboolean(section_name, 'list_directories')
return server
|
python
|
def build_server_from_config(config, section_name, server_klass=None, handler_klass=None):
"""
Build a server from a provided :py:class:`configparser.ConfigParser`
instance. If a ServerClass or HandlerClass is specified, then the
object must inherit from the corresponding AdvancedHTTPServer base
class.
:param config: Configuration to retrieve settings from.
:type config: :py:class:`configparser.ConfigParser`
:param str section_name: The section name of the configuration to use.
:param server_klass: Alternative server class to use.
:type server_klass: :py:class:`.AdvancedHTTPServer`
:param handler_klass: Alternative handler class to use.
:type handler_klass: :py:class:`.RequestHandler`
:return: A configured server instance.
:rtype: :py:class:`.AdvancedHTTPServer`
"""
server_klass = (server_klass or AdvancedHTTPServer)
handler_klass = (handler_klass or RequestHandler)
port = config.getint(section_name, 'port')
web_root = None
if config.has_option(section_name, 'web_root'):
web_root = config.get(section_name, 'web_root')
if config.has_option(section_name, 'ip'):
ip = config.get(section_name, 'ip')
else:
ip = '0.0.0.0'
ssl_certfile = None
if config.has_option(section_name, 'ssl_cert'):
ssl_certfile = config.get(section_name, 'ssl_cert')
ssl_keyfile = None
if config.has_option(section_name, 'ssl_key'):
ssl_keyfile = config.get(section_name, 'ssl_key')
ssl_version = None
if config.has_option(section_name, 'ssl_version'):
ssl_version = config.get(section_name, 'ssl_version')
server = server_klass(
handler_klass,
address=(ip, port),
ssl_certfile=ssl_certfile,
ssl_keyfile=ssl_keyfile,
ssl_version=ssl_version
)
if config.has_option(section_name, 'password_type'):
password_type = config.get(section_name, 'password_type')
else:
password_type = 'md5'
if config.has_option(section_name, 'password'):
password = config.get(section_name, 'password')
if config.has_option(section_name, 'username'):
username = config.get(section_name, 'username')
else:
username = ''
server.auth_add_creds(username, password, pwtype=password_type)
cred_idx = 0
while config.has_option(section_name, 'password' + str(cred_idx)):
password = config.get(section_name, 'password' + str(cred_idx))
if not config.has_option(section_name, 'username' + str(cred_idx)):
break
username = config.get(section_name, 'username' + str(cred_idx))
server.auth_add_creds(username, password, pwtype=password_type)
cred_idx += 1
if web_root is None:
server.serve_files = False
else:
server.serve_files = True
server.serve_files_root = web_root
if config.has_option(section_name, 'list_directories'):
server.serve_files_list_directories = config.getboolean(section_name, 'list_directories')
return server
|
[
"def",
"build_server_from_config",
"(",
"config",
",",
"section_name",
",",
"server_klass",
"=",
"None",
",",
"handler_klass",
"=",
"None",
")",
":",
"server_klass",
"=",
"(",
"server_klass",
"or",
"AdvancedHTTPServer",
")",
"handler_klass",
"=",
"(",
"handler_klass",
"or",
"RequestHandler",
")",
"port",
"=",
"config",
".",
"getint",
"(",
"section_name",
",",
"'port'",
")",
"web_root",
"=",
"None",
"if",
"config",
".",
"has_option",
"(",
"section_name",
",",
"'web_root'",
")",
":",
"web_root",
"=",
"config",
".",
"get",
"(",
"section_name",
",",
"'web_root'",
")",
"if",
"config",
".",
"has_option",
"(",
"section_name",
",",
"'ip'",
")",
":",
"ip",
"=",
"config",
".",
"get",
"(",
"section_name",
",",
"'ip'",
")",
"else",
":",
"ip",
"=",
"'0.0.0.0'",
"ssl_certfile",
"=",
"None",
"if",
"config",
".",
"has_option",
"(",
"section_name",
",",
"'ssl_cert'",
")",
":",
"ssl_certfile",
"=",
"config",
".",
"get",
"(",
"section_name",
",",
"'ssl_cert'",
")",
"ssl_keyfile",
"=",
"None",
"if",
"config",
".",
"has_option",
"(",
"section_name",
",",
"'ssl_key'",
")",
":",
"ssl_keyfile",
"=",
"config",
".",
"get",
"(",
"section_name",
",",
"'ssl_key'",
")",
"ssl_version",
"=",
"None",
"if",
"config",
".",
"has_option",
"(",
"section_name",
",",
"'ssl_version'",
")",
":",
"ssl_version",
"=",
"config",
".",
"get",
"(",
"section_name",
",",
"'ssl_version'",
")",
"server",
"=",
"server_klass",
"(",
"handler_klass",
",",
"address",
"=",
"(",
"ip",
",",
"port",
")",
",",
"ssl_certfile",
"=",
"ssl_certfile",
",",
"ssl_keyfile",
"=",
"ssl_keyfile",
",",
"ssl_version",
"=",
"ssl_version",
")",
"if",
"config",
".",
"has_option",
"(",
"section_name",
",",
"'password_type'",
")",
":",
"password_type",
"=",
"config",
".",
"get",
"(",
"section_name",
",",
"'password_type'",
")",
"else",
":",
"password_type",
"=",
"'md5'",
"if",
"config",
".",
"has_option",
"(",
"section_name",
",",
"'password'",
")",
":",
"password",
"=",
"config",
".",
"get",
"(",
"section_name",
",",
"'password'",
")",
"if",
"config",
".",
"has_option",
"(",
"section_name",
",",
"'username'",
")",
":",
"username",
"=",
"config",
".",
"get",
"(",
"section_name",
",",
"'username'",
")",
"else",
":",
"username",
"=",
"''",
"server",
".",
"auth_add_creds",
"(",
"username",
",",
"password",
",",
"pwtype",
"=",
"password_type",
")",
"cred_idx",
"=",
"0",
"while",
"config",
".",
"has_option",
"(",
"section_name",
",",
"'password'",
"+",
"str",
"(",
"cred_idx",
")",
")",
":",
"password",
"=",
"config",
".",
"get",
"(",
"section_name",
",",
"'password'",
"+",
"str",
"(",
"cred_idx",
")",
")",
"if",
"not",
"config",
".",
"has_option",
"(",
"section_name",
",",
"'username'",
"+",
"str",
"(",
"cred_idx",
")",
")",
":",
"break",
"username",
"=",
"config",
".",
"get",
"(",
"section_name",
",",
"'username'",
"+",
"str",
"(",
"cred_idx",
")",
")",
"server",
".",
"auth_add_creds",
"(",
"username",
",",
"password",
",",
"pwtype",
"=",
"password_type",
")",
"cred_idx",
"+=",
"1",
"if",
"web_root",
"is",
"None",
":",
"server",
".",
"serve_files",
"=",
"False",
"else",
":",
"server",
".",
"serve_files",
"=",
"True",
"server",
".",
"serve_files_root",
"=",
"web_root",
"if",
"config",
".",
"has_option",
"(",
"section_name",
",",
"'list_directories'",
")",
":",
"server",
".",
"serve_files_list_directories",
"=",
"config",
".",
"getboolean",
"(",
"section_name",
",",
"'list_directories'",
")",
"return",
"server"
] |
Build a server from a provided :py:class:`configparser.ConfigParser`
instance. If a ServerClass or HandlerClass is specified, then the
object must inherit from the corresponding AdvancedHTTPServer base
class.
:param config: Configuration to retrieve settings from.
:type config: :py:class:`configparser.ConfigParser`
:param str section_name: The section name of the configuration to use.
:param server_klass: Alternative server class to use.
:type server_klass: :py:class:`.AdvancedHTTPServer`
:param handler_klass: Alternative handler class to use.
:type handler_klass: :py:class:`.RequestHandler`
:return: A configured server instance.
:rtype: :py:class:`.AdvancedHTTPServer`
|
[
"Build",
"a",
"server",
"from",
"a",
"provided",
":",
"py",
":",
"class",
":",
"configparser",
".",
"ConfigParser",
"instance",
".",
"If",
"a",
"ServerClass",
"or",
"HandlerClass",
"is",
"specified",
"then",
"the",
"object",
"must",
"inherit",
"from",
"the",
"corresponding",
"AdvancedHTTPServer",
"base",
"class",
"."
] |
8c53cf7e1ddbf7ae9f573c82c5fe5f6992db7b5a
|
https://github.com/zeroSteiner/AdvancedHTTPServer/blob/8c53cf7e1ddbf7ae9f573c82c5fe5f6992db7b5a/advancedhttpserver.py#L323-L395
|
train
|
zeroSteiner/AdvancedHTTPServer
|
advancedhttpserver.py
|
RPCClient.set_serializer
|
def set_serializer(self, serializer_name, compression=None):
"""
Configure the serializer to use for communication with the server.
The serializer specified must be valid and in the
:py:data:`.g_serializer_drivers` map.
:param str serializer_name: The name of the serializer to use.
:param str compression: The name of a compression library to use.
"""
self.serializer = Serializer(serializer_name, charset='UTF-8', compression=compression)
self.logger.debug('using serializer: ' + serializer_name)
|
python
|
def set_serializer(self, serializer_name, compression=None):
"""
Configure the serializer to use for communication with the server.
The serializer specified must be valid and in the
:py:data:`.g_serializer_drivers` map.
:param str serializer_name: The name of the serializer to use.
:param str compression: The name of a compression library to use.
"""
self.serializer = Serializer(serializer_name, charset='UTF-8', compression=compression)
self.logger.debug('using serializer: ' + serializer_name)
|
[
"def",
"set_serializer",
"(",
"self",
",",
"serializer_name",
",",
"compression",
"=",
"None",
")",
":",
"self",
".",
"serializer",
"=",
"Serializer",
"(",
"serializer_name",
",",
"charset",
"=",
"'UTF-8'",
",",
"compression",
"=",
"compression",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"'using serializer: '",
"+",
"serializer_name",
")"
] |
Configure the serializer to use for communication with the server.
The serializer specified must be valid and in the
:py:data:`.g_serializer_drivers` map.
:param str serializer_name: The name of the serializer to use.
:param str compression: The name of a compression library to use.
|
[
"Configure",
"the",
"serializer",
"to",
"use",
"for",
"communication",
"with",
"the",
"server",
".",
"The",
"serializer",
"specified",
"must",
"be",
"valid",
"and",
"in",
"the",
":",
"py",
":",
"data",
":",
".",
"g_serializer_drivers",
"map",
"."
] |
8c53cf7e1ddbf7ae9f573c82c5fe5f6992db7b5a
|
https://github.com/zeroSteiner/AdvancedHTTPServer/blob/8c53cf7e1ddbf7ae9f573c82c5fe5f6992db7b5a/advancedhttpserver.py#L548-L558
|
train
|
zeroSteiner/AdvancedHTTPServer
|
advancedhttpserver.py
|
RPCClient.reconnect
|
def reconnect(self):
"""Reconnect to the remote server."""
self.lock.acquire()
if self.use_ssl:
self.client = http.client.HTTPSConnection(self.host, self.port, context=self.ssl_context)
else:
self.client = http.client.HTTPConnection(self.host, self.port)
self.lock.release()
|
python
|
def reconnect(self):
"""Reconnect to the remote server."""
self.lock.acquire()
if self.use_ssl:
self.client = http.client.HTTPSConnection(self.host, self.port, context=self.ssl_context)
else:
self.client = http.client.HTTPConnection(self.host, self.port)
self.lock.release()
|
[
"def",
"reconnect",
"(",
"self",
")",
":",
"self",
".",
"lock",
".",
"acquire",
"(",
")",
"if",
"self",
".",
"use_ssl",
":",
"self",
".",
"client",
"=",
"http",
".",
"client",
".",
"HTTPSConnection",
"(",
"self",
".",
"host",
",",
"self",
".",
"port",
",",
"context",
"=",
"self",
".",
"ssl_context",
")",
"else",
":",
"self",
".",
"client",
"=",
"http",
".",
"client",
".",
"HTTPConnection",
"(",
"self",
".",
"host",
",",
"self",
".",
"port",
")",
"self",
".",
"lock",
".",
"release",
"(",
")"
] |
Reconnect to the remote server.
|
[
"Reconnect",
"to",
"the",
"remote",
"server",
"."
] |
8c53cf7e1ddbf7ae9f573c82c5fe5f6992db7b5a
|
https://github.com/zeroSteiner/AdvancedHTTPServer/blob/8c53cf7e1ddbf7ae9f573c82c5fe5f6992db7b5a/advancedhttpserver.py#L571-L578
|
train
|
zeroSteiner/AdvancedHTTPServer
|
advancedhttpserver.py
|
RPCClient.call
|
def call(self, method, *args, **kwargs):
"""
Issue a call to the remote end point to execute the specified
procedure.
:param str method: The name of the remote procedure to execute.
:return: The return value from the remote function.
"""
if kwargs:
options = self.encode(dict(args=args, kwargs=kwargs))
else:
options = self.encode(args)
headers = {}
if self.headers:
headers.update(self.headers)
headers['Content-Type'] = self.serializer.content_type
headers['Content-Length'] = str(len(options))
headers['Connection'] = 'close'
if self.username is not None and self.password is not None:
headers['Authorization'] = 'Basic ' + base64.b64encode((self.username + ':' + self.password).encode('UTF-8')).decode('UTF-8')
method = os.path.join(self.uri_base, method)
self.logger.debug('calling RPC method: ' + method[1:])
try:
with self.lock:
self.client.request('RPC', method, options, headers)
resp = self.client.getresponse()
except http.client.ImproperConnectionState:
raise RPCConnectionError('improper connection state')
if resp.status != 200:
raise RPCError(resp.reason, resp.status)
resp_data = resp.read()
resp_data = self.decode(resp_data)
if not ('exception_occurred' in resp_data and 'result' in resp_data):
raise RPCError('missing response information', resp.status)
if resp_data['exception_occurred']:
raise RPCError('remote method incurred an exception', resp.status, remote_exception=resp_data['exception'])
return resp_data['result']
|
python
|
def call(self, method, *args, **kwargs):
"""
Issue a call to the remote end point to execute the specified
procedure.
:param str method: The name of the remote procedure to execute.
:return: The return value from the remote function.
"""
if kwargs:
options = self.encode(dict(args=args, kwargs=kwargs))
else:
options = self.encode(args)
headers = {}
if self.headers:
headers.update(self.headers)
headers['Content-Type'] = self.serializer.content_type
headers['Content-Length'] = str(len(options))
headers['Connection'] = 'close'
if self.username is not None and self.password is not None:
headers['Authorization'] = 'Basic ' + base64.b64encode((self.username + ':' + self.password).encode('UTF-8')).decode('UTF-8')
method = os.path.join(self.uri_base, method)
self.logger.debug('calling RPC method: ' + method[1:])
try:
with self.lock:
self.client.request('RPC', method, options, headers)
resp = self.client.getresponse()
except http.client.ImproperConnectionState:
raise RPCConnectionError('improper connection state')
if resp.status != 200:
raise RPCError(resp.reason, resp.status)
resp_data = resp.read()
resp_data = self.decode(resp_data)
if not ('exception_occurred' in resp_data and 'result' in resp_data):
raise RPCError('missing response information', resp.status)
if resp_data['exception_occurred']:
raise RPCError('remote method incurred an exception', resp.status, remote_exception=resp_data['exception'])
return resp_data['result']
|
[
"def",
"call",
"(",
"self",
",",
"method",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"kwargs",
":",
"options",
"=",
"self",
".",
"encode",
"(",
"dict",
"(",
"args",
"=",
"args",
",",
"kwargs",
"=",
"kwargs",
")",
")",
"else",
":",
"options",
"=",
"self",
".",
"encode",
"(",
"args",
")",
"headers",
"=",
"{",
"}",
"if",
"self",
".",
"headers",
":",
"headers",
".",
"update",
"(",
"self",
".",
"headers",
")",
"headers",
"[",
"'Content-Type'",
"]",
"=",
"self",
".",
"serializer",
".",
"content_type",
"headers",
"[",
"'Content-Length'",
"]",
"=",
"str",
"(",
"len",
"(",
"options",
")",
")",
"headers",
"[",
"'Connection'",
"]",
"=",
"'close'",
"if",
"self",
".",
"username",
"is",
"not",
"None",
"and",
"self",
".",
"password",
"is",
"not",
"None",
":",
"headers",
"[",
"'Authorization'",
"]",
"=",
"'Basic '",
"+",
"base64",
".",
"b64encode",
"(",
"(",
"self",
".",
"username",
"+",
"':'",
"+",
"self",
".",
"password",
")",
".",
"encode",
"(",
"'UTF-8'",
")",
")",
".",
"decode",
"(",
"'UTF-8'",
")",
"method",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"uri_base",
",",
"method",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"'calling RPC method: '",
"+",
"method",
"[",
"1",
":",
"]",
")",
"try",
":",
"with",
"self",
".",
"lock",
":",
"self",
".",
"client",
".",
"request",
"(",
"'RPC'",
",",
"method",
",",
"options",
",",
"headers",
")",
"resp",
"=",
"self",
".",
"client",
".",
"getresponse",
"(",
")",
"except",
"http",
".",
"client",
".",
"ImproperConnectionState",
":",
"raise",
"RPCConnectionError",
"(",
"'improper connection state'",
")",
"if",
"resp",
".",
"status",
"!=",
"200",
":",
"raise",
"RPCError",
"(",
"resp",
".",
"reason",
",",
"resp",
".",
"status",
")",
"resp_data",
"=",
"resp",
".",
"read",
"(",
")",
"resp_data",
"=",
"self",
".",
"decode",
"(",
"resp_data",
")",
"if",
"not",
"(",
"'exception_occurred'",
"in",
"resp_data",
"and",
"'result'",
"in",
"resp_data",
")",
":",
"raise",
"RPCError",
"(",
"'missing response information'",
",",
"resp",
".",
"status",
")",
"if",
"resp_data",
"[",
"'exception_occurred'",
"]",
":",
"raise",
"RPCError",
"(",
"'remote method incurred an exception'",
",",
"resp",
".",
"status",
",",
"remote_exception",
"=",
"resp_data",
"[",
"'exception'",
"]",
")",
"return",
"resp_data",
"[",
"'result'",
"]"
] |
Issue a call to the remote end point to execute the specified
procedure.
:param str method: The name of the remote procedure to execute.
:return: The return value from the remote function.
|
[
"Issue",
"a",
"call",
"to",
"the",
"remote",
"end",
"point",
"to",
"execute",
"the",
"specified",
"procedure",
"."
] |
8c53cf7e1ddbf7ae9f573c82c5fe5f6992db7b5a
|
https://github.com/zeroSteiner/AdvancedHTTPServer/blob/8c53cf7e1ddbf7ae9f573c82c5fe5f6992db7b5a/advancedhttpserver.py#L580-L620
|
train
|
zeroSteiner/AdvancedHTTPServer
|
advancedhttpserver.py
|
RPCClientCached.cache_call
|
def cache_call(self, method, *options):
"""
Call a remote method and store the result locally. Subsequent
calls to the same method with the same arguments will return the
cached result without invoking the remote procedure. Cached results are
kept indefinitely and must be manually refreshed with a call to
:py:meth:`.cache_call_refresh`.
:param str method: The name of the remote procedure to execute.
:return: The return value from the remote function.
"""
options_hash = self.encode(options)
if len(options_hash) > 20:
options_hash = hashlib.new('sha1', options_hash).digest()
options_hash = sqlite3.Binary(options_hash)
with self.cache_lock:
cursor = self.cache_db.cursor()
cursor.execute('SELECT return_value FROM cache WHERE method = ? AND options_hash = ?', (method, options_hash))
return_value = cursor.fetchone()
if return_value:
return_value = bytes(return_value[0])
return self.decode(return_value)
return_value = self.call(method, *options)
store_return_value = sqlite3.Binary(self.encode(return_value))
with self.cache_lock:
cursor = self.cache_db.cursor()
cursor.execute('INSERT INTO cache (method, options_hash, return_value) VALUES (?, ?, ?)', (method, options_hash, store_return_value))
self.cache_db.commit()
return return_value
|
python
|
def cache_call(self, method, *options):
"""
Call a remote method and store the result locally. Subsequent
calls to the same method with the same arguments will return the
cached result without invoking the remote procedure. Cached results are
kept indefinitely and must be manually refreshed with a call to
:py:meth:`.cache_call_refresh`.
:param str method: The name of the remote procedure to execute.
:return: The return value from the remote function.
"""
options_hash = self.encode(options)
if len(options_hash) > 20:
options_hash = hashlib.new('sha1', options_hash).digest()
options_hash = sqlite3.Binary(options_hash)
with self.cache_lock:
cursor = self.cache_db.cursor()
cursor.execute('SELECT return_value FROM cache WHERE method = ? AND options_hash = ?', (method, options_hash))
return_value = cursor.fetchone()
if return_value:
return_value = bytes(return_value[0])
return self.decode(return_value)
return_value = self.call(method, *options)
store_return_value = sqlite3.Binary(self.encode(return_value))
with self.cache_lock:
cursor = self.cache_db.cursor()
cursor.execute('INSERT INTO cache (method, options_hash, return_value) VALUES (?, ?, ?)', (method, options_hash, store_return_value))
self.cache_db.commit()
return return_value
|
[
"def",
"cache_call",
"(",
"self",
",",
"method",
",",
"*",
"options",
")",
":",
"options_hash",
"=",
"self",
".",
"encode",
"(",
"options",
")",
"if",
"len",
"(",
"options_hash",
")",
">",
"20",
":",
"options_hash",
"=",
"hashlib",
".",
"new",
"(",
"'sha1'",
",",
"options_hash",
")",
".",
"digest",
"(",
")",
"options_hash",
"=",
"sqlite3",
".",
"Binary",
"(",
"options_hash",
")",
"with",
"self",
".",
"cache_lock",
":",
"cursor",
"=",
"self",
".",
"cache_db",
".",
"cursor",
"(",
")",
"cursor",
".",
"execute",
"(",
"'SELECT return_value FROM cache WHERE method = ? AND options_hash = ?'",
",",
"(",
"method",
",",
"options_hash",
")",
")",
"return_value",
"=",
"cursor",
".",
"fetchone",
"(",
")",
"if",
"return_value",
":",
"return_value",
"=",
"bytes",
"(",
"return_value",
"[",
"0",
"]",
")",
"return",
"self",
".",
"decode",
"(",
"return_value",
")",
"return_value",
"=",
"self",
".",
"call",
"(",
"method",
",",
"*",
"options",
")",
"store_return_value",
"=",
"sqlite3",
".",
"Binary",
"(",
"self",
".",
"encode",
"(",
"return_value",
")",
")",
"with",
"self",
".",
"cache_lock",
":",
"cursor",
"=",
"self",
".",
"cache_db",
".",
"cursor",
"(",
")",
"cursor",
".",
"execute",
"(",
"'INSERT INTO cache (method, options_hash, return_value) VALUES (?, ?, ?)'",
",",
"(",
"method",
",",
"options_hash",
",",
"store_return_value",
")",
")",
"self",
".",
"cache_db",
".",
"commit",
"(",
")",
"return",
"return_value"
] |
Call a remote method and store the result locally. Subsequent
calls to the same method with the same arguments will return the
cached result without invoking the remote procedure. Cached results are
kept indefinitely and must be manually refreshed with a call to
:py:meth:`.cache_call_refresh`.
:param str method: The name of the remote procedure to execute.
:return: The return value from the remote function.
|
[
"Call",
"a",
"remote",
"method",
"and",
"store",
"the",
"result",
"locally",
".",
"Subsequent",
"calls",
"to",
"the",
"same",
"method",
"with",
"the",
"same",
"arguments",
"will",
"return",
"the",
"cached",
"result",
"without",
"invoking",
"the",
"remote",
"procedure",
".",
"Cached",
"results",
"are",
"kept",
"indefinitely",
"and",
"must",
"be",
"manually",
"refreshed",
"with",
"a",
"call",
"to",
":",
"py",
":",
"meth",
":",
".",
"cache_call_refresh",
"."
] |
8c53cf7e1ddbf7ae9f573c82c5fe5f6992db7b5a
|
https://github.com/zeroSteiner/AdvancedHTTPServer/blob/8c53cf7e1ddbf7ae9f573c82c5fe5f6992db7b5a/advancedhttpserver.py#L636-L665
|
train
|
zeroSteiner/AdvancedHTTPServer
|
advancedhttpserver.py
|
RPCClientCached.cache_call_refresh
|
def cache_call_refresh(self, method, *options):
"""
Call a remote method and update the local cache with the result
if it already existed.
:param str method: The name of the remote procedure to execute.
:return: The return value from the remote function.
"""
options_hash = self.encode(options)
if len(options_hash) > 20:
options_hash = hashlib.new('sha1', options).digest()
options_hash = sqlite3.Binary(options_hash)
with self.cache_lock:
cursor = self.cache_db.cursor()
cursor.execute('DELETE FROM cache WHERE method = ? AND options_hash = ?', (method, options_hash))
return_value = self.call(method, *options)
store_return_value = sqlite3.Binary(self.encode(return_value))
with self.cache_lock:
cursor = self.cache_db.cursor()
cursor.execute('INSERT INTO cache (method, options_hash, return_value) VALUES (?, ?, ?)', (method, options_hash, store_return_value))
self.cache_db.commit()
return return_value
|
python
|
def cache_call_refresh(self, method, *options):
"""
Call a remote method and update the local cache with the result
if it already existed.
:param str method: The name of the remote procedure to execute.
:return: The return value from the remote function.
"""
options_hash = self.encode(options)
if len(options_hash) > 20:
options_hash = hashlib.new('sha1', options).digest()
options_hash = sqlite3.Binary(options_hash)
with self.cache_lock:
cursor = self.cache_db.cursor()
cursor.execute('DELETE FROM cache WHERE method = ? AND options_hash = ?', (method, options_hash))
return_value = self.call(method, *options)
store_return_value = sqlite3.Binary(self.encode(return_value))
with self.cache_lock:
cursor = self.cache_db.cursor()
cursor.execute('INSERT INTO cache (method, options_hash, return_value) VALUES (?, ?, ?)', (method, options_hash, store_return_value))
self.cache_db.commit()
return return_value
|
[
"def",
"cache_call_refresh",
"(",
"self",
",",
"method",
",",
"*",
"options",
")",
":",
"options_hash",
"=",
"self",
".",
"encode",
"(",
"options",
")",
"if",
"len",
"(",
"options_hash",
")",
">",
"20",
":",
"options_hash",
"=",
"hashlib",
".",
"new",
"(",
"'sha1'",
",",
"options",
")",
".",
"digest",
"(",
")",
"options_hash",
"=",
"sqlite3",
".",
"Binary",
"(",
"options_hash",
")",
"with",
"self",
".",
"cache_lock",
":",
"cursor",
"=",
"self",
".",
"cache_db",
".",
"cursor",
"(",
")",
"cursor",
".",
"execute",
"(",
"'DELETE FROM cache WHERE method = ? AND options_hash = ?'",
",",
"(",
"method",
",",
"options_hash",
")",
")",
"return_value",
"=",
"self",
".",
"call",
"(",
"method",
",",
"*",
"options",
")",
"store_return_value",
"=",
"sqlite3",
".",
"Binary",
"(",
"self",
".",
"encode",
"(",
"return_value",
")",
")",
"with",
"self",
".",
"cache_lock",
":",
"cursor",
"=",
"self",
".",
"cache_db",
".",
"cursor",
"(",
")",
"cursor",
".",
"execute",
"(",
"'INSERT INTO cache (method, options_hash, return_value) VALUES (?, ?, ?)'",
",",
"(",
"method",
",",
"options_hash",
",",
"store_return_value",
")",
")",
"self",
".",
"cache_db",
".",
"commit",
"(",
")",
"return",
"return_value"
] |
Call a remote method and update the local cache with the result
if it already existed.
:param str method: The name of the remote procedure to execute.
:return: The return value from the remote function.
|
[
"Call",
"a",
"remote",
"method",
"and",
"update",
"the",
"local",
"cache",
"with",
"the",
"result",
"if",
"it",
"already",
"existed",
"."
] |
8c53cf7e1ddbf7ae9f573c82c5fe5f6992db7b5a
|
https://github.com/zeroSteiner/AdvancedHTTPServer/blob/8c53cf7e1ddbf7ae9f573c82c5fe5f6992db7b5a/advancedhttpserver.py#L667-L689
|
train
|
zeroSteiner/AdvancedHTTPServer
|
advancedhttpserver.py
|
RPCClientCached.cache_clear
|
def cache_clear(self):
"""Purge the local store of all cached function information."""
with self.cache_lock:
cursor = self.cache_db.cursor()
cursor.execute('DELETE FROM cache')
self.cache_db.commit()
self.logger.info('the RPC cache has been purged')
return
|
python
|
def cache_clear(self):
"""Purge the local store of all cached function information."""
with self.cache_lock:
cursor = self.cache_db.cursor()
cursor.execute('DELETE FROM cache')
self.cache_db.commit()
self.logger.info('the RPC cache has been purged')
return
|
[
"def",
"cache_clear",
"(",
"self",
")",
":",
"with",
"self",
".",
"cache_lock",
":",
"cursor",
"=",
"self",
".",
"cache_db",
".",
"cursor",
"(",
")",
"cursor",
".",
"execute",
"(",
"'DELETE FROM cache'",
")",
"self",
".",
"cache_db",
".",
"commit",
"(",
")",
"self",
".",
"logger",
".",
"info",
"(",
"'the RPC cache has been purged'",
")",
"return"
] |
Purge the local store of all cached function information.
|
[
"Purge",
"the",
"local",
"store",
"of",
"all",
"cached",
"function",
"information",
"."
] |
8c53cf7e1ddbf7ae9f573c82c5fe5f6992db7b5a
|
https://github.com/zeroSteiner/AdvancedHTTPServer/blob/8c53cf7e1ddbf7ae9f573c82c5fe5f6992db7b5a/advancedhttpserver.py#L691-L698
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.