repository_name stringclasses 316 values | func_path_in_repository stringlengths 6 223 | func_name stringlengths 1 134 | language stringclasses 1 value | func_code_string stringlengths 57 65.5k | func_documentation_string stringlengths 1 46.3k | split_name stringclasses 1 value | func_code_url stringlengths 91 315 | called_functions listlengths 1 156 ⌀ | enclosing_scope stringlengths 2 1.48M |
|---|---|---|---|---|---|---|---|---|---|
cnelson/python-fleet | fleet/v1/objects/unit.py | Unit.destroy | python | def destroy(self):
# if this unit didn't come from fleet, we can't destroy it
if not self._is_live():
raise RuntimeError('A unit must be submitted to fleet before it can destroyed.')
return self._client.destroy_unit(self.name) | Remove a unit from the fleet cluster
Returns:
True: The unit was removed
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400 | train | https://github.com/cnelson/python-fleet/blob/a11dcd8bb3986d1d8f0af90d2da7399c9cc54b4d/fleet/v1/objects/unit.py#L304-L319 | [
"def _is_live(self):\n \"\"\"Checks to see if this unit came from fleet, or was created locally\n\n Only units with a .name property (set by the server), and _client property are considered 'live'\n\n Returns:\n True: The object is live\n False: The object is not\n\n \"\"\"\n if 'name' in self._data and self._client:\n return True\n\n return False\n"
] | class Unit(FleetObject):
"""This object represents a Unit in Fleet
Create and modify Unit entities to communicate to fleet the desired state of the cluster.
This simply declares what should be happening; the backend system still has to react to the changes in
this desired state. The actual state of the system is communicated with UnitState entities.
Attributes (all are readonly):
Always available:
options (update with add_option, remove_option): list of UnitOption entities
desiredState: (update with set_desired_state): state the user wishes the Unit to be in
("inactive", "loaded", or "launched")
Available once units are submitted to fleet:
name: unique identifier of entity
currentState: state the Unit is currently in (same possible values as desiredState)
machineID: ID of machine to which the Unit is scheduled
A UnitOption represents a single option in a systemd unit file.
section: name of section that contains the option (e.g. "Unit", "Service", "Socket")
name: name of option (e.g. "BindsTo", "After", "ExecStart")
value: value of option (e.g. "/usr/bin/docker run busybox /bin/sleep 1000")
"""
_STATES = ['inactive', 'loaded', 'launched']
def __init__(self, client=None, data=None, desired_state=None, options=None, from_file=None, from_string=None):
"""Create a new unit
Args:
client (fleet.v1.Client, optional): The fleet client that retrieved this object
data (dict, optional): Initialize this object with this data. If this is used you must not
specify options, desired_state, from_file, or from_string
desired_state (string, optional): The desired_state for this object, defaults to 'launched' if not specified
If you do not specify data, You may specify one of the following args to initialize the object:
options (list, optional): A list of options to initialize the object with.
from_file (str, optional): Initialize this object from the unit file on disk at this path
from_string (str, optional): Initialize this object from the unit file in this string
If none are specified, an empty unit will be created
Raises:
IOError: from_file was specified and it does not exist
ValueError: Conflicting options, or The unit contents specified in from_string or from_file is not valid
"""
# make sure if they specify data, then they didn't specify anything else
if data and (desired_state or options or from_file or from_string):
raise ValueError('If you specify data you can not specify desired_state,'
'options, from_file, or from_string')
# count how many of options, from_file, from_string we have
given = 0
for thing in [options, from_file, from_string]:
if thing:
given += 1
# we should only have one, if we have more, yell at them
if given > 1:
raise ValueError('You must specify only one of options, from_file, from_string')
# ensure we have a minimum structure if we aren't passed one
if data is None:
# we set this here, instead as a default value to the arg
# as we want to be able to check it vs data above, it should be None in that case
if desired_state is None:
desired_state = 'launched'
if options is None:
options = []
# Minimum structure required by fleet
data = {
'desiredState': desired_state,
'options': options
}
# Call the parent class to configure us
super(Unit, self).__init__(client=client, data=data)
# If they asked us to load from a file, attemp to slurp it up
if from_file:
with open(from_file, 'r') as fh:
self._set_options_from_file(fh)
# If they asked us to load from a string, lie to the loader with StringIO
if from_string:
self._set_options_from_file(StringIO(from_string))
def __repr__(self):
return '<{0}: {1}>'.format(
self.__class__.__name__,
self.as_dict()
)
def __str__(self):
"""Generate a Unit file representation of this object"""
# build our output here
output = []
# get a ist of sections
sections = set([x['section'] for x in self._data['options']])
for section in sections:
# for each section, add it to our output
output.append(u'[{0}]'.format(section))
# iterate through the list of options, adding all items to this section
for option in self._data['options']:
if option['section'] == section:
output.append(u'{0}={1}'.format(option['name'], option['value']))
# join and return the output
return u"\n".join(output)
def _set_options_from_file(self, file_handle):
"""Parses a unit file and updates self._data['options']
Args:
file_handle (file): a file-like object (supporting read()) containing a unit
Returns:
True: The file was successfuly parsed and options were updated
Raises:
IOError: from_file was specified and it does not exist
ValueError: The unit contents specified in from_string or from_file is not valid
"""
# TODO: Find a library to handle this unit file parsing
# Can't use configparser, it doesn't handle multiple entries for the same key in the same section
# This is terribly naive
# build our output here
options = []
# keep track of line numbers to report when parsing problems happen
line_number = 0
# the section we are currently in
section = None
for line in file_handle.read().splitlines():
line_number += 1
# clear any extra white space
orig_line = line
line = line.strip()
# ignore comments, and blank lines
if not line or line.startswith('#'):
continue
# is this a section header? If so, update our variable and continue
# Section headers look like: [Section]
if line.startswith('[') and line.endswith(']'):
section = line.strip('[]')
continue
# We encountered a non blank line outside of a section, this is a problem
if not section:
raise ValueError(
'Unable to parse unit file; '
'Unexpected line outside of a section: {0} (line: {1}'.format(
line,
line_number
))
# Attempt to parse a line inside a section
# Lines should look like: name=value \
# continuation
continuation = False
try:
# if the previous value ends with \ then we are a continuation
# so remove the \, and set the flag so we'll append to this below
if options[-1]['value'].endswith('\\'):
options[-1]['value'] = options[-1]['value'][:-1]
continuation = True
except IndexError:
pass
try:
# if we are a continuation, then just append our value to the previous line
if continuation:
options[-1]['value'] += orig_line
continue
# else we are a normal line, so spit and get our name / value
name, value = line.split('=', 1)
options.append({
'section': section,
'name': name,
'value': value
})
except ValueError:
raise ValueError(
'Unable to parse unit file; '
'Malformed line in section {0}: {1} (line: {2})'.format(
section,
line,
line_number
))
# update our internal structure
self._data['options'] = options
return True
def _is_live(self):
"""Checks to see if this unit came from fleet, or was created locally
Only units with a .name property (set by the server), and _client property are considered 'live'
Returns:
True: The object is live
False: The object is not
"""
if 'name' in self._data and self._client:
return True
return False
def add_option(self, section, name, value):
"""Add an option to a section of the unit file
Args:
section (str): The name of the section, If it doesn't exist it will be created
name (str): The name of the option to add
value (str): The value of the option
Returns:
True: The item was added
"""
# Don't allow updating units we loaded from fleet, it's not supported
if self._is_live():
raise RuntimeError('Submitted units cannot update their options')
option = {
'section': section,
'name': name,
'value': value
}
self._data['options'].append(option)
return True
def remove_option(self, section, name, value=None):
"""Remove an option from a unit
Args:
section (str): The section to remove from.
name (str): The item to remove.
value (str, optional): If specified, only the option matching this value will be removed
If not specified, all options with ``name`` in ``section`` will be removed
Returns:
True: At least one item was removed
False: The item requested to remove was not found
"""
# Don't allow updating units we loaded from fleet, it's not supported
if self._is_live():
raise RuntimeError('Submitted units cannot update their options')
removed = 0
# iterate through a copy of the options
for option in list(self._data['options']):
# if it's in our section
if option['section'] == section:
# and it matches our name
if option['name'] == name:
# and they didn't give us a value, or it macthes
if value is None or option['value'] == value:
# nuke it from the source
self._data['options'].remove(option)
removed += 1
if removed > 0:
return True
return False
def set_desired_state(self, state):
"""Update the desired state of a unit.
Args:
state (str): The desired state for the unit, must be one of ``_STATES``
Returns:
str: The updated state
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
ValueError: An invalid value for ``state`` was provided
"""
if state not in self._STATES:
raise ValueError(
'state must be one of: {0}'.format(
self._STATES
))
# update our internal structure
self._data['desiredState'] = state
# if we have a name, then we came from the server
# and we have a handle to an active client
# Then update our selves on the server
if self._is_live():
self._update('_data', self._client.set_unit_desired_state(self.name, self.desiredState))
# Return the state
return self._data['desiredState']
|
cnelson/python-fleet | fleet/v1/objects/unit.py | Unit.set_desired_state | python | def set_desired_state(self, state):
if state not in self._STATES:
raise ValueError(
'state must be one of: {0}'.format(
self._STATES
))
# update our internal structure
self._data['desiredState'] = state
# if we have a name, then we came from the server
# and we have a handle to an active client
# Then update our selves on the server
if self._is_live():
self._update('_data', self._client.set_unit_desired_state(self.name, self.desiredState))
# Return the state
return self._data['desiredState'] | Update the desired state of a unit.
Args:
state (str): The desired state for the unit, must be one of ``_STATES``
Returns:
str: The updated state
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
ValueError: An invalid value for ``state`` was provided | train | https://github.com/cnelson/python-fleet/blob/a11dcd8bb3986d1d8f0af90d2da7399c9cc54b4d/fleet/v1/objects/unit.py#L321-L350 | [
"def _is_live(self):\n \"\"\"Checks to see if this unit came from fleet, or was created locally\n\n Only units with a .name property (set by the server), and _client property are considered 'live'\n\n Returns:\n True: The object is live\n False: The object is not\n\n \"\"\"\n if 'name' in self._data and self._client:\n return True\n\n return False\n"
] | class Unit(FleetObject):
"""This object represents a Unit in Fleet
Create and modify Unit entities to communicate to fleet the desired state of the cluster.
This simply declares what should be happening; the backend system still has to react to the changes in
this desired state. The actual state of the system is communicated with UnitState entities.
Attributes (all are readonly):
Always available:
options (update with add_option, remove_option): list of UnitOption entities
desiredState: (update with set_desired_state): state the user wishes the Unit to be in
("inactive", "loaded", or "launched")
Available once units are submitted to fleet:
name: unique identifier of entity
currentState: state the Unit is currently in (same possible values as desiredState)
machineID: ID of machine to which the Unit is scheduled
A UnitOption represents a single option in a systemd unit file.
section: name of section that contains the option (e.g. "Unit", "Service", "Socket")
name: name of option (e.g. "BindsTo", "After", "ExecStart")
value: value of option (e.g. "/usr/bin/docker run busybox /bin/sleep 1000")
"""
_STATES = ['inactive', 'loaded', 'launched']
def __init__(self, client=None, data=None, desired_state=None, options=None, from_file=None, from_string=None):
"""Create a new unit
Args:
client (fleet.v1.Client, optional): The fleet client that retrieved this object
data (dict, optional): Initialize this object with this data. If this is used you must not
specify options, desired_state, from_file, or from_string
desired_state (string, optional): The desired_state for this object, defaults to 'launched' if not specified
If you do not specify data, You may specify one of the following args to initialize the object:
options (list, optional): A list of options to initialize the object with.
from_file (str, optional): Initialize this object from the unit file on disk at this path
from_string (str, optional): Initialize this object from the unit file in this string
If none are specified, an empty unit will be created
Raises:
IOError: from_file was specified and it does not exist
ValueError: Conflicting options, or The unit contents specified in from_string or from_file is not valid
"""
# make sure if they specify data, then they didn't specify anything else
if data and (desired_state or options or from_file or from_string):
raise ValueError('If you specify data you can not specify desired_state,'
'options, from_file, or from_string')
# count how many of options, from_file, from_string we have
given = 0
for thing in [options, from_file, from_string]:
if thing:
given += 1
# we should only have one, if we have more, yell at them
if given > 1:
raise ValueError('You must specify only one of options, from_file, from_string')
# ensure we have a minimum structure if we aren't passed one
if data is None:
# we set this here, instead as a default value to the arg
# as we want to be able to check it vs data above, it should be None in that case
if desired_state is None:
desired_state = 'launched'
if options is None:
options = []
# Minimum structure required by fleet
data = {
'desiredState': desired_state,
'options': options
}
# Call the parent class to configure us
super(Unit, self).__init__(client=client, data=data)
# If they asked us to load from a file, attemp to slurp it up
if from_file:
with open(from_file, 'r') as fh:
self._set_options_from_file(fh)
# If they asked us to load from a string, lie to the loader with StringIO
if from_string:
self._set_options_from_file(StringIO(from_string))
def __repr__(self):
return '<{0}: {1}>'.format(
self.__class__.__name__,
self.as_dict()
)
def __str__(self):
"""Generate a Unit file representation of this object"""
# build our output here
output = []
# get a ist of sections
sections = set([x['section'] for x in self._data['options']])
for section in sections:
# for each section, add it to our output
output.append(u'[{0}]'.format(section))
# iterate through the list of options, adding all items to this section
for option in self._data['options']:
if option['section'] == section:
output.append(u'{0}={1}'.format(option['name'], option['value']))
# join and return the output
return u"\n".join(output)
def _set_options_from_file(self, file_handle):
"""Parses a unit file and updates self._data['options']
Args:
file_handle (file): a file-like object (supporting read()) containing a unit
Returns:
True: The file was successfuly parsed and options were updated
Raises:
IOError: from_file was specified and it does not exist
ValueError: The unit contents specified in from_string or from_file is not valid
"""
# TODO: Find a library to handle this unit file parsing
# Can't use configparser, it doesn't handle multiple entries for the same key in the same section
# This is terribly naive
# build our output here
options = []
# keep track of line numbers to report when parsing problems happen
line_number = 0
# the section we are currently in
section = None
for line in file_handle.read().splitlines():
line_number += 1
# clear any extra white space
orig_line = line
line = line.strip()
# ignore comments, and blank lines
if not line or line.startswith('#'):
continue
# is this a section header? If so, update our variable and continue
# Section headers look like: [Section]
if line.startswith('[') and line.endswith(']'):
section = line.strip('[]')
continue
# We encountered a non blank line outside of a section, this is a problem
if not section:
raise ValueError(
'Unable to parse unit file; '
'Unexpected line outside of a section: {0} (line: {1}'.format(
line,
line_number
))
# Attempt to parse a line inside a section
# Lines should look like: name=value \
# continuation
continuation = False
try:
# if the previous value ends with \ then we are a continuation
# so remove the \, and set the flag so we'll append to this below
if options[-1]['value'].endswith('\\'):
options[-1]['value'] = options[-1]['value'][:-1]
continuation = True
except IndexError:
pass
try:
# if we are a continuation, then just append our value to the previous line
if continuation:
options[-1]['value'] += orig_line
continue
# else we are a normal line, so spit and get our name / value
name, value = line.split('=', 1)
options.append({
'section': section,
'name': name,
'value': value
})
except ValueError:
raise ValueError(
'Unable to parse unit file; '
'Malformed line in section {0}: {1} (line: {2})'.format(
section,
line,
line_number
))
# update our internal structure
self._data['options'] = options
return True
def _is_live(self):
"""Checks to see if this unit came from fleet, or was created locally
Only units with a .name property (set by the server), and _client property are considered 'live'
Returns:
True: The object is live
False: The object is not
"""
if 'name' in self._data and self._client:
return True
return False
def add_option(self, section, name, value):
"""Add an option to a section of the unit file
Args:
section (str): The name of the section, If it doesn't exist it will be created
name (str): The name of the option to add
value (str): The value of the option
Returns:
True: The item was added
"""
# Don't allow updating units we loaded from fleet, it's not supported
if self._is_live():
raise RuntimeError('Submitted units cannot update their options')
option = {
'section': section,
'name': name,
'value': value
}
self._data['options'].append(option)
return True
def remove_option(self, section, name, value=None):
"""Remove an option from a unit
Args:
section (str): The section to remove from.
name (str): The item to remove.
value (str, optional): If specified, only the option matching this value will be removed
If not specified, all options with ``name`` in ``section`` will be removed
Returns:
True: At least one item was removed
False: The item requested to remove was not found
"""
# Don't allow updating units we loaded from fleet, it's not supported
if self._is_live():
raise RuntimeError('Submitted units cannot update their options')
removed = 0
# iterate through a copy of the options
for option in list(self._data['options']):
# if it's in our section
if option['section'] == section:
# and it matches our name
if option['name'] == name:
# and they didn't give us a value, or it macthes
if value is None or option['value'] == value:
# nuke it from the source
self._data['options'].remove(option)
removed += 1
if removed > 0:
return True
return False
def destroy(self):
"""Remove a unit from the fleet cluster
Returns:
True: The unit was removed
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# if this unit didn't come from fleet, we can't destroy it
if not self._is_live():
raise RuntimeError('A unit must be submitted to fleet before it can destroyed.')
return self._client.destroy_unit(self.name)
|
cnelson/python-fleet | fleet/v1/client.py | SSHTunnel.forward_tcp | python | def forward_tcp(self, host, port):
return self.transport.open_channel(
'direct-tcpip',
(host, port),
self.transport.getpeername()
) | Open a connection to host:port via an ssh tunnel.
Args:
host (str): The host to connect to.
port (int): The port to connect to.
Returns:
A socket-like object that is connected to the provided host:port. | train | https://github.com/cnelson/python-fleet/blob/a11dcd8bb3986d1d8f0af90d2da7399c9cc54b4d/fleet/v1/client.py#L97-L113 | null | class SSHTunnel(object):
"""Use paramiko to setup local "ssh -L" tunnels for Client to use"""
def __init__(
self,
host,
username=None,
port=22,
timeout=10,
known_hosts_file=None,
strict_host_key_checking=True
):
"""Connect to the SSH server, and authenticate
Args:
host (str or paramiko.transport.Transport): The hostname to connect to or an already connected Transport.
username (str): The username to use when authenticating.
port (int): The port to connect to, defaults to 22.
timeout (int): The timeout to wait for a connection in seconds, defaults to 10.
known_hosts_file (str): A path to a known host file, ignored if strict_host_key_checking is False.
strict_host_key_checking (bool): Verify host keys presented by remote machines before
initiating SSH connections, defaults to True.
Raises:
ValueError: strict_host_key_checking was true, but known_hosts_file didn't exist.
socket.gaierror: Unable to resolve host
socket.error: Unable to connect to host:port
paramiko.ssh_exception.SSHException: Error authenticating during SSH connection.
"""
self.client = None
self.transport = None
# if they passed us a transport, then we don't need to make our own
if isinstance(host, paramiko.transport.Transport):
self.transport = host
else:
# assume they passed us a hostname, and we connect to it
self.client = paramiko.SSHClient()
# if we are strict, then we have to have a host file
if strict_host_key_checking:
try:
self.client.load_system_host_keys(os.path.expanduser(known_hosts_file))
except IOError:
raise ValueError(
'Strict Host Key Checking is enabled, but hosts file ({0}) '
'does not exist or is unreadable.'.format(known_hosts_file)
)
else:
# don't load the host file, and set to AutoAdd missing keys
self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# Connect to the host, with the provided params, let exceptions bubble up
self.client.connect(
host,
port=port,
username=username,
banner_timeout=timeout,
)
# Stash our transport
self.transport = self.client.get_transport()
def forward_unix(self, path):
"""Open a connection to a unix socket via an ssh tunnel.
Requires the server to be running OpenSSH >=6.7.
Args:
path (str): A path to a unix domain socket.
Returns:
A socket-like object that is connected to the provided path.
Raises:
RuntimeError: All the time because of what it says on the tin.
"""
raise RuntimeError(
'Paramiko does not yet support tunneling unix domain sockets. '
'Help is needed to add this functionality! '
'https://github.com/paramiko/paramiko/issues/544'
)
|
cnelson/python-fleet | fleet/v1/client.py | Client._split_hostport | python | def _split_hostport(self, hostport, default_port=None):
try:
(host, port) = hostport.split(':', 1)
except ValueError: # no colon in the string so make our own port
host = hostport
if default_port is None:
raise ValueError('No port found in hostport, and default_port not provided.')
port = default_port
try:
port = int(port)
if port < 1 or port > 65535:
raise ValueError()
except ValueError:
raise ValueError("{0} is not a valid TCP port".format(port))
return (host, port) | Split a string in the format of '<host>:<port>' into it's component parts
default_port will be used if a port is not included in the string
Args:
str ('<host>' or '<host>:<port>'): A string to split into it's parts
Returns:
two item tuple: (host, port)
Raises:
ValueError: The string was in an invalid element | train | https://github.com/cnelson/python-fleet/blob/a11dcd8bb3986d1d8f0af90d2da7399c9cc54b4d/fleet/v1/client.py#L296-L328 | null | class Client(object):
"""A python wrapper for the fleet v1 API
The fleet v1 API is documented here: https://github.com/coreos/fleet/blob/master/Documentation/api-v1.md
"""
_API = 'fleet'
_VERSION = 'v1'
_STATES = ['inactive', 'loaded', 'launched']
def __init__(
self,
endpoint,
http=None,
ssh_tunnel=None,
ssh_username='core',
ssh_timeout=10,
ssh_known_hosts_file='~/.fleetctl/known_hosts',
ssh_strict_host_key_checking=True,
ssh_raw_transport=None
):
"""Connect to the fleet API and generate a client based on it's discovery document.
Args:
endpoint (str): A URL where the fleet API can be reached. Supported schemes are:
http: A HTTP connection over a TCP socket.
Example: http://127.0.0.1:49153
http+unix: A HTTP connection over a unix domain socket. You must escape the path (/ = %2F).
Example: http+unix://%2Fvar%2Frun%2Ffleet.sock
http (httplib2.Http): An instance of httplib2.Http (or something that acts like it) that HTTP requests will
be made through. You do not need to pass this unless you need to configure specific options for your
http client, or want to pass in a mock for testing.
ssh_tunnel (str '<host>[:<port>]'): Establish an SSH tunnel through the provided address for communication
with fleet. Defaults to None. If specified, the following other options adjust it's behaivor:
ssh_username (str): Username to use when connecting to SSH, defaults to 'core'.
ssh_timeout (float): Amount of time in seconds to allow for SSH connection initialization
before failing, defaults to 10.
ssh_known_hosts_file (str): File used to store remote machine fingerprints,
defaults to '~/.fleetctl/known_hosts'. Ignored if `ssh_strict_host_key_checking` is False
ssh_strict_host_key_checking (bool): Verify host keys presented by remote machines before
initiating SSH connections, defaults to True.
ssh_raw_transport (paramiko.transport.Transport): An active Transport on which open_channel() will be
called to establish connections.
See Advanced SSH Tunneling in docs/client.md for more information.
Raises:
ValueError: The endpoint provided was not accessible or your ssh configuration is incorrect
"""
# stash this for later
self._endpoint = endpoint.strip('/')
self._ssh_client = None
# we overload the http when our proxy enabled versin if they request ssh tunneling
# so we need to make sure they didn't give us both
if (ssh_tunnel or ssh_raw_transport) and http:
raise ValueError('You cannot specify your own http client, and request ssh tunneling.')
# only one way to connect, not both
if ssh_tunnel and ssh_raw_transport:
raise ValueError('If ssh_tunnel is specified, ssh_raw_transport must be None')
# see if we need to setup an ssh tunnel
self._ssh_tunnel = None
# if they handed us a transport, then we either bail or are good to go
if ssh_raw_transport:
if not isinstance(ssh_raw_transport, paramiko.transport.Transport):
raise ValueError('ssh_raw_transport must be an active instance of paramiko.transport.Transport.')
self._ssh_tunnel = SSHTunnel(host=ssh_raw_transport)
# otherwise we are connecting ourselves
elif ssh_tunnel:
(ssh_host, ssh_port) = self._split_hostport(ssh_tunnel, default_port=22)
try:
self._ssh_tunnel = SSHTunnel(
host=ssh_host,
port=ssh_port,
username=ssh_username,
timeout=ssh_timeout,
known_hosts_file=ssh_known_hosts_file,
strict_host_key_checking=ssh_strict_host_key_checking
)
except socket.gaierror as exc:
raise ValueError('{0} could not be resolved.'.format(ssh_host))
except socket.error as exc:
raise ValueError('Unable to connect to {0}:{1}: {2}'.format(
ssh_host,
ssh_port,
exc
))
except paramiko.ssh_exception.SSHException as exc:
raise ValueError('Unable to connect via ssh: {0}: {1}'.format(
exc.__class__.__name__,
exc
))
# did we get an ssh connection up?
if self._ssh_tunnel:
# inject the SSH tunnel socketed into httplib via the proxy_info interface
self._http = httplib2.Http(proxy_info=self._get_proxy_info)
# preface our scheme with 'ssh+'; httplib2's SCHEME_TO_CONNECTION
# will invoke our custom connection objects and route the HTTP
# call across the SSH connection established or passed in above
self._endpoint = 'ssh+' + self._endpoint
else:
self._http = http
# if we've made it this far, we are ready to try to talk to fleet
# possibly through a proxy...
# generate a client binding using the google-api-python client.
# See https://developers.google.com/api-client-library/python/start/get_started
# For more infomation on how to use the generated client binding.
try:
discovery_url = self._endpoint + '/{api}/{apiVersion}/discovery'
self._service = build(
self._API,
self._VERSION,
cache_discovery=False,
discoveryServiceUrl=discovery_url,
http=self._http
)
except socket.error as exc: # pragma: no cover
raise ValueError('Unable to connect to endpoint {0}: {1}'.format(
self._endpoint,
exc
))
except googleapiclient.errors.UnknownApiNameOrVersion as exc:
raise ValueError(
'Connected to endpoint {0} but it is not a fleet v1 API endpoint. '
'This usually means a GET request to {0}/{1}/{2}/discovery failed.'.format(
self._endpoint,
self._API,
self._VERSION
))
def _endpoint_to_target(self, endpoint):
"""Convert a URL into a host / port, or into a path to a unix domain socket
Args:
endpoint (str): A URL parsable by urlparse
Returns:
3 item tuple: (host, port, path).
host and port will None, and path will be not None if a a unix domain socket URL is passed
path will be None if a normal TCP based URL is passed
"""
parsed = urlparse.urlparse(endpoint)
scheme = parsed[0]
hostport = parsed[1]
if 'unix' in scheme:
return (None, None, unquote(hostport))
if scheme == 'https':
target_port = 443
else:
target_port = 80
(target_host, target_port) = self._split_hostport(hostport, default_port=target_port)
return (target_host, target_port, None)
def _get_proxy_info(self, _=None):
"""Generate a ProxyInfo class from a connected SSH transport
Args:
_ (None): Ignored. This is just here as the ProxyInfo spec requires it.
Returns:
SSHTunnelProxyInfo: A ProxyInfo with an active socket tunneled through SSH
"""
# parse the fleet endpoint url, to establish a tunnel to that host
(target_host, target_port, target_path) = self._endpoint_to_target(self._endpoint)
# implement the proxy_info interface from httplib which requires
# that we accept a scheme, and return a ProxyInfo object
# we do :P
# This is called once per request, so we keep this here
# so that we can keep one ssh connection open, and allocate
# new channels as needed per-request
sock = None
if target_path:
sock = self._ssh_tunnel.forward_unix(path=target_path)
else:
sock = self._ssh_tunnel.forward_tcp(target_host, port=target_port)
# Return a ProxyInfo class with this socket
return SSHTunnelProxyInfo(sock=sock)
def _single_request(self, method, *args, **kwargs):
"""Make a single request to the fleet API endpoint
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Returns:
dict: The response from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# The auto generated client binding require instantiating each object you want to call a method on
# For example to make a request to /machines for the list of machines you would do:
# self._service.Machines().List(**kwargs)
# This code iterates through the tokens in `method` and instantiates each object
# Passing the `*args` and `**kwargs` to the final method listed
# Start here
_method = self._service
# iterate over each token in the requested method
for item in method.split('.'):
# if it's the end of the line, pass our argument
if method.endswith(item):
_method = getattr(_method, item)(*args, **kwargs)
else:
# otherwise, just create an instance and move on
_method = getattr(_method, item)()
# Discovered endpoints look like r'$ENDPOINT/path/to/method' which isn't a valid URI
# Per the fleet API documentation:
# "Note that this discovery document intentionally ships with an unusable rootUrl;
# clients must initialize this as appropriate."
# So we follow the documentation, and replace the token with our actual endpoint
_method.uri = _method.uri.replace('$ENDPOINT', self._endpoint)
# Execute the method and return it's output directly
try:
return _method.execute(http=self._http)
except googleapiclient.errors.HttpError as exc:
response = json.loads(exc.content.decode('utf-8'))['error']
raise APIError(code=response['code'], message=response['message'], http_error=exc)
def _request(self, method, *args, **kwargs):
"""Make a request with automatic pagination handling
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Note: This method will inject the 'nextPageToken' key into `**kwargs` as needed to handle
pagination overwriting any value specified by the caller. If you wish to handle pagination
manually use the `_single_request` method
Yields:
dict: The next page of responses from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# This is set to False and not None so that the while loop below will execute at least once
next_page_token = False
while next_page_token is not None:
# If bool(next_page_token), then include it in the request
# We do this so we don't pass it in the initial request as we set it to False above
if next_page_token:
kwargs['nextPageToken'] = next_page_token
# Make the request
response = self._single_request(method, *args, **kwargs)
# If there is a token for another page in the response, capture it for the next loop iteration
# If not, we set it to None so that the loop will terminate
next_page_token = response.get('nextPageToken', None)
# Return the current response
yield response
def create_unit(self, name, unit):
    """Create a new Unit in the cluster

    Create and modify Unit entities to communicate to fleet the desired
    state of the cluster.  This simply declares what should be happening;
    the backend system still has to react to the changes in this desired
    state.  The actual state of the system is communicated with UnitState
    entities.

    Args:
        name (str): The name of the unit to create
        unit (Unit): The unit to submit to fleet

    Returns:
        Unit: The unit that was created

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    # push the desired state and unit file options up to fleet
    body = {
        'desiredState': unit.desiredState,
        'options': unit.options,
    }
    self._single_request('Units.Set', unitName=name, body=body)
    # read the unit back so the caller gets the server-side representation
    return self.get_unit(name)
def set_unit_desired_state(self, unit, desired_state):
    """Update the desired state of a unit running in the cluster

    Args:
        unit (str, Unit): The Unit, or name of the unit to update
        desired_state: State the user wishes the Unit to be in
            ("inactive", "loaded", or "launched")

    Returns:
        Unit: The unit that was updated

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
        ValueError: An invalid value was provided for ``desired_state``
    """
    # reject anything that isn't a recognized fleet state up front
    if desired_state not in self._STATES:
        raise ValueError('state must be one of: {0}'.format(self._STATES))

    # accept either a Unit object or a raw name
    unit_name = unit.name if isinstance(unit, Unit) else str(unit)

    self._single_request('Units.Set', unitName=unit_name, body={
        'desiredState': desired_state
    })
    return self.get_unit(unit_name)
def destroy_unit(self, unit):
    """Delete a unit from the cluster

    Args:
        unit (str, Unit): The Unit, or name of the unit to delete

    Returns:
        True: The unit was deleted

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    # accept either a Unit object or a raw name
    unit_name = unit.name if isinstance(unit, Unit) else str(unit)
    self._single_request('Units.Delete', unitName=unit_name)
    return True
def list_units(self):
    """Return the current list of the Units in the fleet cluster

    Yields:
        Unit: The next Unit in the cluster

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    # walk every page of results, wrapping each raw record in a Unit
    for page in self._request('Units.List'):
        for record in page.get('units', []):
            yield Unit(client=self, data=record)
def get_unit(self, name):
    """Retrieve a specific unit from the fleet cluster by name

    Args:
        name (str): The name of the unit to fetch

    Returns:
        Unit: The unit identified by ``name`` in the fleet cluster

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    return Unit(client=self, data=self._single_request('Units.Get', unitName=name))
def list_unit_states(self, machine_id=None, unit_name=None):
    """Return the current UnitState for the fleet cluster

    Args:
        machine_id (str): filter all UnitState objects to those
            originating from a specific machine
        unit_name (str): filter all UnitState objects to those related
            to a specific unit

    Yields:
        UnitState: The next UnitState in the cluster

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    # both filters are passed straight through to the API; None means
    # "no filter" on that dimension
    pages = self._request('UnitState.List', machineID=machine_id, unitName=unit_name)
    for page in pages:
        for record in page.get('states', []):
            yield UnitState(data=record)
def list_machines(self):
    """Retrieve a list of machines in the fleet cluster

    Yields:
        Machine: The next machine in the cluster

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    # each page holds zero or more machine records; flatten them into a
    # single stream of Machine objects
    for page in self._request('Machines.List'):
        for record in page.get('machines', []):
            yield Machine(data=record)
|
cnelson/python-fleet | fleet/v1/client.py | Client._endpoint_to_target | python | def _endpoint_to_target(self, endpoint):
parsed = urlparse.urlparse(endpoint)
scheme = parsed[0]
hostport = parsed[1]
if 'unix' in scheme:
return (None, None, unquote(hostport))
if scheme == 'https':
target_port = 443
else:
target_port = 80
(target_host, target_port) = self._split_hostport(hostport, default_port=target_port)
return (target_host, target_port, None) | Convert a URL into a host / port, or into a path to a unix domain socket
Args:
endpoint (str): A URL parsable by urlparse
Returns:
3 item tuple: (host, port, path).
host and port will None, and path will be not None if a a unix domain socket URL is passed
path will be None if a normal TCP based URL is passed | train | https://github.com/cnelson/python-fleet/blob/a11dcd8bb3986d1d8f0af90d2da7399c9cc54b4d/fleet/v1/client.py#L330-L355 | null | class Client(object):
"""A python wrapper for the fleet v1 API
The fleet v1 API is documented here: https://github.com/coreos/fleet/blob/master/Documentation/api-v1.md
"""
_API = 'fleet'
_VERSION = 'v1'
_STATES = ['inactive', 'loaded', 'launched']
def __init__(
self,
endpoint,
http=None,
ssh_tunnel=None,
ssh_username='core',
ssh_timeout=10,
ssh_known_hosts_file='~/.fleetctl/known_hosts',
ssh_strict_host_key_checking=True,
ssh_raw_transport=None
):
"""Connect to the fleet API and generate a client based on it's discovery document.
Args:
endpoint (str): A URL where the fleet API can be reached. Supported schemes are:
http: A HTTP connection over a TCP socket.
Example: http://127.0.0.1:49153
http+unix: A HTTP connection over a unix domain socket. You must escape the path (/ = %2F).
Example: http+unix://%2Fvar%2Frun%2Ffleet.sock
http (httplib2.Http): An instance of httplib2.Http (or something that acts like it) that HTTP requests will
be made through. You do not need to pass this unless you need to configure specific options for your
http client, or want to pass in a mock for testing.
ssh_tunnel (str '<host>[:<port>]'): Establish an SSH tunnel through the provided address for communication
with fleet. Defaults to None. If specified, the following other options adjust its behavior:
ssh_username (str): Username to use when connecting to SSH, defaults to 'core'.
ssh_timeout (float): Amount of time in seconds to allow for SSH connection initialization
before failing, defaults to 10.
ssh_known_hosts_file (str): File used to store remote machine fingerprints,
defaults to '~/.fleetctl/known_hosts'. Ignored if `ssh_strict_host_key_checking` is False
ssh_strict_host_key_checking (bool): Verify host keys presented by remote machines before
initiating SSH connections, defaults to True.
ssh_raw_transport (paramiko.transport.Transport): An active Transport on which open_channel() will be
called to establish connections.
See Advanced SSH Tunneling in docs/client.md for more information.
Raises:
ValueError: The endpoint provided was not accessible or your ssh configuration is incorrect
"""
# stash this for later
self._endpoint = endpoint.strip('/')
self._ssh_client = None
# we overload the http client with our proxy-enabled version if they request ssh tunneling
# so we need to make sure they didn't give us both
if (ssh_tunnel or ssh_raw_transport) and http:
raise ValueError('You cannot specify your own http client, and request ssh tunneling.')
# only one way to connect, not both
if ssh_tunnel and ssh_raw_transport:
raise ValueError('If ssh_tunnel is specified, ssh_raw_transport must be None')
# see if we need to setup an ssh tunnel
self._ssh_tunnel = None
# if they handed us a transport, then we either bail or are good to go
if ssh_raw_transport:
if not isinstance(ssh_raw_transport, paramiko.transport.Transport):
raise ValueError('ssh_raw_transport must be an active instance of paramiko.transport.Transport.')
self._ssh_tunnel = SSHTunnel(host=ssh_raw_transport)
# otherwise we are connecting ourselves
elif ssh_tunnel:
(ssh_host, ssh_port) = self._split_hostport(ssh_tunnel, default_port=22)
try:
self._ssh_tunnel = SSHTunnel(
host=ssh_host,
port=ssh_port,
username=ssh_username,
timeout=ssh_timeout,
known_hosts_file=ssh_known_hosts_file,
strict_host_key_checking=ssh_strict_host_key_checking
)
except socket.gaierror as exc:
raise ValueError('{0} could not be resolved.'.format(ssh_host))
except socket.error as exc:
raise ValueError('Unable to connect to {0}:{1}: {2}'.format(
ssh_host,
ssh_port,
exc
))
except paramiko.ssh_exception.SSHException as exc:
raise ValueError('Unable to connect via ssh: {0}: {1}'.format(
exc.__class__.__name__,
exc
))
# did we get an ssh connection up?
if self._ssh_tunnel:
# inject the SSH tunnel socketed into httplib via the proxy_info interface
self._http = httplib2.Http(proxy_info=self._get_proxy_info)
# preface our scheme with 'ssh+'; httplib2's SCHEME_TO_CONNECTION
# will invoke our custom connection objects and route the HTTP
# call across the SSH connection established or passed in above
self._endpoint = 'ssh+' + self._endpoint
else:
self._http = http
# if we've made it this far, we are ready to try to talk to fleet
# possibly through a proxy...
# generate a client binding using the google-api-python client.
# See https://developers.google.com/api-client-library/python/start/get_started
# For more information on how to use the generated client binding.
try:
discovery_url = self._endpoint + '/{api}/{apiVersion}/discovery'
self._service = build(
self._API,
self._VERSION,
cache_discovery=False,
discoveryServiceUrl=discovery_url,
http=self._http
)
except socket.error as exc: # pragma: no cover
raise ValueError('Unable to connect to endpoint {0}: {1}'.format(
self._endpoint,
exc
))
except googleapiclient.errors.UnknownApiNameOrVersion as exc:
raise ValueError(
'Connected to endpoint {0} but it is not a fleet v1 API endpoint. '
'This usually means a GET request to {0}/{1}/{2}/discovery failed.'.format(
self._endpoint,
self._API,
self._VERSION
))
def _split_hostport(self, hostport, default_port=None):
"""Split a string in the format of '<host>:<port>' into its component parts
default_port will be used if a port is not included in the string
Args:
str ('<host>' or '<host>:<port>'): A string to split into it's parts
Returns:
two item tuple: (host, port)
Raises:
ValueError: The string was in an invalid format
"""
try:
(host, port) = hostport.split(':', 1)
except ValueError: # no colon in the string so make our own port
host = hostport
if default_port is None:
raise ValueError('No port found in hostport, and default_port not provided.')
port = default_port
try:
port = int(port)
if port < 1 or port > 65535:
raise ValueError()
except ValueError:
raise ValueError("{0} is not a valid TCP port".format(port))
return (host, port)
def _get_proxy_info(self, _=None):
"""Generate a ProxyInfo class from a connected SSH transport
Args:
_ (None): Ignored. This is just here as the ProxyInfo spec requires it.
Returns:
SSHTunnelProxyInfo: A ProxyInfo with an active socket tunneled through SSH
"""
# parse the fleet endpoint url, to establish a tunnel to that host
(target_host, target_port, target_path) = self._endpoint_to_target(self._endpoint)
# implement the proxy_info interface from httplib which requires
# that we accept a scheme, and return a ProxyInfo object
# we do :P
# This is called once per request, so we keep this here
# so that we can keep one ssh connection open, and allocate
# new channels as needed per-request
sock = None
if target_path:
sock = self._ssh_tunnel.forward_unix(path=target_path)
else:
sock = self._ssh_tunnel.forward_tcp(target_host, port=target_port)
# Return a ProxyInfo class with this socket
return SSHTunnelProxyInfo(sock=sock)
def _single_request(self, method, *args, **kwargs):
"""Make a single request to the fleet API endpoint
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Returns:
dict: The response from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# The auto generated client binding require instantiating each object you want to call a method on
# For example to make a request to /machines for the list of machines you would do:
# self._service.Machines().List(**kwargs)
# This code iterates through the tokens in `method` and instantiates each object
# Passing the `*args` and `**kwargs` to the final method listed
# Start here
_method = self._service
# iterate over each token in the requested method
for item in method.split('.'):
# if it's the end of the line, pass our argument
if method.endswith(item):
_method = getattr(_method, item)(*args, **kwargs)
else:
# otherwise, just create an instance and move on
_method = getattr(_method, item)()
# Discovered endpoints look like r'$ENDPOINT/path/to/method' which isn't a valid URI
# Per the fleet API documentation:
# "Note that this discovery document intentionally ships with an unusable rootUrl;
# clients must initialize this as appropriate."
# So we follow the documentation, and replace the token with our actual endpoint
_method.uri = _method.uri.replace('$ENDPOINT', self._endpoint)
# Execute the method and return it's output directly
try:
return _method.execute(http=self._http)
except googleapiclient.errors.HttpError as exc:
response = json.loads(exc.content.decode('utf-8'))['error']
raise APIError(code=response['code'], message=response['message'], http_error=exc)
def _request(self, method, *args, **kwargs):
"""Make a request with automatic pagination handling
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Note: This method will inject the 'nextPageToken' key into `**kwargs` as needed to handle
pagination overwriting any value specified by the caller. If you wish to handle pagination
manually use the `_single_request` method
Yields:
dict: The next page of responses from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# This is set to False and not None so that the while loop below will execute at least once
next_page_token = False
while next_page_token is not None:
# If bool(next_page_token), then include it in the request
# We do this so we don't pass it in the initial request as we set it to False above
if next_page_token:
kwargs['nextPageToken'] = next_page_token
# Make the request
response = self._single_request(method, *args, **kwargs)
# If there is a token for another page in the response, capture it for the next loop iteration
# If not, we set it to None so that the loop will terminate
next_page_token = response.get('nextPageToken', None)
# Return the current response
yield response
def create_unit(self, name, unit):
"""Create a new Unit in the cluster
Create and modify Unit entities to communicate to fleet the desired state of the cluster.
This simply declares what should be happening; the backend system still has to react to
the changes in this desired state. The actual state of the system is communicated with
UnitState entities.
Args:
name (str): The name of the unit to create
unit (Unit): The unit to submit to fleet
Returns:
Unit: The unit that was created
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
self._single_request('Units.Set', unitName=name, body={
'desiredState': unit.desiredState,
'options': unit.options
})
return self.get_unit(name)
def set_unit_desired_state(self, unit, desired_state):
"""Update the desired state of a unit running in the cluster
Args:
unit (str, Unit): The Unit, or name of the unit to update
desired_state: State the user wishes the Unit to be in
("inactive", "loaded", or "launched")
Returns:
Unit: The unit that was updated
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
ValueError: An invalid value was provided for ``desired_state``
"""
if desired_state not in self._STATES:
raise ValueError('state must be one of: {0}'.format(
self._STATES
))
# if we are given an object, grab it's name property
# otherwise, convert to unicode
if isinstance(unit, Unit):
unit = unit.name
else:
unit = str(unit)
self._single_request('Units.Set', unitName=unit, body={
'desiredState': desired_state
})
return self.get_unit(unit)
def destroy_unit(self, unit):
"""Delete a unit from the cluster
Args:
unit (str, Unit): The Unit, or name of the unit to delete
Returns:
True: The unit was deleted
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# if we are given an object, grab it's name property
# otherwise, convert to unicode
if isinstance(unit, Unit):
unit = unit.name
else:
unit = str(unit)
self._single_request('Units.Delete', unitName=unit)
return True
def list_units(self):
"""Return the current list of the Units in the fleet cluster
Yields:
Unit: The next Unit in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
for page in self._request('Units.List'):
for unit in page.get('units', []):
yield Unit(client=self, data=unit)
def get_unit(self, name):
"""Retreive a specifi unit from the fleet cluster by name
Args:
name (str): If specified, only this unit name is returned
Returns:
Unit: The unit identified by ``name`` in the fleet cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
return Unit(client=self, data=self._single_request('Units.Get', unitName=name))
def list_unit_states(self, machine_id=None, unit_name=None):
"""Return the current UnitState for the fleet cluster
Args:
machine_id (str): filter all UnitState objects to those
originating from a specific machine
unit_name (str): filter all UnitState objects to those related
to a specific unit
Yields:
UnitState: The next UnitState in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
for page in self._request('UnitState.List', machineID=machine_id, unitName=unit_name):
for state in page.get('states', []):
yield UnitState(data=state)
def list_machines(self):
"""Retrieve a list of machines in the fleet cluster
Yields:
Machine: The next machine in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# loop through each page of results
for page in self._request('Machines.List'):
# return each machine in the current page
for machine in page.get('machines', []):
yield Machine(data=machine)
|
cnelson/python-fleet | fleet/v1/client.py | Client._get_proxy_info | python | def _get_proxy_info(self, _=None):
# parse the fleet endpoint url, to establish a tunnel to that host
(target_host, target_port, target_path) = self._endpoint_to_target(self._endpoint)
# implement the proxy_info interface from httplib which requires
# that we accept a scheme, and return a ProxyInfo object
# we do :P
# This is called once per request, so we keep this here
# so that we can keep one ssh connection open, and allocate
# new channels as needed per-request
sock = None
if target_path:
sock = self._ssh_tunnel.forward_unix(path=target_path)
else:
sock = self._ssh_tunnel.forward_tcp(target_host, port=target_port)
# Return a ProxyInfo class with this socket
return SSHTunnelProxyInfo(sock=sock) | Generate a ProxyInfo class from a connected SSH transport
Args:
_ (None): Ignored. This is just here as the ProxyInfo spec requires it.
Returns:
SSHTunnelProxyInfo: A ProxyInfo with an active socket tunneled through SSH | train | https://github.com/cnelson/python-fleet/blob/a11dcd8bb3986d1d8f0af90d2da7399c9cc54b4d/fleet/v1/client.py#L357-L385 | null | class Client(object):
"""A python wrapper for the fleet v1 API
The fleet v1 API is documented here: https://github.com/coreos/fleet/blob/master/Documentation/api-v1.md
"""
_API = 'fleet'
_VERSION = 'v1'
_STATES = ['inactive', 'loaded', 'launched']
def __init__(
self,
endpoint,
http=None,
ssh_tunnel=None,
ssh_username='core',
ssh_timeout=10,
ssh_known_hosts_file='~/.fleetctl/known_hosts',
ssh_strict_host_key_checking=True,
ssh_raw_transport=None
):
"""Connect to the fleet API and generate a client based on it's discovery document.
Args:
endpoint (str): A URL where the fleet API can be reached. Supported schemes are:
http: A HTTP connection over a TCP socket.
Example: http://127.0.0.1:49153
http+unix: A HTTP connection over a unix domain socket. You must escape the path (/ = %2F).
Example: http+unix://%2Fvar%2Frun%2Ffleet.sock
http (httplib2.Http): An instance of httplib2.Http (or something that acts like it) that HTTP requests will
be made through. You do not need to pass this unless you need to configure specific options for your
http client, or want to pass in a mock for testing.
ssh_tunnel (str '<host>[:<port>]'): Establish an SSH tunnel through the provided address for communication
with fleet. Defaults to None. If specified, the following other options adjust its behavior:
ssh_username (str): Username to use when connecting to SSH, defaults to 'core'.
ssh_timeout (float): Amount of time in seconds to allow for SSH connection initialization
before failing, defaults to 10.
ssh_known_hosts_file (str): File used to store remote machine fingerprints,
defaults to '~/.fleetctl/known_hosts'. Ignored if `ssh_strict_host_key_checking` is False
ssh_strict_host_key_checking (bool): Verify host keys presented by remote machines before
initiating SSH connections, defaults to True.
ssh_raw_transport (paramiko.transport.Transport): An active Transport on which open_channel() will be
called to establish connections.
See Advanced SSH Tunneling in docs/client.md for more information.
Raises:
ValueError: The endpoint provided was not accessible or your ssh configuration is incorrect
"""
# stash this for later
self._endpoint = endpoint.strip('/')
self._ssh_client = None
# we overload the http client with our proxy-enabled version if they request ssh tunneling
# so we need to make sure they didn't give us both
if (ssh_tunnel or ssh_raw_transport) and http:
raise ValueError('You cannot specify your own http client, and request ssh tunneling.')
# only one way to connect, not both
if ssh_tunnel and ssh_raw_transport:
raise ValueError('If ssh_tunnel is specified, ssh_raw_transport must be None')
# see if we need to setup an ssh tunnel
self._ssh_tunnel = None
# if they handed us a transport, then we either bail or are good to go
if ssh_raw_transport:
if not isinstance(ssh_raw_transport, paramiko.transport.Transport):
raise ValueError('ssh_raw_transport must be an active instance of paramiko.transport.Transport.')
self._ssh_tunnel = SSHTunnel(host=ssh_raw_transport)
# otherwise we are connecting ourselves
elif ssh_tunnel:
(ssh_host, ssh_port) = self._split_hostport(ssh_tunnel, default_port=22)
try:
self._ssh_tunnel = SSHTunnel(
host=ssh_host,
port=ssh_port,
username=ssh_username,
timeout=ssh_timeout,
known_hosts_file=ssh_known_hosts_file,
strict_host_key_checking=ssh_strict_host_key_checking
)
except socket.gaierror as exc:
raise ValueError('{0} could not be resolved.'.format(ssh_host))
except socket.error as exc:
raise ValueError('Unable to connect to {0}:{1}: {2}'.format(
ssh_host,
ssh_port,
exc
))
except paramiko.ssh_exception.SSHException as exc:
raise ValueError('Unable to connect via ssh: {0}: {1}'.format(
exc.__class__.__name__,
exc
))
# did we get an ssh connection up?
if self._ssh_tunnel:
# inject the SSH tunnel socketed into httplib via the proxy_info interface
self._http = httplib2.Http(proxy_info=self._get_proxy_info)
# preface our scheme with 'ssh+'; httplib2's SCHEME_TO_CONNECTION
# will invoke our custom connection objects and route the HTTP
# call across the SSH connection established or passed in above
self._endpoint = 'ssh+' + self._endpoint
else:
self._http = http
# if we've made it this far, we are ready to try to talk to fleet
# possibly through a proxy...
# generate a client binding using the google-api-python client.
# See https://developers.google.com/api-client-library/python/start/get_started
# For more information on how to use the generated client binding.
try:
discovery_url = self._endpoint + '/{api}/{apiVersion}/discovery'
self._service = build(
self._API,
self._VERSION,
cache_discovery=False,
discoveryServiceUrl=discovery_url,
http=self._http
)
except socket.error as exc: # pragma: no cover
raise ValueError('Unable to connect to endpoint {0}: {1}'.format(
self._endpoint,
exc
))
except googleapiclient.errors.UnknownApiNameOrVersion as exc:
raise ValueError(
'Connected to endpoint {0} but it is not a fleet v1 API endpoint. '
'This usually means a GET request to {0}/{1}/{2}/discovery failed.'.format(
self._endpoint,
self._API,
self._VERSION
))
def _split_hostport(self, hostport, default_port=None):
"""Split a string in the format of '<host>:<port>' into its component parts
default_port will be used if a port is not included in the string
Args:
str ('<host>' or '<host>:<port>'): A string to split into it's parts
Returns:
two item tuple: (host, port)
Raises:
ValueError: The string was in an invalid format
"""
try:
(host, port) = hostport.split(':', 1)
except ValueError: # no colon in the string so make our own port
host = hostport
if default_port is None:
raise ValueError('No port found in hostport, and default_port not provided.')
port = default_port
try:
port = int(port)
if port < 1 or port > 65535:
raise ValueError()
except ValueError:
raise ValueError("{0} is not a valid TCP port".format(port))
return (host, port)
def _endpoint_to_target(self, endpoint):
"""Convert a URL into a host / port, or into a path to a unix domain socket
Args:
endpoint (str): A URL parsable by urlparse
Returns:
3 item tuple: (host, port, path).
host and port will None, and path will be not None if a a unix domain socket URL is passed
path will be None if a normal TCP based URL is passed
"""
parsed = urlparse.urlparse(endpoint)
scheme = parsed[0]
hostport = parsed[1]
if 'unix' in scheme:
return (None, None, unquote(hostport))
if scheme == 'https':
target_port = 443
else:
target_port = 80
(target_host, target_port) = self._split_hostport(hostport, default_port=target_port)
return (target_host, target_port, None)
def _single_request(self, method, *args, **kwargs):
"""Make a single request to the fleet API endpoint
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Returns:
dict: The response from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# The auto generated client binding require instantiating each object you want to call a method on
# For example to make a request to /machines for the list of machines you would do:
# self._service.Machines().List(**kwargs)
# This code iterates through the tokens in `method` and instantiates each object
# Passing the `*args` and `**kwargs` to the final method listed
# Start here
_method = self._service
# iterate over each token in the requested method
for item in method.split('.'):
# if it's the end of the line, pass our argument
if method.endswith(item):
_method = getattr(_method, item)(*args, **kwargs)
else:
# otherwise, just create an instance and move on
_method = getattr(_method, item)()
# Discovered endpoints look like r'$ENDPOINT/path/to/method' which isn't a valid URI
# Per the fleet API documentation:
# "Note that this discovery document intentionally ships with an unusable rootUrl;
# clients must initialize this as appropriate."
# So we follow the documentation, and replace the token with our actual endpoint
_method.uri = _method.uri.replace('$ENDPOINT', self._endpoint)
# Execute the method and return it's output directly
try:
return _method.execute(http=self._http)
except googleapiclient.errors.HttpError as exc:
response = json.loads(exc.content.decode('utf-8'))['error']
raise APIError(code=response['code'], message=response['message'], http_error=exc)
def _request(self, method, *args, **kwargs):
"""Make a request with automatic pagination handling
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Note: This method will inject the 'nextPageToken' key into `**kwargs` as needed to handle
pagination overwriting any value specified by the caller. If you wish to handle pagination
manually use the `_single_request` method
Yields:
dict: The next page of responses from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# This is set to False and not None so that the while loop below will execute at least once
next_page_token = False
while next_page_token is not None:
# If bool(next_page_token), then include it in the request
# We do this so we don't pass it in the initial request as we set it to False above
if next_page_token:
kwargs['nextPageToken'] = next_page_token
# Make the request
response = self._single_request(method, *args, **kwargs)
# If there is a token for another page in the response, capture it for the next loop iteration
# If not, we set it to None so that the loop will terminate
next_page_token = response.get('nextPageToken', None)
# Return the current response
yield response
def create_unit(self, name, unit):
"""Create a new Unit in the cluster
Create and modify Unit entities to communicate to fleet the desired state of the cluster.
This simply declares what should be happening; the backend system still has to react to
the changes in this desired state. The actual state of the system is communicated with
UnitState entities.
Args:
name (str): The name of the unit to create
unit (Unit): The unit to submit to fleet
Returns:
Unit: The unit that was created
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
self._single_request('Units.Set', unitName=name, body={
'desiredState': unit.desiredState,
'options': unit.options
})
return self.get_unit(name)
def set_unit_desired_state(self, unit, desired_state):
"""Update the desired state of a unit running in the cluster
Args:
unit (str, Unit): The Unit, or name of the unit to update
desired_state: State the user wishes the Unit to be in
("inactive", "loaded", or "launched")
Returns:
Unit: The unit that was updated
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
ValueError: An invalid value was provided for ``desired_state``
"""
if desired_state not in self._STATES:
raise ValueError('state must be one of: {0}'.format(
self._STATES
))
# if we are given an object, grab it's name property
# otherwise, convert to unicode
if isinstance(unit, Unit):
unit = unit.name
else:
unit = str(unit)
self._single_request('Units.Set', unitName=unit, body={
'desiredState': desired_state
})
return self.get_unit(unit)
def destroy_unit(self, unit):
"""Delete a unit from the cluster
Args:
unit (str, Unit): The Unit, or name of the unit to delete
Returns:
True: The unit was deleted
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# if we are given an object, grab it's name property
# otherwise, convert to unicode
if isinstance(unit, Unit):
unit = unit.name
else:
unit = str(unit)
self._single_request('Units.Delete', unitName=unit)
return True
def list_units(self):
"""Return the current list of the Units in the fleet cluster
Yields:
Unit: The next Unit in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
for page in self._request('Units.List'):
for unit in page.get('units', []):
yield Unit(client=self, data=unit)
def get_unit(self, name):
"""Retreive a specifi unit from the fleet cluster by name
Args:
name (str): If specified, only this unit name is returned
Returns:
Unit: The unit identified by ``name`` in the fleet cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
return Unit(client=self, data=self._single_request('Units.Get', unitName=name))
def list_unit_states(self, machine_id=None, unit_name=None):
"""Return the current UnitState for the fleet cluster
Args:
machine_id (str): filter all UnitState objects to those
originating from a specific machine
unit_name (str): filter all UnitState objects to those related
to a specific unit
Yields:
UnitState: The next UnitState in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
for page in self._request('UnitState.List', machineID=machine_id, unitName=unit_name):
for state in page.get('states', []):
yield UnitState(data=state)
def list_machines(self):
"""Retrieve a list of machines in the fleet cluster
Yields:
Machine: The next machine in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# loop through each page of results
for page in self._request('Machines.List'):
# return each machine in the current page
for machine in page.get('machines', []):
yield Machine(data=machine)
|
cnelson/python-fleet | fleet/v1/client.py | Client._single_request | python | def _single_request(self, method, *args, **kwargs):
# The auto generated client binding require instantiating each object you want to call a method on
# For example to make a request to /machines for the list of machines you would do:
# self._service.Machines().List(**kwargs)
# This code iterates through the tokens in `method` and instantiates each object
# Passing the `*args` and `**kwargs` to the final method listed
# Start here
_method = self._service
# iterate over each token in the requested method
for item in method.split('.'):
# if it's the end of the line, pass our argument
if method.endswith(item):
_method = getattr(_method, item)(*args, **kwargs)
else:
# otherwise, just create an instance and move on
_method = getattr(_method, item)()
# Discovered endpoints look like r'$ENDPOINT/path/to/method' which isn't a valid URI
# Per the fleet API documentation:
# "Note that this discovery document intentionally ships with an unusable rootUrl;
# clients must initialize this as appropriate."
# So we follow the documentation, and replace the token with our actual endpoint
_method.uri = _method.uri.replace('$ENDPOINT', self._endpoint)
# Execute the method and return it's output directly
try:
return _method.execute(http=self._http)
except googleapiclient.errors.HttpError as exc:
response = json.loads(exc.content.decode('utf-8'))['error']
raise APIError(code=response['code'], message=response['message'], http_error=exc) | Make a single request to the fleet API endpoint
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Returns:
dict: The response from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400 | train | https://github.com/cnelson/python-fleet/blob/a11dcd8bb3986d1d8f0af90d2da7399c9cc54b4d/fleet/v1/client.py#L387-L435 | null | class Client(object):
"""A python wrapper for the fleet v1 API
The fleet v1 API is documented here: https://github.com/coreos/fleet/blob/master/Documentation/api-v1.md
"""
_API = 'fleet'
_VERSION = 'v1'
_STATES = ['inactive', 'loaded', 'launched']
def __init__(
self,
endpoint,
http=None,
ssh_tunnel=None,
ssh_username='core',
ssh_timeout=10,
ssh_known_hosts_file='~/.fleetctl/known_hosts',
ssh_strict_host_key_checking=True,
ssh_raw_transport=None
):
"""Connect to the fleet API and generate a client based on it's discovery document.
Args:
endpoint (str): A URL where the fleet API can be reached. Supported schemes are:
http: A HTTP connection over a TCP socket.
Example: http://127.0.0.1:49153
http+unix: A HTTP connection over a unix domain socket. You must escape the path (/ = %2F).
Example: http+unix://%2Fvar%2Frun%2Ffleet.sock
http (httplib2.Http): An instance of httplib2.Http (or something that acts like it) that HTTP requests will
be made through. You do not need to pass this unless you need to configure specific options for your
http client, or want to pass in a mock for testing.
ssh_tunnel (str '<host>[:<port>]'): Establish an SSH tunnel through the provided address for communication
with fleet. Defaults to None. If specified, the following other options adjust it's behaivor:
ssh_username (str): Username to use when connecting to SSH, defaults to 'core'.
ssh_timeout (float): Amount of time in seconds to allow for SSH connection initialization
before failing, defaults to 10.
ssh_known_hosts_file (str): File used to store remote machine fingerprints,
defaults to '~/.fleetctl/known_hosts'. Ignored if `ssh_strict_host_key_checking` is False
ssh_strict_host_key_checking (bool): Verify host keys presented by remote machines before
initiating SSH connections, defaults to True.
ssh_raw_transport (paramiko.transport.Transport): An active Transport on which open_channel() will be
called to establish connections.
See Advanced SSH Tunneling in docs/client.md for more information.
Raises:
ValueError: The endpoint provided was not accessible or your ssh configuration is incorrect
"""
# stash this for later
self._endpoint = endpoint.strip('/')
self._ssh_client = None
# we overload the http when our proxy enabled versin if they request ssh tunneling
# so we need to make sure they didn't give us both
if (ssh_tunnel or ssh_raw_transport) and http:
raise ValueError('You cannot specify your own http client, and request ssh tunneling.')
# only one way to connect, not both
if ssh_tunnel and ssh_raw_transport:
raise ValueError('If ssh_tunnel is specified, ssh_raw_transport must be None')
# see if we need to setup an ssh tunnel
self._ssh_tunnel = None
# if they handed us a transport, then we either bail or are good to go
if ssh_raw_transport:
if not isinstance(ssh_raw_transport, paramiko.transport.Transport):
raise ValueError('ssh_raw_transport must be an active instance of paramiko.transport.Transport.')
self._ssh_tunnel = SSHTunnel(host=ssh_raw_transport)
# otherwise we are connecting ourselves
elif ssh_tunnel:
(ssh_host, ssh_port) = self._split_hostport(ssh_tunnel, default_port=22)
try:
self._ssh_tunnel = SSHTunnel(
host=ssh_host,
port=ssh_port,
username=ssh_username,
timeout=ssh_timeout,
known_hosts_file=ssh_known_hosts_file,
strict_host_key_checking=ssh_strict_host_key_checking
)
except socket.gaierror as exc:
raise ValueError('{0} could not be resolved.'.format(ssh_host))
except socket.error as exc:
raise ValueError('Unable to connect to {0}:{1}: {2}'.format(
ssh_host,
ssh_port,
exc
))
except paramiko.ssh_exception.SSHException as exc:
raise ValueError('Unable to connect via ssh: {0}: {1}'.format(
exc.__class__.__name__,
exc
))
# did we get an ssh connection up?
if self._ssh_tunnel:
# inject the SSH tunnel socketed into httplib via the proxy_info interface
self._http = httplib2.Http(proxy_info=self._get_proxy_info)
# preface our scheme with 'ssh+'; httplib2's SCHEME_TO_CONNECTION
# will invoke our custom connection objects and route the HTTP
# call across the SSH connection established or passed in above
self._endpoint = 'ssh+' + self._endpoint
else:
self._http = http
# if we've made it this far, we are ready to try to talk to fleet
# possibly through a proxy...
# generate a client binding using the google-api-python client.
# See https://developers.google.com/api-client-library/python/start/get_started
# For more infomation on how to use the generated client binding.
try:
discovery_url = self._endpoint + '/{api}/{apiVersion}/discovery'
self._service = build(
self._API,
self._VERSION,
cache_discovery=False,
discoveryServiceUrl=discovery_url,
http=self._http
)
except socket.error as exc: # pragma: no cover
raise ValueError('Unable to connect to endpoint {0}: {1}'.format(
self._endpoint,
exc
))
except googleapiclient.errors.UnknownApiNameOrVersion as exc:
raise ValueError(
'Connected to endpoint {0} but it is not a fleet v1 API endpoint. '
'This usually means a GET request to {0}/{1}/{2}/discovery failed.'.format(
self._endpoint,
self._API,
self._VERSION
))
def _split_hostport(self, hostport, default_port=None):
"""Split a string in the format of '<host>:<port>' into it's component parts
default_port will be used if a port is not included in the string
Args:
str ('<host>' or '<host>:<port>'): A string to split into it's parts
Returns:
two item tuple: (host, port)
Raises:
ValueError: The string was in an invalid element
"""
try:
(host, port) = hostport.split(':', 1)
except ValueError: # no colon in the string so make our own port
host = hostport
if default_port is None:
raise ValueError('No port found in hostport, and default_port not provided.')
port = default_port
try:
port = int(port)
if port < 1 or port > 65535:
raise ValueError()
except ValueError:
raise ValueError("{0} is not a valid TCP port".format(port))
return (host, port)
def _endpoint_to_target(self, endpoint):
"""Convert a URL into a host / port, or into a path to a unix domain socket
Args:
endpoint (str): A URL parsable by urlparse
Returns:
3 item tuple: (host, port, path).
host and port will None, and path will be not None if a a unix domain socket URL is passed
path will be None if a normal TCP based URL is passed
"""
parsed = urlparse.urlparse(endpoint)
scheme = parsed[0]
hostport = parsed[1]
if 'unix' in scheme:
return (None, None, unquote(hostport))
if scheme == 'https':
target_port = 443
else:
target_port = 80
(target_host, target_port) = self._split_hostport(hostport, default_port=target_port)
return (target_host, target_port, None)
def _get_proxy_info(self, _=None):
"""Generate a ProxyInfo class from a connected SSH transport
Args:
_ (None): Ignored. This is just here as the ProxyInfo spec requires it.
Returns:
SSHTunnelProxyInfo: A ProxyInfo with an active socket tunneled through SSH
"""
# parse the fleet endpoint url, to establish a tunnel to that host
(target_host, target_port, target_path) = self._endpoint_to_target(self._endpoint)
# implement the proxy_info interface from httplib which requires
# that we accept a scheme, and return a ProxyInfo object
# we do :P
# This is called once per request, so we keep this here
# so that we can keep one ssh connection open, and allocate
# new channels as needed per-request
sock = None
if target_path:
sock = self._ssh_tunnel.forward_unix(path=target_path)
else:
sock = self._ssh_tunnel.forward_tcp(target_host, port=target_port)
# Return a ProxyInfo class with this socket
return SSHTunnelProxyInfo(sock=sock)
def _request(self, method, *args, **kwargs):
"""Make a request with automatic pagination handling
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Note: This method will inject the 'nextPageToken' key into `**kwargs` as needed to handle
pagination overwriting any value specified by the caller. If you wish to handle pagination
manually use the `_single_request` method
Yields:
dict: The next page of responses from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# This is set to False and not None so that the while loop below will execute at least once
next_page_token = False
while next_page_token is not None:
# If bool(next_page_token), then include it in the request
# We do this so we don't pass it in the initial request as we set it to False above
if next_page_token:
kwargs['nextPageToken'] = next_page_token
# Make the request
response = self._single_request(method, *args, **kwargs)
# If there is a token for another page in the response, capture it for the next loop iteration
# If not, we set it to None so that the loop will terminate
next_page_token = response.get('nextPageToken', None)
# Return the current response
yield response
def create_unit(self, name, unit):
"""Create a new Unit in the cluster
Create and modify Unit entities to communicate to fleet the desired state of the cluster.
This simply declares what should be happening; the backend system still has to react to
the changes in this desired state. The actual state of the system is communicated with
UnitState entities.
Args:
name (str): The name of the unit to create
unit (Unit): The unit to submit to fleet
Returns:
Unit: The unit that was created
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
self._single_request('Units.Set', unitName=name, body={
'desiredState': unit.desiredState,
'options': unit.options
})
return self.get_unit(name)
def set_unit_desired_state(self, unit, desired_state):
"""Update the desired state of a unit running in the cluster
Args:
unit (str, Unit): The Unit, or name of the unit to update
desired_state: State the user wishes the Unit to be in
("inactive", "loaded", or "launched")
Returns:
Unit: The unit that was updated
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
ValueError: An invalid value was provided for ``desired_state``
"""
if desired_state not in self._STATES:
raise ValueError('state must be one of: {0}'.format(
self._STATES
))
# if we are given an object, grab it's name property
# otherwise, convert to unicode
if isinstance(unit, Unit):
unit = unit.name
else:
unit = str(unit)
self._single_request('Units.Set', unitName=unit, body={
'desiredState': desired_state
})
return self.get_unit(unit)
def destroy_unit(self, unit):
"""Delete a unit from the cluster
Args:
unit (str, Unit): The Unit, or name of the unit to delete
Returns:
True: The unit was deleted
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# if we are given an object, grab it's name property
# otherwise, convert to unicode
if isinstance(unit, Unit):
unit = unit.name
else:
unit = str(unit)
self._single_request('Units.Delete', unitName=unit)
return True
def list_units(self):
"""Return the current list of the Units in the fleet cluster
Yields:
Unit: The next Unit in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
for page in self._request('Units.List'):
for unit in page.get('units', []):
yield Unit(client=self, data=unit)
def get_unit(self, name):
"""Retreive a specifi unit from the fleet cluster by name
Args:
name (str): If specified, only this unit name is returned
Returns:
Unit: The unit identified by ``name`` in the fleet cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
return Unit(client=self, data=self._single_request('Units.Get', unitName=name))
def list_unit_states(self, machine_id=None, unit_name=None):
"""Return the current UnitState for the fleet cluster
Args:
machine_id (str): filter all UnitState objects to those
originating from a specific machine
unit_name (str): filter all UnitState objects to those related
to a specific unit
Yields:
UnitState: The next UnitState in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
for page in self._request('UnitState.List', machineID=machine_id, unitName=unit_name):
for state in page.get('states', []):
yield UnitState(data=state)
def list_machines(self):
"""Retrieve a list of machines in the fleet cluster
Yields:
Machine: The next machine in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# loop through each page of results
for page in self._request('Machines.List'):
# return each machine in the current page
for machine in page.get('machines', []):
yield Machine(data=machine)
|
cnelson/python-fleet | fleet/v1/client.py | Client._request | python | def _request(self, method, *args, **kwargs):
# This is set to False and not None so that the while loop below will execute at least once
next_page_token = False
while next_page_token is not None:
# If bool(next_page_token), then include it in the request
# We do this so we don't pass it in the initial request as we set it to False above
if next_page_token:
kwargs['nextPageToken'] = next_page_token
# Make the request
response = self._single_request(method, *args, **kwargs)
# If there is a token for another page in the response, capture it for the next loop iteration
# If not, we set it to None so that the loop will terminate
next_page_token = response.get('nextPageToken', None)
# Return the current response
yield response | Make a request with automatic pagination handling
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Note: This method will inject the 'nextPageToken' key into `**kwargs` as needed to handle
pagination overwriting any value specified by the caller. If you wish to handle pagination
manually use the `_single_request` method
Yields:
dict: The next page of responses from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400 | train | https://github.com/cnelson/python-fleet/blob/a11dcd8bb3986d1d8f0af90d2da7399c9cc54b4d/fleet/v1/client.py#L437-L475 | [
"def _single_request(self, method, *args, **kwargs):\n \"\"\"Make a single request to the fleet API endpoint\n\n Args:\n method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'\n *args: Passed directly to the method being called.\n **kwargs: Passed directly to the method being called.\n\n Returns:\n dict: The response from the method called.\n\n Raises:\n fleet.v1.errors.APIError: Fleet returned a response code >= 400\n \"\"\"\n\n # The auto generated client binding require instantiating each object you want to call a method on\n # For example to make a request to /machines for the list of machines you would do:\n # self._service.Machines().List(**kwargs)\n # This code iterates through the tokens in `method` and instantiates each object\n # Passing the `*args` and `**kwargs` to the final method listed\n\n # Start here\n _method = self._service\n\n # iterate over each token in the requested method\n for item in method.split('.'):\n\n # if it's the end of the line, pass our argument\n if method.endswith(item):\n _method = getattr(_method, item)(*args, **kwargs)\n else:\n # otherwise, just create an instance and move on\n _method = getattr(_method, item)()\n\n # Discovered endpoints look like r'$ENDPOINT/path/to/method' which isn't a valid URI\n # Per the fleet API documentation:\n # \"Note that this discovery document intentionally ships with an unusable rootUrl;\n # clients must initialize this as appropriate.\"\n\n # So we follow the documentation, and replace the token with our actual endpoint\n _method.uri = _method.uri.replace('$ENDPOINT', self._endpoint)\n\n # Execute the method and return it's output directly\n try:\n return _method.execute(http=self._http)\n except googleapiclient.errors.HttpError as exc:\n response = json.loads(exc.content.decode('utf-8'))['error']\n\n raise APIError(code=response['code'], message=response['message'], http_error=exc)\n"
] | class Client(object):
"""A python wrapper for the fleet v1 API
The fleet v1 API is documented here: https://github.com/coreos/fleet/blob/master/Documentation/api-v1.md
"""
_API = 'fleet'
_VERSION = 'v1'
_STATES = ['inactive', 'loaded', 'launched']
def __init__(
self,
endpoint,
http=None,
ssh_tunnel=None,
ssh_username='core',
ssh_timeout=10,
ssh_known_hosts_file='~/.fleetctl/known_hosts',
ssh_strict_host_key_checking=True,
ssh_raw_transport=None
):
"""Connect to the fleet API and generate a client based on it's discovery document.
Args:
endpoint (str): A URL where the fleet API can be reached. Supported schemes are:
http: A HTTP connection over a TCP socket.
Example: http://127.0.0.1:49153
http+unix: A HTTP connection over a unix domain socket. You must escape the path (/ = %2F).
Example: http+unix://%2Fvar%2Frun%2Ffleet.sock
http (httplib2.Http): An instance of httplib2.Http (or something that acts like it) that HTTP requests will
be made through. You do not need to pass this unless you need to configure specific options for your
http client, or want to pass in a mock for testing.
ssh_tunnel (str '<host>[:<port>]'): Establish an SSH tunnel through the provided address for communication
with fleet. Defaults to None. If specified, the following other options adjust it's behaivor:
ssh_username (str): Username to use when connecting to SSH, defaults to 'core'.
ssh_timeout (float): Amount of time in seconds to allow for SSH connection initialization
before failing, defaults to 10.
ssh_known_hosts_file (str): File used to store remote machine fingerprints,
defaults to '~/.fleetctl/known_hosts'. Ignored if `ssh_strict_host_key_checking` is False
ssh_strict_host_key_checking (bool): Verify host keys presented by remote machines before
initiating SSH connections, defaults to True.
ssh_raw_transport (paramiko.transport.Transport): An active Transport on which open_channel() will be
called to establish connections.
See Advanced SSH Tunneling in docs/client.md for more information.
Raises:
ValueError: The endpoint provided was not accessible or your ssh configuration is incorrect
"""
# stash this for later
self._endpoint = endpoint.strip('/')
self._ssh_client = None
# we overload the http when our proxy enabled versin if they request ssh tunneling
# so we need to make sure they didn't give us both
if (ssh_tunnel or ssh_raw_transport) and http:
raise ValueError('You cannot specify your own http client, and request ssh tunneling.')
# only one way to connect, not both
if ssh_tunnel and ssh_raw_transport:
raise ValueError('If ssh_tunnel is specified, ssh_raw_transport must be None')
# see if we need to setup an ssh tunnel
self._ssh_tunnel = None
# if they handed us a transport, then we either bail or are good to go
if ssh_raw_transport:
if not isinstance(ssh_raw_transport, paramiko.transport.Transport):
raise ValueError('ssh_raw_transport must be an active instance of paramiko.transport.Transport.')
self._ssh_tunnel = SSHTunnel(host=ssh_raw_transport)
# otherwise we are connecting ourselves
elif ssh_tunnel:
(ssh_host, ssh_port) = self._split_hostport(ssh_tunnel, default_port=22)
try:
self._ssh_tunnel = SSHTunnel(
host=ssh_host,
port=ssh_port,
username=ssh_username,
timeout=ssh_timeout,
known_hosts_file=ssh_known_hosts_file,
strict_host_key_checking=ssh_strict_host_key_checking
)
except socket.gaierror as exc:
raise ValueError('{0} could not be resolved.'.format(ssh_host))
except socket.error as exc:
raise ValueError('Unable to connect to {0}:{1}: {2}'.format(
ssh_host,
ssh_port,
exc
))
except paramiko.ssh_exception.SSHException as exc:
raise ValueError('Unable to connect via ssh: {0}: {1}'.format(
exc.__class__.__name__,
exc
))
# did we get an ssh connection up?
if self._ssh_tunnel:
# inject the SSH tunnel socketed into httplib via the proxy_info interface
self._http = httplib2.Http(proxy_info=self._get_proxy_info)
# preface our scheme with 'ssh+'; httplib2's SCHEME_TO_CONNECTION
# will invoke our custom connection objects and route the HTTP
# call across the SSH connection established or passed in above
self._endpoint = 'ssh+' + self._endpoint
else:
self._http = http
# if we've made it this far, we are ready to try to talk to fleet
# possibly through a proxy...
# generate a client binding using the google-api-python client.
# See https://developers.google.com/api-client-library/python/start/get_started
# For more infomation on how to use the generated client binding.
try:
discovery_url = self._endpoint + '/{api}/{apiVersion}/discovery'
self._service = build(
self._API,
self._VERSION,
cache_discovery=False,
discoveryServiceUrl=discovery_url,
http=self._http
)
except socket.error as exc: # pragma: no cover
raise ValueError('Unable to connect to endpoint {0}: {1}'.format(
self._endpoint,
exc
))
except googleapiclient.errors.UnknownApiNameOrVersion as exc:
raise ValueError(
'Connected to endpoint {0} but it is not a fleet v1 API endpoint. '
'This usually means a GET request to {0}/{1}/{2}/discovery failed.'.format(
self._endpoint,
self._API,
self._VERSION
))
def _split_hostport(self, hostport, default_port=None):
"""Split a string in the format of '<host>:<port>' into it's component parts
default_port will be used if a port is not included in the string
Args:
str ('<host>' or '<host>:<port>'): A string to split into it's parts
Returns:
two item tuple: (host, port)
Raises:
ValueError: The string was in an invalid element
"""
try:
(host, port) = hostport.split(':', 1)
except ValueError: # no colon in the string so make our own port
host = hostport
if default_port is None:
raise ValueError('No port found in hostport, and default_port not provided.')
port = default_port
try:
port = int(port)
if port < 1 or port > 65535:
raise ValueError()
except ValueError:
raise ValueError("{0} is not a valid TCP port".format(port))
return (host, port)
def _endpoint_to_target(self, endpoint):
"""Convert a URL into a host / port, or into a path to a unix domain socket
Args:
endpoint (str): A URL parsable by urlparse
Returns:
3 item tuple: (host, port, path).
host and port will None, and path will be not None if a a unix domain socket URL is passed
path will be None if a normal TCP based URL is passed
"""
parsed = urlparse.urlparse(endpoint)
scheme = parsed[0]
hostport = parsed[1]
if 'unix' in scheme:
return (None, None, unquote(hostport))
if scheme == 'https':
target_port = 443
else:
target_port = 80
(target_host, target_port) = self._split_hostport(hostport, default_port=target_port)
return (target_host, target_port, None)
def _get_proxy_info(self, _=None):
"""Generate a ProxyInfo class from a connected SSH transport
Args:
_ (None): Ignored. This is just here as the ProxyInfo spec requires it.
Returns:
SSHTunnelProxyInfo: A ProxyInfo with an active socket tunneled through SSH
"""
# parse the fleet endpoint url, to establish a tunnel to that host
(target_host, target_port, target_path) = self._endpoint_to_target(self._endpoint)
# implement the proxy_info interface from httplib which requires
# that we accept a scheme, and return a ProxyInfo object
# we do :P
# This is called once per request, so we keep this here
# so that we can keep one ssh connection open, and allocate
# new channels as needed per-request
sock = None
if target_path:
sock = self._ssh_tunnel.forward_unix(path=target_path)
else:
sock = self._ssh_tunnel.forward_tcp(target_host, port=target_port)
# Return a ProxyInfo class with this socket
return SSHTunnelProxyInfo(sock=sock)
def _single_request(self, method, *args, **kwargs):
"""Make a single request to the fleet API endpoint
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Returns:
dict: The response from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# The auto generated client binding require instantiating each object you want to call a method on
# For example to make a request to /machines for the list of machines you would do:
# self._service.Machines().List(**kwargs)
# This code iterates through the tokens in `method` and instantiates each object
# Passing the `*args` and `**kwargs` to the final method listed
# Start here
_method = self._service
# iterate over each token in the requested method
for item in method.split('.'):
# if it's the end of the line, pass our argument
if method.endswith(item):
_method = getattr(_method, item)(*args, **kwargs)
else:
# otherwise, just create an instance and move on
_method = getattr(_method, item)()
# Discovered endpoints look like r'$ENDPOINT/path/to/method' which isn't a valid URI
# Per the fleet API documentation:
# "Note that this discovery document intentionally ships with an unusable rootUrl;
# clients must initialize this as appropriate."
# So we follow the documentation, and replace the token with our actual endpoint
_method.uri = _method.uri.replace('$ENDPOINT', self._endpoint)
# Execute the method and return it's output directly
try:
return _method.execute(http=self._http)
except googleapiclient.errors.HttpError as exc:
response = json.loads(exc.content.decode('utf-8'))['error']
raise APIError(code=response['code'], message=response['message'], http_error=exc)
def create_unit(self, name, unit):
"""Create a new Unit in the cluster
Create and modify Unit entities to communicate to fleet the desired state of the cluster.
This simply declares what should be happening; the backend system still has to react to
the changes in this desired state. The actual state of the system is communicated with
UnitState entities.
Args:
name (str): The name of the unit to create
unit (Unit): The unit to submit to fleet
Returns:
Unit: The unit that was created
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
self._single_request('Units.Set', unitName=name, body={
'desiredState': unit.desiredState,
'options': unit.options
})
return self.get_unit(name)
def set_unit_desired_state(self, unit, desired_state):
    """Change the desired state of a unit running in the cluster.

    Args:
        unit (str, Unit): The Unit, or name of the unit to update
        desired_state: State the user wishes the Unit to be in
            ("inactive", "loaded", or "launched")

    Returns:
        Unit: The unit that was updated

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
        ValueError: An invalid value was provided for ``desired_state``
    """
    # Validate the requested state before touching the API.
    if desired_state not in self._STATES:
        raise ValueError('state must be one of: {0}'.format(self._STATES))

    # Accept either a Unit object (use its name) or anything
    # string-convertible naming the unit.
    unit_name = unit.name if isinstance(unit, Unit) else str(unit)

    self._single_request('Units.Set', unitName=unit_name, body={'desiredState': desired_state})
    return self.get_unit(unit_name)
def destroy_unit(self, unit):
    """Delete a unit from the cluster.

    Args:
        unit (str, Unit): The Unit, or name of the unit to delete

    Returns:
        True: The unit was deleted

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    # Resolve a Unit object to its name; otherwise coerce to a string.
    unit_name = unit.name if isinstance(unit, Unit) else str(unit)
    self._single_request('Units.Delete', unitName=unit_name)
    return True
def list_units(self):
    """Iterate over every Unit currently known to the fleet cluster.

    Yields:
        Unit: The next Unit in the cluster

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    # _request transparently walks all result pages for us.
    for response in self._request('Units.List'):
        for unit_data in response.get('units', []):
            yield Unit(client=self, data=unit_data)
def get_unit(self, name):
    """Retrieve a specific unit from the fleet cluster by name.

    Args:
        name (str): The name of the unit to fetch

    Returns:
        Unit: The unit identified by ``name`` in the fleet cluster

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    data = self._single_request('Units.Get', unitName=name)
    return Unit(client=self, data=data)
def list_unit_states(self, machine_id=None, unit_name=None):
    """Iterate over the current UnitState entities in the cluster.

    Args:
        machine_id (str): filter all UnitState objects to those
            originating from a specific machine
        unit_name (str): filter all UnitState objects to those related
            to a specific unit

    Yields:
        UnitState: The next UnitState in the cluster

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    # Pagination is handled by _request; each page carries a 'states' list.
    pages = self._request('UnitState.List', machineID=machine_id, unitName=unit_name)
    for page in pages:
        for state_data in page.get('states', []):
            yield UnitState(data=state_data)
def list_machines(self):
    """Iterate over the machines participating in the fleet cluster.

    Yields:
        Machine: The next machine in the cluster

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    # Walk every page of results, yielding each machine as we see it.
    for page in self._request('Machines.List'):
        for machine_data in page.get('machines', []):
            yield Machine(data=machine_data)
|
cnelson/python-fleet | fleet/v1/client.py | Client.create_unit | python | def create_unit(self, name, unit):
self._single_request('Units.Set', unitName=name, body={
'desiredState': unit.desiredState,
'options': unit.options
})
return self.get_unit(name) | Create a new Unit in the cluster
Create and modify Unit entities to communicate to fleet the desired state of the cluster.
This simply declares what should be happening; the backend system still has to react to
the changes in this desired state. The actual state of the system is communicated with
UnitState entities.
Args:
name (str): The name of the unit to create
unit (Unit): The unit to submit to fleet
Returns:
Unit: The unit that was created
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400 | train | https://github.com/cnelson/python-fleet/blob/a11dcd8bb3986d1d8f0af90d2da7399c9cc54b4d/fleet/v1/client.py#L477-L503 | [
"def _single_request(self, method, *args, **kwargs):\n \"\"\"Make a single request to the fleet API endpoint\n\n Args:\n method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'\n *args: Passed directly to the method being called.\n **kwargs: Passed directly to the method being called.\n\n Returns:\n dict: The response from the method called.\n\n Raises:\n fleet.v1.errors.APIError: Fleet returned a response code >= 400\n \"\"\"\n\n # The auto generated client binding require instantiating each object you want to call a method on\n # For example to make a request to /machines for the list of machines you would do:\n # self._service.Machines().List(**kwargs)\n # This code iterates through the tokens in `method` and instantiates each object\n # Passing the `*args` and `**kwargs` to the final method listed\n\n # Start here\n _method = self._service\n\n # iterate over each token in the requested method\n for item in method.split('.'):\n\n # if it's the end of the line, pass our argument\n if method.endswith(item):\n _method = getattr(_method, item)(*args, **kwargs)\n else:\n # otherwise, just create an instance and move on\n _method = getattr(_method, item)()\n\n # Discovered endpoints look like r'$ENDPOINT/path/to/method' which isn't a valid URI\n # Per the fleet API documentation:\n # \"Note that this discovery document intentionally ships with an unusable rootUrl;\n # clients must initialize this as appropriate.\"\n\n # So we follow the documentation, and replace the token with our actual endpoint\n _method.uri = _method.uri.replace('$ENDPOINT', self._endpoint)\n\n # Execute the method and return it's output directly\n try:\n return _method.execute(http=self._http)\n except googleapiclient.errors.HttpError as exc:\n response = json.loads(exc.content.decode('utf-8'))['error']\n\n raise APIError(code=response['code'], message=response['message'], http_error=exc)\n",
"def get_unit(self, name):\n \"\"\"Retreive a specifi unit from the fleet cluster by name\n\n Args:\n name (str): If specified, only this unit name is returned\n\n Returns:\n Unit: The unit identified by ``name`` in the fleet cluster\n\n Raises:\n fleet.v1.errors.APIError: Fleet returned a response code >= 400\n\n \"\"\"\n return Unit(client=self, data=self._single_request('Units.Get', unitName=name))\n"
] | class Client(object):
"""A python wrapper for the fleet v1 API
The fleet v1 API is documented here: https://github.com/coreos/fleet/blob/master/Documentation/api-v1.md
"""
_API = 'fleet'
_VERSION = 'v1'
_STATES = ['inactive', 'loaded', 'launched']
def __init__(
self,
endpoint,
http=None,
ssh_tunnel=None,
ssh_username='core',
ssh_timeout=10,
ssh_known_hosts_file='~/.fleetctl/known_hosts',
ssh_strict_host_key_checking=True,
ssh_raw_transport=None
):
"""Connect to the fleet API and generate a client based on it's discovery document.
Args:
endpoint (str): A URL where the fleet API can be reached. Supported schemes are:
http: A HTTP connection over a TCP socket.
Example: http://127.0.0.1:49153
http+unix: A HTTP connection over a unix domain socket. You must escape the path (/ = %2F).
Example: http+unix://%2Fvar%2Frun%2Ffleet.sock
http (httplib2.Http): An instance of httplib2.Http (or something that acts like it) that HTTP requests will
be made through. You do not need to pass this unless you need to configure specific options for your
http client, or want to pass in a mock for testing.
ssh_tunnel (str '<host>[:<port>]'): Establish an SSH tunnel through the provided address for communication
with fleet. Defaults to None. If specified, the following other options adjust it's behaivor:
ssh_username (str): Username to use when connecting to SSH, defaults to 'core'.
ssh_timeout (float): Amount of time in seconds to allow for SSH connection initialization
before failing, defaults to 10.
ssh_known_hosts_file (str): File used to store remote machine fingerprints,
defaults to '~/.fleetctl/known_hosts'. Ignored if `ssh_strict_host_key_checking` is False
ssh_strict_host_key_checking (bool): Verify host keys presented by remote machines before
initiating SSH connections, defaults to True.
ssh_raw_transport (paramiko.transport.Transport): An active Transport on which open_channel() will be
called to establish connections.
See Advanced SSH Tunneling in docs/client.md for more information.
Raises:
ValueError: The endpoint provided was not accessible or your ssh configuration is incorrect
"""
# stash this for later
self._endpoint = endpoint.strip('/')
self._ssh_client = None
# we overload the http when our proxy enabled versin if they request ssh tunneling
# so we need to make sure they didn't give us both
if (ssh_tunnel or ssh_raw_transport) and http:
raise ValueError('You cannot specify your own http client, and request ssh tunneling.')
# only one way to connect, not both
if ssh_tunnel and ssh_raw_transport:
raise ValueError('If ssh_tunnel is specified, ssh_raw_transport must be None')
# see if we need to setup an ssh tunnel
self._ssh_tunnel = None
# if they handed us a transport, then we either bail or are good to go
if ssh_raw_transport:
if not isinstance(ssh_raw_transport, paramiko.transport.Transport):
raise ValueError('ssh_raw_transport must be an active instance of paramiko.transport.Transport.')
self._ssh_tunnel = SSHTunnel(host=ssh_raw_transport)
# otherwise we are connecting ourselves
elif ssh_tunnel:
(ssh_host, ssh_port) = self._split_hostport(ssh_tunnel, default_port=22)
try:
self._ssh_tunnel = SSHTunnel(
host=ssh_host,
port=ssh_port,
username=ssh_username,
timeout=ssh_timeout,
known_hosts_file=ssh_known_hosts_file,
strict_host_key_checking=ssh_strict_host_key_checking
)
except socket.gaierror as exc:
raise ValueError('{0} could not be resolved.'.format(ssh_host))
except socket.error as exc:
raise ValueError('Unable to connect to {0}:{1}: {2}'.format(
ssh_host,
ssh_port,
exc
))
except paramiko.ssh_exception.SSHException as exc:
raise ValueError('Unable to connect via ssh: {0}: {1}'.format(
exc.__class__.__name__,
exc
))
# did we get an ssh connection up?
if self._ssh_tunnel:
# inject the SSH tunnel socketed into httplib via the proxy_info interface
self._http = httplib2.Http(proxy_info=self._get_proxy_info)
# preface our scheme with 'ssh+'; httplib2's SCHEME_TO_CONNECTION
# will invoke our custom connection objects and route the HTTP
# call across the SSH connection established or passed in above
self._endpoint = 'ssh+' + self._endpoint
else:
self._http = http
# if we've made it this far, we are ready to try to talk to fleet
# possibly through a proxy...
# generate a client binding using the google-api-python client.
# See https://developers.google.com/api-client-library/python/start/get_started
# For more infomation on how to use the generated client binding.
try:
discovery_url = self._endpoint + '/{api}/{apiVersion}/discovery'
self._service = build(
self._API,
self._VERSION,
cache_discovery=False,
discoveryServiceUrl=discovery_url,
http=self._http
)
except socket.error as exc: # pragma: no cover
raise ValueError('Unable to connect to endpoint {0}: {1}'.format(
self._endpoint,
exc
))
except googleapiclient.errors.UnknownApiNameOrVersion as exc:
raise ValueError(
'Connected to endpoint {0} but it is not a fleet v1 API endpoint. '
'This usually means a GET request to {0}/{1}/{2}/discovery failed.'.format(
self._endpoint,
self._API,
self._VERSION
))
def _split_hostport(self, hostport, default_port=None):
"""Split a string in the format of '<host>:<port>' into it's component parts
default_port will be used if a port is not included in the string
Args:
str ('<host>' or '<host>:<port>'): A string to split into it's parts
Returns:
two item tuple: (host, port)
Raises:
ValueError: The string was in an invalid element
"""
try:
(host, port) = hostport.split(':', 1)
except ValueError: # no colon in the string so make our own port
host = hostport
if default_port is None:
raise ValueError('No port found in hostport, and default_port not provided.')
port = default_port
try:
port = int(port)
if port < 1 or port > 65535:
raise ValueError()
except ValueError:
raise ValueError("{0} is not a valid TCP port".format(port))
return (host, port)
def _endpoint_to_target(self, endpoint):
"""Convert a URL into a host / port, or into a path to a unix domain socket
Args:
endpoint (str): A URL parsable by urlparse
Returns:
3 item tuple: (host, port, path).
host and port will None, and path will be not None if a a unix domain socket URL is passed
path will be None if a normal TCP based URL is passed
"""
parsed = urlparse.urlparse(endpoint)
scheme = parsed[0]
hostport = parsed[1]
if 'unix' in scheme:
return (None, None, unquote(hostport))
if scheme == 'https':
target_port = 443
else:
target_port = 80
(target_host, target_port) = self._split_hostport(hostport, default_port=target_port)
return (target_host, target_port, None)
def _get_proxy_info(self, _=None):
"""Generate a ProxyInfo class from a connected SSH transport
Args:
_ (None): Ignored. This is just here as the ProxyInfo spec requires it.
Returns:
SSHTunnelProxyInfo: A ProxyInfo with an active socket tunneled through SSH
"""
# parse the fleet endpoint url, to establish a tunnel to that host
(target_host, target_port, target_path) = self._endpoint_to_target(self._endpoint)
# implement the proxy_info interface from httplib which requires
# that we accept a scheme, and return a ProxyInfo object
# we do :P
# This is called once per request, so we keep this here
# so that we can keep one ssh connection open, and allocate
# new channels as needed per-request
sock = None
if target_path:
sock = self._ssh_tunnel.forward_unix(path=target_path)
else:
sock = self._ssh_tunnel.forward_tcp(target_host, port=target_port)
# Return a ProxyInfo class with this socket
return SSHTunnelProxyInfo(sock=sock)
def _single_request(self, method, *args, **kwargs):
"""Make a single request to the fleet API endpoint
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Returns:
dict: The response from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# The auto generated client binding require instantiating each object you want to call a method on
# For example to make a request to /machines for the list of machines you would do:
# self._service.Machines().List(**kwargs)
# This code iterates through the tokens in `method` and instantiates each object
# Passing the `*args` and `**kwargs` to the final method listed
# Start here
_method = self._service
# iterate over each token in the requested method
for item in method.split('.'):
# if it's the end of the line, pass our argument
if method.endswith(item):
_method = getattr(_method, item)(*args, **kwargs)
else:
# otherwise, just create an instance and move on
_method = getattr(_method, item)()
# Discovered endpoints look like r'$ENDPOINT/path/to/method' which isn't a valid URI
# Per the fleet API documentation:
# "Note that this discovery document intentionally ships with an unusable rootUrl;
# clients must initialize this as appropriate."
# So we follow the documentation, and replace the token with our actual endpoint
_method.uri = _method.uri.replace('$ENDPOINT', self._endpoint)
# Execute the method and return it's output directly
try:
return _method.execute(http=self._http)
except googleapiclient.errors.HttpError as exc:
response = json.loads(exc.content.decode('utf-8'))['error']
raise APIError(code=response['code'], message=response['message'], http_error=exc)
def _request(self, method, *args, **kwargs):
"""Make a request with automatic pagination handling
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Note: This method will inject the 'nextPageToken' key into `**kwargs` as needed to handle
pagination overwriting any value specified by the caller. If you wish to handle pagination
manually use the `_single_request` method
Yields:
dict: The next page of responses from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# This is set to False and not None so that the while loop below will execute at least once
next_page_token = False
while next_page_token is not None:
# If bool(next_page_token), then include it in the request
# We do this so we don't pass it in the initial request as we set it to False above
if next_page_token:
kwargs['nextPageToken'] = next_page_token
# Make the request
response = self._single_request(method, *args, **kwargs)
# If there is a token for another page in the response, capture it for the next loop iteration
# If not, we set it to None so that the loop will terminate
next_page_token = response.get('nextPageToken', None)
# Return the current response
yield response
def set_unit_desired_state(self, unit, desired_state):
"""Update the desired state of a unit running in the cluster
Args:
unit (str, Unit): The Unit, or name of the unit to update
desired_state: State the user wishes the Unit to be in
("inactive", "loaded", or "launched")
Returns:
Unit: The unit that was updated
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
ValueError: An invalid value was provided for ``desired_state``
"""
if desired_state not in self._STATES:
raise ValueError('state must be one of: {0}'.format(
self._STATES
))
# if we are given an object, grab it's name property
# otherwise, convert to unicode
if isinstance(unit, Unit):
unit = unit.name
else:
unit = str(unit)
self._single_request('Units.Set', unitName=unit, body={
'desiredState': desired_state
})
return self.get_unit(unit)
def destroy_unit(self, unit):
"""Delete a unit from the cluster
Args:
unit (str, Unit): The Unit, or name of the unit to delete
Returns:
True: The unit was deleted
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# if we are given an object, grab it's name property
# otherwise, convert to unicode
if isinstance(unit, Unit):
unit = unit.name
else:
unit = str(unit)
self._single_request('Units.Delete', unitName=unit)
return True
def list_units(self):
"""Return the current list of the Units in the fleet cluster
Yields:
Unit: The next Unit in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
for page in self._request('Units.List'):
for unit in page.get('units', []):
yield Unit(client=self, data=unit)
def get_unit(self, name):
"""Retreive a specifi unit from the fleet cluster by name
Args:
name (str): If specified, only this unit name is returned
Returns:
Unit: The unit identified by ``name`` in the fleet cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
return Unit(client=self, data=self._single_request('Units.Get', unitName=name))
def list_unit_states(self, machine_id=None, unit_name=None):
"""Return the current UnitState for the fleet cluster
Args:
machine_id (str): filter all UnitState objects to those
originating from a specific machine
unit_name (str): filter all UnitState objects to those related
to a specific unit
Yields:
UnitState: The next UnitState in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
for page in self._request('UnitState.List', machineID=machine_id, unitName=unit_name):
for state in page.get('states', []):
yield UnitState(data=state)
def list_machines(self):
"""Retrieve a list of machines in the fleet cluster
Yields:
Machine: The next machine in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# loop through each page of results
for page in self._request('Machines.List'):
# return each machine in the current page
for machine in page.get('machines', []):
yield Machine(data=machine)
|
cnelson/python-fleet | fleet/v1/client.py | Client.set_unit_desired_state | python | def set_unit_desired_state(self, unit, desired_state):
if desired_state not in self._STATES:
raise ValueError('state must be one of: {0}'.format(
self._STATES
))
# if we are given an object, grab it's name property
# otherwise, convert to unicode
if isinstance(unit, Unit):
unit = unit.name
else:
unit = str(unit)
self._single_request('Units.Set', unitName=unit, body={
'desiredState': desired_state
})
return self.get_unit(unit) | Update the desired state of a unit running in the cluster
Args:
unit (str, Unit): The Unit, or name of the unit to update
desired_state: State the user wishes the Unit to be in
("inactive", "loaded", or "launched")
Returns:
Unit: The unit that was updated
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
ValueError: An invalid value was provided for ``desired_state`` | train | https://github.com/cnelson/python-fleet/blob/a11dcd8bb3986d1d8f0af90d2da7399c9cc54b4d/fleet/v1/client.py#L505-L538 | [
"def _single_request(self, method, *args, **kwargs):\n \"\"\"Make a single request to the fleet API endpoint\n\n Args:\n method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'\n *args: Passed directly to the method being called.\n **kwargs: Passed directly to the method being called.\n\n Returns:\n dict: The response from the method called.\n\n Raises:\n fleet.v1.errors.APIError: Fleet returned a response code >= 400\n \"\"\"\n\n # The auto generated client binding require instantiating each object you want to call a method on\n # For example to make a request to /machines for the list of machines you would do:\n # self._service.Machines().List(**kwargs)\n # This code iterates through the tokens in `method` and instantiates each object\n # Passing the `*args` and `**kwargs` to the final method listed\n\n # Start here\n _method = self._service\n\n # iterate over each token in the requested method\n for item in method.split('.'):\n\n # if it's the end of the line, pass our argument\n if method.endswith(item):\n _method = getattr(_method, item)(*args, **kwargs)\n else:\n # otherwise, just create an instance and move on\n _method = getattr(_method, item)()\n\n # Discovered endpoints look like r'$ENDPOINT/path/to/method' which isn't a valid URI\n # Per the fleet API documentation:\n # \"Note that this discovery document intentionally ships with an unusable rootUrl;\n # clients must initialize this as appropriate.\"\n\n # So we follow the documentation, and replace the token with our actual endpoint\n _method.uri = _method.uri.replace('$ENDPOINT', self._endpoint)\n\n # Execute the method and return it's output directly\n try:\n return _method.execute(http=self._http)\n except googleapiclient.errors.HttpError as exc:\n response = json.loads(exc.content.decode('utf-8'))['error']\n\n raise APIError(code=response['code'], message=response['message'], http_error=exc)\n",
"def get_unit(self, name):\n \"\"\"Retreive a specifi unit from the fleet cluster by name\n\n Args:\n name (str): If specified, only this unit name is returned\n\n Returns:\n Unit: The unit identified by ``name`` in the fleet cluster\n\n Raises:\n fleet.v1.errors.APIError: Fleet returned a response code >= 400\n\n \"\"\"\n return Unit(client=self, data=self._single_request('Units.Get', unitName=name))\n"
] | class Client(object):
"""A python wrapper for the fleet v1 API
The fleet v1 API is documented here: https://github.com/coreos/fleet/blob/master/Documentation/api-v1.md
"""
_API = 'fleet'
_VERSION = 'v1'
_STATES = ['inactive', 'loaded', 'launched']
def __init__(
self,
endpoint,
http=None,
ssh_tunnel=None,
ssh_username='core',
ssh_timeout=10,
ssh_known_hosts_file='~/.fleetctl/known_hosts',
ssh_strict_host_key_checking=True,
ssh_raw_transport=None
):
"""Connect to the fleet API and generate a client based on it's discovery document.
Args:
endpoint (str): A URL where the fleet API can be reached. Supported schemes are:
http: A HTTP connection over a TCP socket.
Example: http://127.0.0.1:49153
http+unix: A HTTP connection over a unix domain socket. You must escape the path (/ = %2F).
Example: http+unix://%2Fvar%2Frun%2Ffleet.sock
http (httplib2.Http): An instance of httplib2.Http (or something that acts like it) that HTTP requests will
be made through. You do not need to pass this unless you need to configure specific options for your
http client, or want to pass in a mock for testing.
ssh_tunnel (str '<host>[:<port>]'): Establish an SSH tunnel through the provided address for communication
with fleet. Defaults to None. If specified, the following other options adjust it's behaivor:
ssh_username (str): Username to use when connecting to SSH, defaults to 'core'.
ssh_timeout (float): Amount of time in seconds to allow for SSH connection initialization
before failing, defaults to 10.
ssh_known_hosts_file (str): File used to store remote machine fingerprints,
defaults to '~/.fleetctl/known_hosts'. Ignored if `ssh_strict_host_key_checking` is False
ssh_strict_host_key_checking (bool): Verify host keys presented by remote machines before
initiating SSH connections, defaults to True.
ssh_raw_transport (paramiko.transport.Transport): An active Transport on which open_channel() will be
called to establish connections.
See Advanced SSH Tunneling in docs/client.md for more information.
Raises:
ValueError: The endpoint provided was not accessible or your ssh configuration is incorrect
"""
# stash this for later
self._endpoint = endpoint.strip('/')
self._ssh_client = None
# we overload the http when our proxy enabled versin if they request ssh tunneling
# so we need to make sure they didn't give us both
if (ssh_tunnel or ssh_raw_transport) and http:
raise ValueError('You cannot specify your own http client, and request ssh tunneling.')
# only one way to connect, not both
if ssh_tunnel and ssh_raw_transport:
raise ValueError('If ssh_tunnel is specified, ssh_raw_transport must be None')
# see if we need to setup an ssh tunnel
self._ssh_tunnel = None
# if they handed us a transport, then we either bail or are good to go
if ssh_raw_transport:
if not isinstance(ssh_raw_transport, paramiko.transport.Transport):
raise ValueError('ssh_raw_transport must be an active instance of paramiko.transport.Transport.')
self._ssh_tunnel = SSHTunnel(host=ssh_raw_transport)
# otherwise we are connecting ourselves
elif ssh_tunnel:
(ssh_host, ssh_port) = self._split_hostport(ssh_tunnel, default_port=22)
try:
self._ssh_tunnel = SSHTunnel(
host=ssh_host,
port=ssh_port,
username=ssh_username,
timeout=ssh_timeout,
known_hosts_file=ssh_known_hosts_file,
strict_host_key_checking=ssh_strict_host_key_checking
)
except socket.gaierror as exc:
raise ValueError('{0} could not be resolved.'.format(ssh_host))
except socket.error as exc:
raise ValueError('Unable to connect to {0}:{1}: {2}'.format(
ssh_host,
ssh_port,
exc
))
except paramiko.ssh_exception.SSHException as exc:
raise ValueError('Unable to connect via ssh: {0}: {1}'.format(
exc.__class__.__name__,
exc
))
# did we get an ssh connection up?
if self._ssh_tunnel:
# inject the SSH tunnel socketed into httplib via the proxy_info interface
self._http = httplib2.Http(proxy_info=self._get_proxy_info)
# preface our scheme with 'ssh+'; httplib2's SCHEME_TO_CONNECTION
# will invoke our custom connection objects and route the HTTP
# call across the SSH connection established or passed in above
self._endpoint = 'ssh+' + self._endpoint
else:
self._http = http
# if we've made it this far, we are ready to try to talk to fleet
# possibly through a proxy...
# generate a client binding using the google-api-python client.
# See https://developers.google.com/api-client-library/python/start/get_started
# For more infomation on how to use the generated client binding.
try:
discovery_url = self._endpoint + '/{api}/{apiVersion}/discovery'
self._service = build(
self._API,
self._VERSION,
cache_discovery=False,
discoveryServiceUrl=discovery_url,
http=self._http
)
except socket.error as exc: # pragma: no cover
raise ValueError('Unable to connect to endpoint {0}: {1}'.format(
self._endpoint,
exc
))
except googleapiclient.errors.UnknownApiNameOrVersion as exc:
raise ValueError(
'Connected to endpoint {0} but it is not a fleet v1 API endpoint. '
'This usually means a GET request to {0}/{1}/{2}/discovery failed.'.format(
self._endpoint,
self._API,
self._VERSION
))
def _split_hostport(self, hostport, default_port=None):
"""Split a string in the format of '<host>:<port>' into it's component parts
default_port will be used if a port is not included in the string
Args:
str ('<host>' or '<host>:<port>'): A string to split into it's parts
Returns:
two item tuple: (host, port)
Raises:
ValueError: The string was in an invalid element
"""
try:
(host, port) = hostport.split(':', 1)
except ValueError: # no colon in the string so make our own port
host = hostport
if default_port is None:
raise ValueError('No port found in hostport, and default_port not provided.')
port = default_port
try:
port = int(port)
if port < 1 or port > 65535:
raise ValueError()
except ValueError:
raise ValueError("{0} is not a valid TCP port".format(port))
return (host, port)
def _endpoint_to_target(self, endpoint):
"""Convert a URL into a host / port, or into a path to a unix domain socket
Args:
endpoint (str): A URL parsable by urlparse
Returns:
3 item tuple: (host, port, path).
host and port will None, and path will be not None if a a unix domain socket URL is passed
path will be None if a normal TCP based URL is passed
"""
parsed = urlparse.urlparse(endpoint)
scheme = parsed[0]
hostport = parsed[1]
if 'unix' in scheme:
return (None, None, unquote(hostport))
if scheme == 'https':
target_port = 443
else:
target_port = 80
(target_host, target_port) = self._split_hostport(hostport, default_port=target_port)
return (target_host, target_port, None)
def _get_proxy_info(self, _=None):
"""Generate a ProxyInfo class from a connected SSH transport
Args:
_ (None): Ignored. This is just here as the ProxyInfo spec requires it.
Returns:
SSHTunnelProxyInfo: A ProxyInfo with an active socket tunneled through SSH
"""
# parse the fleet endpoint url, to establish a tunnel to that host
(target_host, target_port, target_path) = self._endpoint_to_target(self._endpoint)
# implement the proxy_info interface from httplib which requires
# that we accept a scheme, and return a ProxyInfo object
# we do :P
# This is called once per request, so we keep this here
# so that we can keep one ssh connection open, and allocate
# new channels as needed per-request
sock = None
if target_path:
sock = self._ssh_tunnel.forward_unix(path=target_path)
else:
sock = self._ssh_tunnel.forward_tcp(target_host, port=target_port)
# Return a ProxyInfo class with this socket
return SSHTunnelProxyInfo(sock=sock)
def _single_request(self, method, *args, **kwargs):
"""Make a single request to the fleet API endpoint
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Returns:
dict: The response from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# The auto generated client binding require instantiating each object you want to call a method on
# For example to make a request to /machines for the list of machines you would do:
# self._service.Machines().List(**kwargs)
# This code iterates through the tokens in `method` and instantiates each object
# Passing the `*args` and `**kwargs` to the final method listed
# Start here
_method = self._service
# iterate over each token in the requested method
for item in method.split('.'):
# if it's the end of the line, pass our argument
if method.endswith(item):
_method = getattr(_method, item)(*args, **kwargs)
else:
# otherwise, just create an instance and move on
_method = getattr(_method, item)()
# Discovered endpoints look like r'$ENDPOINT/path/to/method' which isn't a valid URI
# Per the fleet API documentation:
# "Note that this discovery document intentionally ships with an unusable rootUrl;
# clients must initialize this as appropriate."
# So we follow the documentation, and replace the token with our actual endpoint
_method.uri = _method.uri.replace('$ENDPOINT', self._endpoint)
# Execute the method and return it's output directly
try:
return _method.execute(http=self._http)
except googleapiclient.errors.HttpError as exc:
response = json.loads(exc.content.decode('utf-8'))['error']
raise APIError(code=response['code'], message=response['message'], http_error=exc)
def _request(self, method, *args, **kwargs):
"""Make a request with automatic pagination handling
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Note: This method will inject the 'nextPageToken' key into `**kwargs` as needed to handle
pagination overwriting any value specified by the caller. If you wish to handle pagination
manually use the `_single_request` method
Yields:
dict: The next page of responses from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# This is set to False and not None so that the while loop below will execute at least once
next_page_token = False
while next_page_token is not None:
# If bool(next_page_token), then include it in the request
# We do this so we don't pass it in the initial request as we set it to False above
if next_page_token:
kwargs['nextPageToken'] = next_page_token
# Make the request
response = self._single_request(method, *args, **kwargs)
# If there is a token for another page in the response, capture it for the next loop iteration
# If not, we set it to None so that the loop will terminate
next_page_token = response.get('nextPageToken', None)
# Return the current response
yield response
def create_unit(self, name, unit):
    """Create a new Unit in the cluster

    Create and modify Unit entities to communicate to fleet the desired state of the cluster.
    This simply declares what should be happening; the backend system still has to react to
    the changes in this desired state. The actual state of the system is communicated with
    UnitState entities.

    Args:
        name (str): The name of the unit to create
        unit (Unit): The unit to submit to fleet

    Returns:
        Unit: The unit that was created

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    body = {
        'desiredState': unit.desiredState,
        'options': unit.options,
    }
    self._single_request('Units.Set', unitName=name, body=body)
    # fleet does not echo the created unit back, so fetch it explicitly
    return self.get_unit(name)
def destroy_unit(self, unit):
    """Delete a unit from the cluster

    Args:
        unit (str, Unit): The Unit, or name of the unit to delete

    Returns:
        True: The unit was deleted

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    # accept either a Unit object (use its name) or anything coercible to str
    name = unit.name if isinstance(unit, Unit) else str(unit)
    self._single_request('Units.Delete', unitName=name)
    return True
def list_units(self):
    """Return the current list of the Units in the fleet cluster

    Yields:
        Unit: The next Unit in the cluster

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    # walk every page of results, flattening them into one stream of Units
    for page in self._request('Units.List'):
        for raw in page.get('units', []):
            yield Unit(client=self, data=raw)
def get_unit(self, name):
    """Retrieve a specific unit from the fleet cluster by name

    Args:
        name (str): The name of the unit to fetch

    Returns:
        Unit: The unit identified by ``name`` in the fleet cluster

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    # 'Units.Get' returns the raw unit dict; wrap it in a Unit bound to this client
    return Unit(client=self, data=self._single_request('Units.Get', unitName=name))
def list_unit_states(self, machine_id=None, unit_name=None):
    """Return the current UnitState for the fleet cluster

    Args:
        machine_id (str): filter all UnitState objects to those
            originating from a specific machine
        unit_name (str): filter all UnitState objects to those related
            to a specific unit

    Yields:
        UnitState: The next UnitState in the cluster

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    # both filters are passed straight through to the API; None means "no filter"
    for page in self._request('UnitState.List', machineID=machine_id, unitName=unit_name):
        for state in page.get('states', []):
            yield UnitState(data=state)
def list_machines(self):
    """Retrieve a list of machines in the fleet cluster

    Yields:
        Machine: The next machine in the cluster

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    # flatten the paginated responses into a single stream of Machine objects
    for page in self._request('Machines.List'):
        for raw in page.get('machines', []):
            yield Machine(data=raw)
|
cnelson/python-fleet | fleet/v1/client.py | Client.destroy_unit | python | def destroy_unit(self, unit):
# if we are given an object, grab it's name property
# otherwise, convert to unicode
if isinstance(unit, Unit):
unit = unit.name
else:
unit = str(unit)
self._single_request('Units.Delete', unitName=unit)
return True | Delete a unit from the cluster
Args:
unit (str, Unit): The Unit, or name of the unit to delete
Returns:
True: The unit was deleted
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400 | train | https://github.com/cnelson/python-fleet/blob/a11dcd8bb3986d1d8f0af90d2da7399c9cc54b4d/fleet/v1/client.py#L540-L562 | [
"def _single_request(self, method, *args, **kwargs):\n \"\"\"Make a single request to the fleet API endpoint\n\n Args:\n method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'\n *args: Passed directly to the method being called.\n **kwargs: Passed directly to the method being called.\n\n Returns:\n dict: The response from the method called.\n\n Raises:\n fleet.v1.errors.APIError: Fleet returned a response code >= 400\n \"\"\"\n\n # The auto generated client binding require instantiating each object you want to call a method on\n # For example to make a request to /machines for the list of machines you would do:\n # self._service.Machines().List(**kwargs)\n # This code iterates through the tokens in `method` and instantiates each object\n # Passing the `*args` and `**kwargs` to the final method listed\n\n # Start here\n _method = self._service\n\n # iterate over each token in the requested method\n for item in method.split('.'):\n\n # if it's the end of the line, pass our argument\n if method.endswith(item):\n _method = getattr(_method, item)(*args, **kwargs)\n else:\n # otherwise, just create an instance and move on\n _method = getattr(_method, item)()\n\n # Discovered endpoints look like r'$ENDPOINT/path/to/method' which isn't a valid URI\n # Per the fleet API documentation:\n # \"Note that this discovery document intentionally ships with an unusable rootUrl;\n # clients must initialize this as appropriate.\"\n\n # So we follow the documentation, and replace the token with our actual endpoint\n _method.uri = _method.uri.replace('$ENDPOINT', self._endpoint)\n\n # Execute the method and return it's output directly\n try:\n return _method.execute(http=self._http)\n except googleapiclient.errors.HttpError as exc:\n response = json.loads(exc.content.decode('utf-8'))['error']\n\n raise APIError(code=response['code'], message=response['message'], http_error=exc)\n"
] | class Client(object):
"""A python wrapper for the fleet v1 API
The fleet v1 API is documented here: https://github.com/coreos/fleet/blob/master/Documentation/api-v1.md
"""
_API = 'fleet'
_VERSION = 'v1'
_STATES = ['inactive', 'loaded', 'launched']
def __init__(
self,
endpoint,
http=None,
ssh_tunnel=None,
ssh_username='core',
ssh_timeout=10,
ssh_known_hosts_file='~/.fleetctl/known_hosts',
ssh_strict_host_key_checking=True,
ssh_raw_transport=None
):
"""Connect to the fleet API and generate a client based on it's discovery document.
Args:
endpoint (str): A URL where the fleet API can be reached. Supported schemes are:
http: A HTTP connection over a TCP socket.
Example: http://127.0.0.1:49153
http+unix: A HTTP connection over a unix domain socket. You must escape the path (/ = %2F).
Example: http+unix://%2Fvar%2Frun%2Ffleet.sock
http (httplib2.Http): An instance of httplib2.Http (or something that acts like it) that HTTP requests will
be made through. You do not need to pass this unless you need to configure specific options for your
http client, or want to pass in a mock for testing.
ssh_tunnel (str '<host>[:<port>]'): Establish an SSH tunnel through the provided address for communication
with fleet. Defaults to None. If specified, the following other options adjust it's behaivor:
ssh_username (str): Username to use when connecting to SSH, defaults to 'core'.
ssh_timeout (float): Amount of time in seconds to allow for SSH connection initialization
before failing, defaults to 10.
ssh_known_hosts_file (str): File used to store remote machine fingerprints,
defaults to '~/.fleetctl/known_hosts'. Ignored if `ssh_strict_host_key_checking` is False
ssh_strict_host_key_checking (bool): Verify host keys presented by remote machines before
initiating SSH connections, defaults to True.
ssh_raw_transport (paramiko.transport.Transport): An active Transport on which open_channel() will be
called to establish connections.
See Advanced SSH Tunneling in docs/client.md for more information.
Raises:
ValueError: The endpoint provided was not accessible or your ssh configuration is incorrect
"""
# stash this for later
self._endpoint = endpoint.strip('/')
self._ssh_client = None
# we overload the http when our proxy enabled versin if they request ssh tunneling
# so we need to make sure they didn't give us both
if (ssh_tunnel or ssh_raw_transport) and http:
raise ValueError('You cannot specify your own http client, and request ssh tunneling.')
# only one way to connect, not both
if ssh_tunnel and ssh_raw_transport:
raise ValueError('If ssh_tunnel is specified, ssh_raw_transport must be None')
# see if we need to setup an ssh tunnel
self._ssh_tunnel = None
# if they handed us a transport, then we either bail or are good to go
if ssh_raw_transport:
if not isinstance(ssh_raw_transport, paramiko.transport.Transport):
raise ValueError('ssh_raw_transport must be an active instance of paramiko.transport.Transport.')
self._ssh_tunnel = SSHTunnel(host=ssh_raw_transport)
# otherwise we are connecting ourselves
elif ssh_tunnel:
(ssh_host, ssh_port) = self._split_hostport(ssh_tunnel, default_port=22)
try:
self._ssh_tunnel = SSHTunnel(
host=ssh_host,
port=ssh_port,
username=ssh_username,
timeout=ssh_timeout,
known_hosts_file=ssh_known_hosts_file,
strict_host_key_checking=ssh_strict_host_key_checking
)
except socket.gaierror as exc:
raise ValueError('{0} could not be resolved.'.format(ssh_host))
except socket.error as exc:
raise ValueError('Unable to connect to {0}:{1}: {2}'.format(
ssh_host,
ssh_port,
exc
))
except paramiko.ssh_exception.SSHException as exc:
raise ValueError('Unable to connect via ssh: {0}: {1}'.format(
exc.__class__.__name__,
exc
))
# did we get an ssh connection up?
if self._ssh_tunnel:
# inject the SSH tunnel socketed into httplib via the proxy_info interface
self._http = httplib2.Http(proxy_info=self._get_proxy_info)
# preface our scheme with 'ssh+'; httplib2's SCHEME_TO_CONNECTION
# will invoke our custom connection objects and route the HTTP
# call across the SSH connection established or passed in above
self._endpoint = 'ssh+' + self._endpoint
else:
self._http = http
# if we've made it this far, we are ready to try to talk to fleet
# possibly through a proxy...
# generate a client binding using the google-api-python client.
# See https://developers.google.com/api-client-library/python/start/get_started
# For more infomation on how to use the generated client binding.
try:
discovery_url = self._endpoint + '/{api}/{apiVersion}/discovery'
self._service = build(
self._API,
self._VERSION,
cache_discovery=False,
discoveryServiceUrl=discovery_url,
http=self._http
)
except socket.error as exc: # pragma: no cover
raise ValueError('Unable to connect to endpoint {0}: {1}'.format(
self._endpoint,
exc
))
except googleapiclient.errors.UnknownApiNameOrVersion as exc:
raise ValueError(
'Connected to endpoint {0} but it is not a fleet v1 API endpoint. '
'This usually means a GET request to {0}/{1}/{2}/discovery failed.'.format(
self._endpoint,
self._API,
self._VERSION
))
def _split_hostport(self, hostport, default_port=None):
"""Split a string in the format of '<host>:<port>' into it's component parts
default_port will be used if a port is not included in the string
Args:
str ('<host>' or '<host>:<port>'): A string to split into it's parts
Returns:
two item tuple: (host, port)
Raises:
ValueError: The string was in an invalid element
"""
try:
(host, port) = hostport.split(':', 1)
except ValueError: # no colon in the string so make our own port
host = hostport
if default_port is None:
raise ValueError('No port found in hostport, and default_port not provided.')
port = default_port
try:
port = int(port)
if port < 1 or port > 65535:
raise ValueError()
except ValueError:
raise ValueError("{0} is not a valid TCP port".format(port))
return (host, port)
def _endpoint_to_target(self, endpoint):
"""Convert a URL into a host / port, or into a path to a unix domain socket
Args:
endpoint (str): A URL parsable by urlparse
Returns:
3 item tuple: (host, port, path).
host and port will None, and path will be not None if a a unix domain socket URL is passed
path will be None if a normal TCP based URL is passed
"""
parsed = urlparse.urlparse(endpoint)
scheme = parsed[0]
hostport = parsed[1]
if 'unix' in scheme:
return (None, None, unquote(hostport))
if scheme == 'https':
target_port = 443
else:
target_port = 80
(target_host, target_port) = self._split_hostport(hostport, default_port=target_port)
return (target_host, target_port, None)
def _get_proxy_info(self, _=None):
    """Generate a ProxyInfo class from a connected SSH transport

    Args:
        _ (None): Ignored. This is just here as the ProxyInfo spec requires it
            (httplib2 passes the scheme when proxy_info is callable).

    Returns:
        SSHTunnelProxyInfo: A ProxyInfo with an active socket tunneled through SSH
    """
    # parse the fleet endpoint url, to establish a tunnel to that host
    (target_host, target_port, target_path) = self._endpoint_to_target(self._endpoint)
    # implement the proxy_info interface from httplib which requires
    # that we accept a scheme, and return a ProxyInfo object
    # This is called once per request, so we keep one ssh connection open
    # and allocate a new channel per request.
    sock = None
    if target_path:
        # unix-socket endpoint: forward across the tunnel to that remote path
        sock = self._ssh_tunnel.forward_unix(path=target_path)
    else:
        # TCP endpoint: forward to host:port on the remote side
        sock = self._ssh_tunnel.forward_tcp(target_host, port=target_port)
    # Return a ProxyInfo class with this socket
    return SSHTunnelProxyInfo(sock=sock)
def _single_request(self, method, *args, **kwargs):
"""Make a single request to the fleet API endpoint
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Returns:
dict: The response from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# The auto generated client binding require instantiating each object you want to call a method on
# For example to make a request to /machines for the list of machines you would do:
# self._service.Machines().List(**kwargs)
# This code iterates through the tokens in `method` and instantiates each object
# Passing the `*args` and `**kwargs` to the final method listed
# Start here
_method = self._service
# iterate over each token in the requested method
for item in method.split('.'):
# if it's the end of the line, pass our argument
if method.endswith(item):
_method = getattr(_method, item)(*args, **kwargs)
else:
# otherwise, just create an instance and move on
_method = getattr(_method, item)()
# Discovered endpoints look like r'$ENDPOINT/path/to/method' which isn't a valid URI
# Per the fleet API documentation:
# "Note that this discovery document intentionally ships with an unusable rootUrl;
# clients must initialize this as appropriate."
# So we follow the documentation, and replace the token with our actual endpoint
_method.uri = _method.uri.replace('$ENDPOINT', self._endpoint)
# Execute the method and return it's output directly
try:
return _method.execute(http=self._http)
except googleapiclient.errors.HttpError as exc:
response = json.loads(exc.content.decode('utf-8'))['error']
raise APIError(code=response['code'], message=response['message'], http_error=exc)
def _request(self, method, *args, **kwargs):
"""Make a request with automatic pagination handling
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Note: This method will inject the 'nextPageToken' key into `**kwargs` as needed to handle
pagination overwriting any value specified by the caller. If you wish to handle pagination
manually use the `_single_request` method
Yields:
dict: The next page of responses from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# This is set to False and not None so that the while loop below will execute at least once
next_page_token = False
while next_page_token is not None:
# If bool(next_page_token), then include it in the request
# We do this so we don't pass it in the initial request as we set it to False above
if next_page_token:
kwargs['nextPageToken'] = next_page_token
# Make the request
response = self._single_request(method, *args, **kwargs)
# If there is a token for another page in the response, capture it for the next loop iteration
# If not, we set it to None so that the loop will terminate
next_page_token = response.get('nextPageToken', None)
# Return the current response
yield response
def create_unit(self, name, unit):
"""Create a new Unit in the cluster
Create and modify Unit entities to communicate to fleet the desired state of the cluster.
This simply declares what should be happening; the backend system still has to react to
the changes in this desired state. The actual state of the system is communicated with
UnitState entities.
Args:
name (str): The name of the unit to create
unit (Unit): The unit to submit to fleet
Returns:
Unit: The unit that was created
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
self._single_request('Units.Set', unitName=name, body={
'desiredState': unit.desiredState,
'options': unit.options
})
return self.get_unit(name)
def set_unit_desired_state(self, unit, desired_state):
    """Update the desired state of a unit running in the cluster

    Args:
        unit (str, Unit): The Unit, or name of the unit to update
        desired_state: State the user wishes the Unit to be in
            ("inactive", "loaded", or "launched")

    Returns:
        Unit: The unit that was updated

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
        ValueError: An invalid value was provided for ``desired_state``
    """
    # reject anything that isn't a recognized fleet state up front
    if desired_state not in self._STATES:
        raise ValueError('state must be one of: {0}'.format(self._STATES))
    # accept either a Unit object (use its name) or anything coercible to str
    name = unit.name if isinstance(unit, Unit) else str(unit)
    self._single_request('Units.Set', unitName=name, body={'desiredState': desired_state})
    # fetch the unit back so the caller sees its post-update representation
    return self.get_unit(name)
def list_units(self):
"""Return the current list of the Units in the fleet cluster
Yields:
Unit: The next Unit in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
for page in self._request('Units.List'):
for unit in page.get('units', []):
yield Unit(client=self, data=unit)
def get_unit(self, name):
"""Retreive a specifi unit from the fleet cluster by name
Args:
name (str): If specified, only this unit name is returned
Returns:
Unit: The unit identified by ``name`` in the fleet cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
return Unit(client=self, data=self._single_request('Units.Get', unitName=name))
def list_unit_states(self, machine_id=None, unit_name=None):
"""Return the current UnitState for the fleet cluster
Args:
machine_id (str): filter all UnitState objects to those
originating from a specific machine
unit_name (str): filter all UnitState objects to those related
to a specific unit
Yields:
UnitState: The next UnitState in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
for page in self._request('UnitState.List', machineID=machine_id, unitName=unit_name):
for state in page.get('states', []):
yield UnitState(data=state)
def list_machines(self):
"""Retrieve a list of machines in the fleet cluster
Yields:
Machine: The next machine in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# loop through each page of results
for page in self._request('Machines.List'):
# return each machine in the current page
for machine in page.get('machines', []):
yield Machine(data=machine)
|
cnelson/python-fleet | fleet/v1/client.py | Client.list_units | python | def list_units(self):
for page in self._request('Units.List'):
for unit in page.get('units', []):
yield Unit(client=self, data=unit) | Return the current list of the Units in the fleet cluster
Yields:
Unit: The next Unit in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400 | train | https://github.com/cnelson/python-fleet/blob/a11dcd8bb3986d1d8f0af90d2da7399c9cc54b4d/fleet/v1/client.py#L564-L576 | [
"def _request(self, method, *args, **kwargs):\n \"\"\"Make a request with automatic pagination handling\n\n Args:\n method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'\n *args: Passed directly to the method being called.\n **kwargs: Passed directly to the method being called.\n Note: This method will inject the 'nextPageToken' key into `**kwargs` as needed to handle\n pagination overwriting any value specified by the caller. If you wish to handle pagination\n manually use the `_single_request` method\n\n\n Yields:\n dict: The next page of responses from the method called.\n\n\n Raises:\n fleet.v1.errors.APIError: Fleet returned a response code >= 400\n\n \"\"\"\n\n # This is set to False and not None so that the while loop below will execute at least once\n next_page_token = False\n\n while next_page_token is not None:\n # If bool(next_page_token), then include it in the request\n # We do this so we don't pass it in the initial request as we set it to False above\n if next_page_token:\n kwargs['nextPageToken'] = next_page_token\n\n # Make the request\n response = self._single_request(method, *args, **kwargs)\n\n # If there is a token for another page in the response, capture it for the next loop iteration\n # If not, we set it to None so that the loop will terminate\n next_page_token = response.get('nextPageToken', None)\n\n # Return the current response\n yield response\n"
] | class Client(object):
"""A python wrapper for the fleet v1 API
The fleet v1 API is documented here: https://github.com/coreos/fleet/blob/master/Documentation/api-v1.md
"""
_API = 'fleet'
_VERSION = 'v1'
_STATES = ['inactive', 'loaded', 'launched']
def __init__(
self,
endpoint,
http=None,
ssh_tunnel=None,
ssh_username='core',
ssh_timeout=10,
ssh_known_hosts_file='~/.fleetctl/known_hosts',
ssh_strict_host_key_checking=True,
ssh_raw_transport=None
):
"""Connect to the fleet API and generate a client based on it's discovery document.
Args:
endpoint (str): A URL where the fleet API can be reached. Supported schemes are:
http: A HTTP connection over a TCP socket.
Example: http://127.0.0.1:49153
http+unix: A HTTP connection over a unix domain socket. You must escape the path (/ = %2F).
Example: http+unix://%2Fvar%2Frun%2Ffleet.sock
http (httplib2.Http): An instance of httplib2.Http (or something that acts like it) that HTTP requests will
be made through. You do not need to pass this unless you need to configure specific options for your
http client, or want to pass in a mock for testing.
ssh_tunnel (str '<host>[:<port>]'): Establish an SSH tunnel through the provided address for communication
with fleet. Defaults to None. If specified, the following other options adjust it's behaivor:
ssh_username (str): Username to use when connecting to SSH, defaults to 'core'.
ssh_timeout (float): Amount of time in seconds to allow for SSH connection initialization
before failing, defaults to 10.
ssh_known_hosts_file (str): File used to store remote machine fingerprints,
defaults to '~/.fleetctl/known_hosts'. Ignored if `ssh_strict_host_key_checking` is False
ssh_strict_host_key_checking (bool): Verify host keys presented by remote machines before
initiating SSH connections, defaults to True.
ssh_raw_transport (paramiko.transport.Transport): An active Transport on which open_channel() will be
called to establish connections.
See Advanced SSH Tunneling in docs/client.md for more information.
Raises:
ValueError: The endpoint provided was not accessible or your ssh configuration is incorrect
"""
# stash this for later
self._endpoint = endpoint.strip('/')
self._ssh_client = None
# we overload the http when our proxy enabled versin if they request ssh tunneling
# so we need to make sure they didn't give us both
if (ssh_tunnel or ssh_raw_transport) and http:
raise ValueError('You cannot specify your own http client, and request ssh tunneling.')
# only one way to connect, not both
if ssh_tunnel and ssh_raw_transport:
raise ValueError('If ssh_tunnel is specified, ssh_raw_transport must be None')
# see if we need to setup an ssh tunnel
self._ssh_tunnel = None
# if they handed us a transport, then we either bail or are good to go
if ssh_raw_transport:
if not isinstance(ssh_raw_transport, paramiko.transport.Transport):
raise ValueError('ssh_raw_transport must be an active instance of paramiko.transport.Transport.')
self._ssh_tunnel = SSHTunnel(host=ssh_raw_transport)
# otherwise we are connecting ourselves
elif ssh_tunnel:
(ssh_host, ssh_port) = self._split_hostport(ssh_tunnel, default_port=22)
try:
self._ssh_tunnel = SSHTunnel(
host=ssh_host,
port=ssh_port,
username=ssh_username,
timeout=ssh_timeout,
known_hosts_file=ssh_known_hosts_file,
strict_host_key_checking=ssh_strict_host_key_checking
)
except socket.gaierror as exc:
raise ValueError('{0} could not be resolved.'.format(ssh_host))
except socket.error as exc:
raise ValueError('Unable to connect to {0}:{1}: {2}'.format(
ssh_host,
ssh_port,
exc
))
except paramiko.ssh_exception.SSHException as exc:
raise ValueError('Unable to connect via ssh: {0}: {1}'.format(
exc.__class__.__name__,
exc
))
# did we get an ssh connection up?
if self._ssh_tunnel:
# inject the SSH tunnel socketed into httplib via the proxy_info interface
self._http = httplib2.Http(proxy_info=self._get_proxy_info)
# preface our scheme with 'ssh+'; httplib2's SCHEME_TO_CONNECTION
# will invoke our custom connection objects and route the HTTP
# call across the SSH connection established or passed in above
self._endpoint = 'ssh+' + self._endpoint
else:
self._http = http
# if we've made it this far, we are ready to try to talk to fleet
# possibly through a proxy...
# generate a client binding using the google-api-python client.
# See https://developers.google.com/api-client-library/python/start/get_started
# For more infomation on how to use the generated client binding.
try:
discovery_url = self._endpoint + '/{api}/{apiVersion}/discovery'
self._service = build(
self._API,
self._VERSION,
cache_discovery=False,
discoveryServiceUrl=discovery_url,
http=self._http
)
except socket.error as exc: # pragma: no cover
raise ValueError('Unable to connect to endpoint {0}: {1}'.format(
self._endpoint,
exc
))
except googleapiclient.errors.UnknownApiNameOrVersion as exc:
raise ValueError(
'Connected to endpoint {0} but it is not a fleet v1 API endpoint. '
'This usually means a GET request to {0}/{1}/{2}/discovery failed.'.format(
self._endpoint,
self._API,
self._VERSION
))
def _split_hostport(self, hostport, default_port=None):
"""Split a string in the format of '<host>:<port>' into it's component parts
default_port will be used if a port is not included in the string
Args:
str ('<host>' or '<host>:<port>'): A string to split into it's parts
Returns:
two item tuple: (host, port)
Raises:
ValueError: The string was in an invalid element
"""
try:
(host, port) = hostport.split(':', 1)
except ValueError: # no colon in the string so make our own port
host = hostport
if default_port is None:
raise ValueError('No port found in hostport, and default_port not provided.')
port = default_port
try:
port = int(port)
if port < 1 or port > 65535:
raise ValueError()
except ValueError:
raise ValueError("{0} is not a valid TCP port".format(port))
return (host, port)
def _endpoint_to_target(self, endpoint):
"""Convert a URL into a host / port, or into a path to a unix domain socket
Args:
endpoint (str): A URL parsable by urlparse
Returns:
3 item tuple: (host, port, path).
host and port will None, and path will be not None if a a unix domain socket URL is passed
path will be None if a normal TCP based URL is passed
"""
parsed = urlparse.urlparse(endpoint)
scheme = parsed[0]
hostport = parsed[1]
if 'unix' in scheme:
return (None, None, unquote(hostport))
if scheme == 'https':
target_port = 443
else:
target_port = 80
(target_host, target_port) = self._split_hostport(hostport, default_port=target_port)
return (target_host, target_port, None)
def _get_proxy_info(self, _=None):
"""Generate a ProxyInfo class from a connected SSH transport
Args:
_ (None): Ignored. This is just here as the ProxyInfo spec requires it.
Returns:
SSHTunnelProxyInfo: A ProxyInfo with an active socket tunneled through SSH
"""
# parse the fleet endpoint url, to establish a tunnel to that host
(target_host, target_port, target_path) = self._endpoint_to_target(self._endpoint)
# implement the proxy_info interface from httplib which requires
# that we accept a scheme, and return a ProxyInfo object
# we do :P
# This is called once per request, so we keep this here
# so that we can keep one ssh connection open, and allocate
# new channels as needed per-request
sock = None
if target_path:
sock = self._ssh_tunnel.forward_unix(path=target_path)
else:
sock = self._ssh_tunnel.forward_tcp(target_host, port=target_port)
# Return a ProxyInfo class with this socket
return SSHTunnelProxyInfo(sock=sock)
def _single_request(self, method, *args, **kwargs):
"""Make a single request to the fleet API endpoint
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Returns:
dict: The response from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# The auto generated client binding require instantiating each object you want to call a method on
# For example to make a request to /machines for the list of machines you would do:
# self._service.Machines().List(**kwargs)
# This code iterates through the tokens in `method` and instantiates each object
# Passing the `*args` and `**kwargs` to the final method listed
# Start here
_method = self._service
# iterate over each token in the requested method
for item in method.split('.'):
# if it's the end of the line, pass our argument
if method.endswith(item):
_method = getattr(_method, item)(*args, **kwargs)
else:
# otherwise, just create an instance and move on
_method = getattr(_method, item)()
# Discovered endpoints look like r'$ENDPOINT/path/to/method' which isn't a valid URI
# Per the fleet API documentation:
# "Note that this discovery document intentionally ships with an unusable rootUrl;
# clients must initialize this as appropriate."
# So we follow the documentation, and replace the token with our actual endpoint
_method.uri = _method.uri.replace('$ENDPOINT', self._endpoint)
# Execute the method and return it's output directly
try:
return _method.execute(http=self._http)
except googleapiclient.errors.HttpError as exc:
response = json.loads(exc.content.decode('utf-8'))['error']
raise APIError(code=response['code'], message=response['message'], http_error=exc)
def _request(self, method, *args, **kwargs):
    """Make a request with automatic pagination handling.

    Args:
        method (str): A dot delimited string indicating the method to call.
            Example: 'Machines.List'
        *args: Passed directly to the method being called.
        **kwargs: Passed directly to the method being called.
            Note: this method injects the 'nextPageToken' key into **kwargs
            as needed to handle pagination, overwriting any value specified
            by the caller.  If you wish to handle pagination manually, use
            the `_single_request` method.

    Yields:
        dict: The next page of responses from the method called.

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    # The first request is issued without a page token; afterwards we keep
    # fetching pages for as long as the server hands back a continuation
    # token in its response.
    page_token = False

    while True:
        # only forward the token once the server has actually given us one
        if page_token:
            kwargs['nextPageToken'] = page_token

        page = self._single_request(method, *args, **kwargs)

        # hand the current page to the caller before deciding whether to
        # continue
        yield page

        page_token = page.get('nextPageToken', None)
        if page_token is None:
            break
def create_unit(self, name, unit):
    """Create a new Unit in the cluster.

    Create and modify Unit entities to communicate to fleet the desired
    state of the cluster.  This simply declares what should be happening;
    the backend system still has to react to the changes in this desired
    state.  The actual state of the system is communicated with UnitState
    entities.

    Args:
        name (str): The name of the unit to create
        unit (Unit): The unit to submit to fleet

    Returns:
        Unit: The unit that was created

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    # push the desired state and unit options up to fleet
    payload = {
        'desiredState': unit.desiredState,
        'options': unit.options
    }
    self._single_request('Units.Set', unitName=name, body=payload)

    # fleet fills in server-side fields, so fetch the stored unit back
    return self.get_unit(name)
def set_unit_desired_state(self, unit, desired_state):
    """Update the desired state of a unit running in the cluster.

    Args:
        unit (str, Unit): The Unit, or name of the unit to update
        desired_state: State the user wishes the Unit to be in
            ("inactive", "loaded", or "launched")

    Returns:
        Unit: The unit that was updated

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
        ValueError: An invalid value was provided for ``desired_state``
    """
    # refuse anything outside the states fleet understands
    if desired_state not in self._STATES:
        raise ValueError('state must be one of: {0}'.format(
            self._STATES
        ))

    # accept either a Unit object (use its name) or a plain unit name
    unit_name = unit.name if isinstance(unit, Unit) else str(unit)

    self._single_request('Units.Set', unitName=unit_name, body={
        'desiredState': desired_state
    })

    # return the refreshed unit as stored by fleet
    return self.get_unit(unit_name)
def destroy_unit(self, unit):
    """Delete a unit from the cluster.

    Args:
        unit (str, Unit): The Unit, or name of the unit to delete

    Returns:
        True: The unit was deleted

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    # accept either a Unit object (use its name) or a plain unit name
    unit_name = unit.name if isinstance(unit, Unit) else str(unit)

    self._single_request('Units.Delete', unitName=unit_name)
    return True
def get_unit(self, name):
    """Retrieve a specific unit from the fleet cluster by name.

    Args:
        name (str): The name of the unit to fetch

    Returns:
        Unit: The unit identified by ``name`` in the fleet cluster

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    response = self._single_request('Units.Get', unitName=name)
    return Unit(client=self, data=response)
def list_unit_states(self, machine_id=None, unit_name=None):
    """Return the current UnitState for the fleet cluster.

    Args:
        machine_id (str): filter all UnitState objects to those
            originating from a specific machine
        unit_name (str): filter all UnitState objects to those related
            to a specific unit

    Yields:
        UnitState: The next UnitState in the cluster

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    # walk every page of results and flatten the per-page state lists
    pages = self._request('UnitState.List', machineID=machine_id, unitName=unit_name)
    for page in pages:
        for state in page.get('states', []):
            yield UnitState(data=state)
def list_machines(self):
    """Retrieve a list of machines in the fleet cluster.

    Yields:
        Machine: The next machine in the cluster

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    # walk every page of results, yielding each machine as it is seen
    for page in self._request('Machines.List'):
        for machine_data in page.get('machines', []):
            yield Machine(data=machine_data)
|
cnelson/python-fleet | fleet/v1/client.py | Client.get_unit | python | def get_unit(self, name):
return Unit(client=self, data=self._single_request('Units.Get', unitName=name)) | Retrieve a specific unit from the fleet cluster by name
Args:
name (str): If specified, only this unit name is returned
Returns:
Unit: The unit identified by ``name`` in the fleet cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400 | train | https://github.com/cnelson/python-fleet/blob/a11dcd8bb3986d1d8f0af90d2da7399c9cc54b4d/fleet/v1/client.py#L578-L591 | [
"def _single_request(self, method, *args, **kwargs):\n \"\"\"Make a single request to the fleet API endpoint\n\n Args:\n method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'\n *args: Passed directly to the method being called.\n **kwargs: Passed directly to the method being called.\n\n Returns:\n dict: The response from the method called.\n\n Raises:\n fleet.v1.errors.APIError: Fleet returned a response code >= 400\n \"\"\"\n\n # The auto generated client binding require instantiating each object you want to call a method on\n # For example to make a request to /machines for the list of machines you would do:\n # self._service.Machines().List(**kwargs)\n # This code iterates through the tokens in `method` and instantiates each object\n # Passing the `*args` and `**kwargs` to the final method listed\n\n # Start here\n _method = self._service\n\n # iterate over each token in the requested method\n for item in method.split('.'):\n\n # if it's the end of the line, pass our argument\n if method.endswith(item):\n _method = getattr(_method, item)(*args, **kwargs)\n else:\n # otherwise, just create an instance and move on\n _method = getattr(_method, item)()\n\n # Discovered endpoints look like r'$ENDPOINT/path/to/method' which isn't a valid URI\n # Per the fleet API documentation:\n # \"Note that this discovery document intentionally ships with an unusable rootUrl;\n # clients must initialize this as appropriate.\"\n\n # So we follow the documentation, and replace the token with our actual endpoint\n _method.uri = _method.uri.replace('$ENDPOINT', self._endpoint)\n\n # Execute the method and return it's output directly\n try:\n return _method.execute(http=self._http)\n except googleapiclient.errors.HttpError as exc:\n response = json.loads(exc.content.decode('utf-8'))['error']\n\n raise APIError(code=response['code'], message=response['message'], http_error=exc)\n"
] | class Client(object):
"""A python wrapper for the fleet v1 API
The fleet v1 API is documented here: https://github.com/coreos/fleet/blob/master/Documentation/api-v1.md
"""
_API = 'fleet'
_VERSION = 'v1'
_STATES = ['inactive', 'loaded', 'launched']
def __init__(
self,
endpoint,
http=None,
ssh_tunnel=None,
ssh_username='core',
ssh_timeout=10,
ssh_known_hosts_file='~/.fleetctl/known_hosts',
ssh_strict_host_key_checking=True,
ssh_raw_transport=None
):
"""Connect to the fleet API and generate a client based on it's discovery document.
Args:
endpoint (str): A URL where the fleet API can be reached. Supported schemes are:
http: A HTTP connection over a TCP socket.
Example: http://127.0.0.1:49153
http+unix: A HTTP connection over a unix domain socket. You must escape the path (/ = %2F).
Example: http+unix://%2Fvar%2Frun%2Ffleet.sock
http (httplib2.Http): An instance of httplib2.Http (or something that acts like it) that HTTP requests will
be made through. You do not need to pass this unless you need to configure specific options for your
http client, or want to pass in a mock for testing.
ssh_tunnel (str '<host>[:<port>]'): Establish an SSH tunnel through the provided address for communication
with fleet. Defaults to None. If specified, the following other options adjust it's behaivor:
ssh_username (str): Username to use when connecting to SSH, defaults to 'core'.
ssh_timeout (float): Amount of time in seconds to allow for SSH connection initialization
before failing, defaults to 10.
ssh_known_hosts_file (str): File used to store remote machine fingerprints,
defaults to '~/.fleetctl/known_hosts'. Ignored if `ssh_strict_host_key_checking` is False
ssh_strict_host_key_checking (bool): Verify host keys presented by remote machines before
initiating SSH connections, defaults to True.
ssh_raw_transport (paramiko.transport.Transport): An active Transport on which open_channel() will be
called to establish connections.
See Advanced SSH Tunneling in docs/client.md for more information.
Raises:
ValueError: The endpoint provided was not accessible or your ssh configuration is incorrect
"""
# stash this for later
self._endpoint = endpoint.strip('/')
self._ssh_client = None
# we overload the http when our proxy enabled versin if they request ssh tunneling
# so we need to make sure they didn't give us both
if (ssh_tunnel or ssh_raw_transport) and http:
raise ValueError('You cannot specify your own http client, and request ssh tunneling.')
# only one way to connect, not both
if ssh_tunnel and ssh_raw_transport:
raise ValueError('If ssh_tunnel is specified, ssh_raw_transport must be None')
# see if we need to setup an ssh tunnel
self._ssh_tunnel = None
# if they handed us a transport, then we either bail or are good to go
if ssh_raw_transport:
if not isinstance(ssh_raw_transport, paramiko.transport.Transport):
raise ValueError('ssh_raw_transport must be an active instance of paramiko.transport.Transport.')
self._ssh_tunnel = SSHTunnel(host=ssh_raw_transport)
# otherwise we are connecting ourselves
elif ssh_tunnel:
(ssh_host, ssh_port) = self._split_hostport(ssh_tunnel, default_port=22)
try:
self._ssh_tunnel = SSHTunnel(
host=ssh_host,
port=ssh_port,
username=ssh_username,
timeout=ssh_timeout,
known_hosts_file=ssh_known_hosts_file,
strict_host_key_checking=ssh_strict_host_key_checking
)
except socket.gaierror as exc:
raise ValueError('{0} could not be resolved.'.format(ssh_host))
except socket.error as exc:
raise ValueError('Unable to connect to {0}:{1}: {2}'.format(
ssh_host,
ssh_port,
exc
))
except paramiko.ssh_exception.SSHException as exc:
raise ValueError('Unable to connect via ssh: {0}: {1}'.format(
exc.__class__.__name__,
exc
))
# did we get an ssh connection up?
if self._ssh_tunnel:
# inject the SSH tunnel socketed into httplib via the proxy_info interface
self._http = httplib2.Http(proxy_info=self._get_proxy_info)
# preface our scheme with 'ssh+'; httplib2's SCHEME_TO_CONNECTION
# will invoke our custom connection objects and route the HTTP
# call across the SSH connection established or passed in above
self._endpoint = 'ssh+' + self._endpoint
else:
self._http = http
# if we've made it this far, we are ready to try to talk to fleet
# possibly through a proxy...
# generate a client binding using the google-api-python client.
# See https://developers.google.com/api-client-library/python/start/get_started
# For more infomation on how to use the generated client binding.
try:
discovery_url = self._endpoint + '/{api}/{apiVersion}/discovery'
self._service = build(
self._API,
self._VERSION,
cache_discovery=False,
discoveryServiceUrl=discovery_url,
http=self._http
)
except socket.error as exc: # pragma: no cover
raise ValueError('Unable to connect to endpoint {0}: {1}'.format(
self._endpoint,
exc
))
except googleapiclient.errors.UnknownApiNameOrVersion as exc:
raise ValueError(
'Connected to endpoint {0} but it is not a fleet v1 API endpoint. '
'This usually means a GET request to {0}/{1}/{2}/discovery failed.'.format(
self._endpoint,
self._API,
self._VERSION
))
def _split_hostport(self, hostport, default_port=None):
"""Split a string in the format of '<host>:<port>' into it's component parts
default_port will be used if a port is not included in the string
Args:
str ('<host>' or '<host>:<port>'): A string to split into it's parts
Returns:
two item tuple: (host, port)
Raises:
ValueError: The string was in an invalid element
"""
try:
(host, port) = hostport.split(':', 1)
except ValueError: # no colon in the string so make our own port
host = hostport
if default_port is None:
raise ValueError('No port found in hostport, and default_port not provided.')
port = default_port
try:
port = int(port)
if port < 1 or port > 65535:
raise ValueError()
except ValueError:
raise ValueError("{0} is not a valid TCP port".format(port))
return (host, port)
def _endpoint_to_target(self, endpoint):
"""Convert a URL into a host / port, or into a path to a unix domain socket
Args:
endpoint (str): A URL parsable by urlparse
Returns:
3 item tuple: (host, port, path).
host and port will None, and path will be not None if a a unix domain socket URL is passed
path will be None if a normal TCP based URL is passed
"""
parsed = urlparse.urlparse(endpoint)
scheme = parsed[0]
hostport = parsed[1]
if 'unix' in scheme:
return (None, None, unquote(hostport))
if scheme == 'https':
target_port = 443
else:
target_port = 80
(target_host, target_port) = self._split_hostport(hostport, default_port=target_port)
return (target_host, target_port, None)
def _get_proxy_info(self, _=None):
"""Generate a ProxyInfo class from a connected SSH transport
Args:
_ (None): Ignored. This is just here as the ProxyInfo spec requires it.
Returns:
SSHTunnelProxyInfo: A ProxyInfo with an active socket tunneled through SSH
"""
# parse the fleet endpoint url, to establish a tunnel to that host
(target_host, target_port, target_path) = self._endpoint_to_target(self._endpoint)
# implement the proxy_info interface from httplib which requires
# that we accept a scheme, and return a ProxyInfo object
# we do :P
# This is called once per request, so we keep this here
# so that we can keep one ssh connection open, and allocate
# new channels as needed per-request
sock = None
if target_path:
sock = self._ssh_tunnel.forward_unix(path=target_path)
else:
sock = self._ssh_tunnel.forward_tcp(target_host, port=target_port)
# Return a ProxyInfo class with this socket
return SSHTunnelProxyInfo(sock=sock)
def _single_request(self, method, *args, **kwargs):
"""Make a single request to the fleet API endpoint
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Returns:
dict: The response from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# The auto generated client binding require instantiating each object you want to call a method on
# For example to make a request to /machines for the list of machines you would do:
# self._service.Machines().List(**kwargs)
# This code iterates through the tokens in `method` and instantiates each object
# Passing the `*args` and `**kwargs` to the final method listed
# Start here
_method = self._service
# iterate over each token in the requested method
for item in method.split('.'):
# if it's the end of the line, pass our argument
if method.endswith(item):
_method = getattr(_method, item)(*args, **kwargs)
else:
# otherwise, just create an instance and move on
_method = getattr(_method, item)()
# Discovered endpoints look like r'$ENDPOINT/path/to/method' which isn't a valid URI
# Per the fleet API documentation:
# "Note that this discovery document intentionally ships with an unusable rootUrl;
# clients must initialize this as appropriate."
# So we follow the documentation, and replace the token with our actual endpoint
_method.uri = _method.uri.replace('$ENDPOINT', self._endpoint)
# Execute the method and return it's output directly
try:
return _method.execute(http=self._http)
except googleapiclient.errors.HttpError as exc:
response = json.loads(exc.content.decode('utf-8'))['error']
raise APIError(code=response['code'], message=response['message'], http_error=exc)
def _request(self, method, *args, **kwargs):
"""Make a request with automatic pagination handling
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Note: This method will inject the 'nextPageToken' key into `**kwargs` as needed to handle
pagination overwriting any value specified by the caller. If you wish to handle pagination
manually use the `_single_request` method
Yields:
dict: The next page of responses from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# This is set to False and not None so that the while loop below will execute at least once
next_page_token = False
while next_page_token is not None:
# If bool(next_page_token), then include it in the request
# We do this so we don't pass it in the initial request as we set it to False above
if next_page_token:
kwargs['nextPageToken'] = next_page_token
# Make the request
response = self._single_request(method, *args, **kwargs)
# If there is a token for another page in the response, capture it for the next loop iteration
# If not, we set it to None so that the loop will terminate
next_page_token = response.get('nextPageToken', None)
# Return the current response
yield response
def create_unit(self, name, unit):
"""Create a new Unit in the cluster
Create and modify Unit entities to communicate to fleet the desired state of the cluster.
This simply declares what should be happening; the backend system still has to react to
the changes in this desired state. The actual state of the system is communicated with
UnitState entities.
Args:
name (str): The name of the unit to create
unit (Unit): The unit to submit to fleet
Returns:
Unit: The unit that was created
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
self._single_request('Units.Set', unitName=name, body={
'desiredState': unit.desiredState,
'options': unit.options
})
return self.get_unit(name)
def set_unit_desired_state(self, unit, desired_state):
"""Update the desired state of a unit running in the cluster
Args:
unit (str, Unit): The Unit, or name of the unit to update
desired_state: State the user wishes the Unit to be in
("inactive", "loaded", or "launched")
Returns:
Unit: The unit that was updated
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
ValueError: An invalid value was provided for ``desired_state``
"""
if desired_state not in self._STATES:
raise ValueError('state must be one of: {0}'.format(
self._STATES
))
# if we are given an object, grab it's name property
# otherwise, convert to unicode
if isinstance(unit, Unit):
unit = unit.name
else:
unit = str(unit)
self._single_request('Units.Set', unitName=unit, body={
'desiredState': desired_state
})
return self.get_unit(unit)
def destroy_unit(self, unit):
"""Delete a unit from the cluster
Args:
unit (str, Unit): The Unit, or name of the unit to delete
Returns:
True: The unit was deleted
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# if we are given an object, grab it's name property
# otherwise, convert to unicode
if isinstance(unit, Unit):
unit = unit.name
else:
unit = str(unit)
self._single_request('Units.Delete', unitName=unit)
return True
def list_units(self):
"""Return the current list of the Units in the fleet cluster
Yields:
Unit: The next Unit in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
for page in self._request('Units.List'):
for unit in page.get('units', []):
yield Unit(client=self, data=unit)
def list_unit_states(self, machine_id=None, unit_name=None):
"""Return the current UnitState for the fleet cluster
Args:
machine_id (str): filter all UnitState objects to those
originating from a specific machine
unit_name (str): filter all UnitState objects to those related
to a specific unit
Yields:
UnitState: The next UnitState in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
for page in self._request('UnitState.List', machineID=machine_id, unitName=unit_name):
for state in page.get('states', []):
yield UnitState(data=state)
def list_machines(self):
"""Retrieve a list of machines in the fleet cluster
Yields:
Machine: The next machine in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# loop through each page of results
for page in self._request('Machines.List'):
# return each machine in the current page
for machine in page.get('machines', []):
yield Machine(data=machine)
|
cnelson/python-fleet | fleet/v1/client.py | Client.list_unit_states | python | def list_unit_states(self, machine_id=None, unit_name=None):
for page in self._request('UnitState.List', machineID=machine_id, unitName=unit_name):
for state in page.get('states', []):
yield UnitState(data=state) | Return the current UnitState for the fleet cluster
Args:
machine_id (str): filter all UnitState objects to those
originating from a specific machine
unit_name (str): filter all UnitState objects to those related
to a specific unit
Yields:
UnitState: The next UnitState in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400 | train | https://github.com/cnelson/python-fleet/blob/a11dcd8bb3986d1d8f0af90d2da7399c9cc54b4d/fleet/v1/client.py#L593-L612 | [
"def _request(self, method, *args, **kwargs):\n \"\"\"Make a request with automatic pagination handling\n\n Args:\n method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'\n *args: Passed directly to the method being called.\n **kwargs: Passed directly to the method being called.\n Note: This method will inject the 'nextPageToken' key into `**kwargs` as needed to handle\n pagination overwriting any value specified by the caller. If you wish to handle pagination\n manually use the `_single_request` method\n\n\n Yields:\n dict: The next page of responses from the method called.\n\n\n Raises:\n fleet.v1.errors.APIError: Fleet returned a response code >= 400\n\n \"\"\"\n\n # This is set to False and not None so that the while loop below will execute at least once\n next_page_token = False\n\n while next_page_token is not None:\n # If bool(next_page_token), then include it in the request\n # We do this so we don't pass it in the initial request as we set it to False above\n if next_page_token:\n kwargs['nextPageToken'] = next_page_token\n\n # Make the request\n response = self._single_request(method, *args, **kwargs)\n\n # If there is a token for another page in the response, capture it for the next loop iteration\n # If not, we set it to None so that the loop will terminate\n next_page_token = response.get('nextPageToken', None)\n\n # Return the current response\n yield response\n"
] | class Client(object):
"""A python wrapper for the fleet v1 API
The fleet v1 API is documented here: https://github.com/coreos/fleet/blob/master/Documentation/api-v1.md
"""
_API = 'fleet'
_VERSION = 'v1'
_STATES = ['inactive', 'loaded', 'launched']
def __init__(
self,
endpoint,
http=None,
ssh_tunnel=None,
ssh_username='core',
ssh_timeout=10,
ssh_known_hosts_file='~/.fleetctl/known_hosts',
ssh_strict_host_key_checking=True,
ssh_raw_transport=None
):
"""Connect to the fleet API and generate a client based on it's discovery document.
Args:
endpoint (str): A URL where the fleet API can be reached. Supported schemes are:
http: A HTTP connection over a TCP socket.
Example: http://127.0.0.1:49153
http+unix: A HTTP connection over a unix domain socket. You must escape the path (/ = %2F).
Example: http+unix://%2Fvar%2Frun%2Ffleet.sock
http (httplib2.Http): An instance of httplib2.Http (or something that acts like it) that HTTP requests will
be made through. You do not need to pass this unless you need to configure specific options for your
http client, or want to pass in a mock for testing.
ssh_tunnel (str '<host>[:<port>]'): Establish an SSH tunnel through the provided address for communication
with fleet. Defaults to None. If specified, the following other options adjust it's behaivor:
ssh_username (str): Username to use when connecting to SSH, defaults to 'core'.
ssh_timeout (float): Amount of time in seconds to allow for SSH connection initialization
before failing, defaults to 10.
ssh_known_hosts_file (str): File used to store remote machine fingerprints,
defaults to '~/.fleetctl/known_hosts'. Ignored if `ssh_strict_host_key_checking` is False
ssh_strict_host_key_checking (bool): Verify host keys presented by remote machines before
initiating SSH connections, defaults to True.
ssh_raw_transport (paramiko.transport.Transport): An active Transport on which open_channel() will be
called to establish connections.
See Advanced SSH Tunneling in docs/client.md for more information.
Raises:
ValueError: The endpoint provided was not accessible or your ssh configuration is incorrect
"""
# stash this for later
self._endpoint = endpoint.strip('/')
self._ssh_client = None
# we overload the http when our proxy enabled versin if they request ssh tunneling
# so we need to make sure they didn't give us both
if (ssh_tunnel or ssh_raw_transport) and http:
raise ValueError('You cannot specify your own http client, and request ssh tunneling.')
# only one way to connect, not both
if ssh_tunnel and ssh_raw_transport:
raise ValueError('If ssh_tunnel is specified, ssh_raw_transport must be None')
# see if we need to setup an ssh tunnel
self._ssh_tunnel = None
# if they handed us a transport, then we either bail or are good to go
if ssh_raw_transport:
if not isinstance(ssh_raw_transport, paramiko.transport.Transport):
raise ValueError('ssh_raw_transport must be an active instance of paramiko.transport.Transport.')
self._ssh_tunnel = SSHTunnel(host=ssh_raw_transport)
# otherwise we are connecting ourselves
elif ssh_tunnel:
(ssh_host, ssh_port) = self._split_hostport(ssh_tunnel, default_port=22)
try:
self._ssh_tunnel = SSHTunnel(
host=ssh_host,
port=ssh_port,
username=ssh_username,
timeout=ssh_timeout,
known_hosts_file=ssh_known_hosts_file,
strict_host_key_checking=ssh_strict_host_key_checking
)
except socket.gaierror as exc:
raise ValueError('{0} could not be resolved.'.format(ssh_host))
except socket.error as exc:
raise ValueError('Unable to connect to {0}:{1}: {2}'.format(
ssh_host,
ssh_port,
exc
))
except paramiko.ssh_exception.SSHException as exc:
raise ValueError('Unable to connect via ssh: {0}: {1}'.format(
exc.__class__.__name__,
exc
))
# did we get an ssh connection up?
if self._ssh_tunnel:
# inject the SSH tunnel socketed into httplib via the proxy_info interface
self._http = httplib2.Http(proxy_info=self._get_proxy_info)
# preface our scheme with 'ssh+'; httplib2's SCHEME_TO_CONNECTION
# will invoke our custom connection objects and route the HTTP
# call across the SSH connection established or passed in above
self._endpoint = 'ssh+' + self._endpoint
else:
self._http = http
# if we've made it this far, we are ready to try to talk to fleet
# possibly through a proxy...
# generate a client binding using the google-api-python client.
# See https://developers.google.com/api-client-library/python/start/get_started
# For more information on how to use the generated client binding.
try:
discovery_url = self._endpoint + '/{api}/{apiVersion}/discovery'
self._service = build(
self._API,
self._VERSION,
cache_discovery=False,
discoveryServiceUrl=discovery_url,
http=self._http
)
except socket.error as exc: # pragma: no cover
raise ValueError('Unable to connect to endpoint {0}: {1}'.format(
self._endpoint,
exc
))
except googleapiclient.errors.UnknownApiNameOrVersion as exc:
raise ValueError(
'Connected to endpoint {0} but it is not a fleet v1 API endpoint. '
'This usually means a GET request to {0}/{1}/{2}/discovery failed.'.format(
self._endpoint,
self._API,
self._VERSION
))
def _split_hostport(self, hostport, default_port=None):
"""Split a string in the format of '<host>:<port>' into it's component parts
default_port will be used if a port is not included in the string
Args:
str ('<host>' or '<host>:<port>'): A string to split into it's parts
Returns:
two item tuple: (host, port)
Raises:
ValueError: The string was in an invalid element
"""
try:
(host, port) = hostport.split(':', 1)
except ValueError: # no colon in the string so make our own port
host = hostport
if default_port is None:
raise ValueError('No port found in hostport, and default_port not provided.')
port = default_port
try:
port = int(port)
if port < 1 or port > 65535:
raise ValueError()
except ValueError:
raise ValueError("{0} is not a valid TCP port".format(port))
return (host, port)
def _endpoint_to_target(self, endpoint):
"""Convert a URL into a host / port, or into a path to a unix domain socket
Args:
endpoint (str): A URL parsable by urlparse
Returns:
3 item tuple: (host, port, path).
host and port will None, and path will be not None if a a unix domain socket URL is passed
path will be None if a normal TCP based URL is passed
"""
parsed = urlparse.urlparse(endpoint)
scheme = parsed[0]
hostport = parsed[1]
if 'unix' in scheme:
return (None, None, unquote(hostport))
if scheme == 'https':
target_port = 443
else:
target_port = 80
(target_host, target_port) = self._split_hostport(hostport, default_port=target_port)
return (target_host, target_port, None)
def _get_proxy_info(self, _=None):
"""Generate a ProxyInfo class from a connected SSH transport
Args:
_ (None): Ignored. This is just here as the ProxyInfo spec requires it.
Returns:
SSHTunnelProxyInfo: A ProxyInfo with an active socket tunneled through SSH
"""
# parse the fleet endpoint url, to establish a tunnel to that host
(target_host, target_port, target_path) = self._endpoint_to_target(self._endpoint)
# implement the proxy_info interface from httplib which requires
# that we accept a scheme, and return a ProxyInfo object
# we do :P
# This is called once per request, so we keep this here
# so that we can keep one ssh connection open, and allocate
# new channels as needed per-request
sock = None
if target_path:
sock = self._ssh_tunnel.forward_unix(path=target_path)
else:
sock = self._ssh_tunnel.forward_tcp(target_host, port=target_port)
# Return a ProxyInfo class with this socket
return SSHTunnelProxyInfo(sock=sock)
def _single_request(self, method, *args, **kwargs):
"""Make a single request to the fleet API endpoint
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Returns:
dict: The response from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# The auto generated client binding require instantiating each object you want to call a method on
# For example to make a request to /machines for the list of machines you would do:
# self._service.Machines().List(**kwargs)
# This code iterates through the tokens in `method` and instantiates each object
# Passing the `*args` and `**kwargs` to the final method listed
# Start here
_method = self._service
# iterate over each token in the requested method
for item in method.split('.'):
# if it's the end of the line, pass our argument
if method.endswith(item):
_method = getattr(_method, item)(*args, **kwargs)
else:
# otherwise, just create an instance and move on
_method = getattr(_method, item)()
# Discovered endpoints look like r'$ENDPOINT/path/to/method' which isn't a valid URI
# Per the fleet API documentation:
# "Note that this discovery document intentionally ships with an unusable rootUrl;
# clients must initialize this as appropriate."
# So we follow the documentation, and replace the token with our actual endpoint
_method.uri = _method.uri.replace('$ENDPOINT', self._endpoint)
# Execute the method and return it's output directly
try:
return _method.execute(http=self._http)
except googleapiclient.errors.HttpError as exc:
response = json.loads(exc.content.decode('utf-8'))['error']
raise APIError(code=response['code'], message=response['message'], http_error=exc)
def _request(self, method, *args, **kwargs):
"""Make a request with automatic pagination handling
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Note: This method will inject the 'nextPageToken' key into `**kwargs` as needed to handle
pagination overwriting any value specified by the caller. If you wish to handle pagination
manually use the `_single_request` method
Yields:
dict: The next page of responses from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# This is set to False and not None so that the while loop below will execute at least once
next_page_token = False
while next_page_token is not None:
# If bool(next_page_token), then include it in the request
# We do this so we don't pass it in the initial request as we set it to False above
if next_page_token:
kwargs['nextPageToken'] = next_page_token
# Make the request
response = self._single_request(method, *args, **kwargs)
# If there is a token for another page in the response, capture it for the next loop iteration
# If not, we set it to None so that the loop will terminate
next_page_token = response.get('nextPageToken', None)
# Return the current response
yield response
def create_unit(self, name, unit):
"""Create a new Unit in the cluster
Create and modify Unit entities to communicate to fleet the desired state of the cluster.
This simply declares what should be happening; the backend system still has to react to
the changes in this desired state. The actual state of the system is communicated with
UnitState entities.
Args:
name (str): The name of the unit to create
unit (Unit): The unit to submit to fleet
Returns:
Unit: The unit that was created
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
self._single_request('Units.Set', unitName=name, body={
'desiredState': unit.desiredState,
'options': unit.options
})
return self.get_unit(name)
def set_unit_desired_state(self, unit, desired_state):
"""Update the desired state of a unit running in the cluster
Args:
unit (str, Unit): The Unit, or name of the unit to update
desired_state: State the user wishes the Unit to be in
("inactive", "loaded", or "launched")
Returns:
Unit: The unit that was updated
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
ValueError: An invalid value was provided for ``desired_state``
"""
if desired_state not in self._STATES:
raise ValueError('state must be one of: {0}'.format(
self._STATES
))
# if we are given an object, grab it's name property
# otherwise, convert to unicode
if isinstance(unit, Unit):
unit = unit.name
else:
unit = str(unit)
self._single_request('Units.Set', unitName=unit, body={
'desiredState': desired_state
})
return self.get_unit(unit)
def destroy_unit(self, unit):
"""Delete a unit from the cluster
Args:
unit (str, Unit): The Unit, or name of the unit to delete
Returns:
True: The unit was deleted
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# if we are given an object, grab it's name property
# otherwise, convert to unicode
if isinstance(unit, Unit):
unit = unit.name
else:
unit = str(unit)
self._single_request('Units.Delete', unitName=unit)
return True
def list_units(self):
"""Return the current list of the Units in the fleet cluster
Yields:
Unit: The next Unit in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
for page in self._request('Units.List'):
for unit in page.get('units', []):
yield Unit(client=self, data=unit)
def get_unit(self, name):
"""Retreive a specifi unit from the fleet cluster by name
Args:
name (str): If specified, only this unit name is returned
Returns:
Unit: The unit identified by ``name`` in the fleet cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
return Unit(client=self, data=self._single_request('Units.Get', unitName=name))
def list_machines(self):
"""Retrieve a list of machines in the fleet cluster
Yields:
Machine: The next machine in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# loop through each page of results
for page in self._request('Machines.List'):
# return each machine in the current page
for machine in page.get('machines', []):
yield Machine(data=machine)
|
cnelson/python-fleet | fleet/v1/client.py | Client.list_machines | python | def list_machines(self):
# loop through each page of results
for page in self._request('Machines.List'):
# return each machine in the current page
for machine in page.get('machines', []):
yield Machine(data=machine) | Retrieve a list of machines in the fleet cluster
Yields:
Machine: The next machine in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400 | train | https://github.com/cnelson/python-fleet/blob/a11dcd8bb3986d1d8f0af90d2da7399c9cc54b4d/fleet/v1/client.py#L614-L628 | [
"def _request(self, method, *args, **kwargs):\n \"\"\"Make a request with automatic pagination handling\n\n Args:\n method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'\n *args: Passed directly to the method being called.\n **kwargs: Passed directly to the method being called.\n Note: This method will inject the 'nextPageToken' key into `**kwargs` as needed to handle\n pagination overwriting any value specified by the caller. If you wish to handle pagination\n manually use the `_single_request` method\n\n\n Yields:\n dict: The next page of responses from the method called.\n\n\n Raises:\n fleet.v1.errors.APIError: Fleet returned a response code >= 400\n\n \"\"\"\n\n # This is set to False and not None so that the while loop below will execute at least once\n next_page_token = False\n\n while next_page_token is not None:\n # If bool(next_page_token), then include it in the request\n # We do this so we don't pass it in the initial request as we set it to False above\n if next_page_token:\n kwargs['nextPageToken'] = next_page_token\n\n # Make the request\n response = self._single_request(method, *args, **kwargs)\n\n # If there is a token for another page in the response, capture it for the next loop iteration\n # If not, we set it to None so that the loop will terminate\n next_page_token = response.get('nextPageToken', None)\n\n # Return the current response\n yield response\n"
] | class Client(object):
"""A python wrapper for the fleet v1 API
The fleet v1 API is documented here: https://github.com/coreos/fleet/blob/master/Documentation/api-v1.md
"""
_API = 'fleet'
_VERSION = 'v1'
_STATES = ['inactive', 'loaded', 'launched']
def __init__(
self,
endpoint,
http=None,
ssh_tunnel=None,
ssh_username='core',
ssh_timeout=10,
ssh_known_hosts_file='~/.fleetctl/known_hosts',
ssh_strict_host_key_checking=True,
ssh_raw_transport=None
):
"""Connect to the fleet API and generate a client based on it's discovery document.
Args:
endpoint (str): A URL where the fleet API can be reached. Supported schemes are:
http: A HTTP connection over a TCP socket.
Example: http://127.0.0.1:49153
http+unix: A HTTP connection over a unix domain socket. You must escape the path (/ = %2F).
Example: http+unix://%2Fvar%2Frun%2Ffleet.sock
http (httplib2.Http): An instance of httplib2.Http (or something that acts like it) that HTTP requests will
be made through. You do not need to pass this unless you need to configure specific options for your
http client, or want to pass in a mock for testing.
ssh_tunnel (str '<host>[:<port>]'): Establish an SSH tunnel through the provided address for communication
with fleet. Defaults to None. If specified, the following other options adjust it's behaivor:
ssh_username (str): Username to use when connecting to SSH, defaults to 'core'.
ssh_timeout (float): Amount of time in seconds to allow for SSH connection initialization
before failing, defaults to 10.
ssh_known_hosts_file (str): File used to store remote machine fingerprints,
defaults to '~/.fleetctl/known_hosts'. Ignored if `ssh_strict_host_key_checking` is False
ssh_strict_host_key_checking (bool): Verify host keys presented by remote machines before
initiating SSH connections, defaults to True.
ssh_raw_transport (paramiko.transport.Transport): An active Transport on which open_channel() will be
called to establish connections.
See Advanced SSH Tunneling in docs/client.md for more information.
Raises:
ValueError: The endpoint provided was not accessible or your ssh configuration is incorrect
"""
# stash this for later
self._endpoint = endpoint.strip('/')
self._ssh_client = None
# we overload the http when our proxy enabled versin if they request ssh tunneling
# so we need to make sure they didn't give us both
if (ssh_tunnel or ssh_raw_transport) and http:
raise ValueError('You cannot specify your own http client, and request ssh tunneling.')
# only one way to connect, not both
if ssh_tunnel and ssh_raw_transport:
raise ValueError('If ssh_tunnel is specified, ssh_raw_transport must be None')
# see if we need to setup an ssh tunnel
self._ssh_tunnel = None
# if they handed us a transport, then we either bail or are good to go
if ssh_raw_transport:
if not isinstance(ssh_raw_transport, paramiko.transport.Transport):
raise ValueError('ssh_raw_transport must be an active instance of paramiko.transport.Transport.')
self._ssh_tunnel = SSHTunnel(host=ssh_raw_transport)
# otherwise we are connecting ourselves
elif ssh_tunnel:
(ssh_host, ssh_port) = self._split_hostport(ssh_tunnel, default_port=22)
try:
self._ssh_tunnel = SSHTunnel(
host=ssh_host,
port=ssh_port,
username=ssh_username,
timeout=ssh_timeout,
known_hosts_file=ssh_known_hosts_file,
strict_host_key_checking=ssh_strict_host_key_checking
)
except socket.gaierror as exc:
raise ValueError('{0} could not be resolved.'.format(ssh_host))
except socket.error as exc:
raise ValueError('Unable to connect to {0}:{1}: {2}'.format(
ssh_host,
ssh_port,
exc
))
except paramiko.ssh_exception.SSHException as exc:
raise ValueError('Unable to connect via ssh: {0}: {1}'.format(
exc.__class__.__name__,
exc
))
# did we get an ssh connection up?
if self._ssh_tunnel:
# inject the SSH tunnel socketed into httplib via the proxy_info interface
self._http = httplib2.Http(proxy_info=self._get_proxy_info)
# preface our scheme with 'ssh+'; httplib2's SCHEME_TO_CONNECTION
# will invoke our custom connection objects and route the HTTP
# call across the SSH connection established or passed in above
self._endpoint = 'ssh+' + self._endpoint
else:
self._http = http
# if we've made it this far, we are ready to try to talk to fleet
# possibly through a proxy...
# generate a client binding using the google-api-python client.
# See https://developers.google.com/api-client-library/python/start/get_started
# For more infomation on how to use the generated client binding.
try:
discovery_url = self._endpoint + '/{api}/{apiVersion}/discovery'
self._service = build(
self._API,
self._VERSION,
cache_discovery=False,
discoveryServiceUrl=discovery_url,
http=self._http
)
except socket.error as exc: # pragma: no cover
raise ValueError('Unable to connect to endpoint {0}: {1}'.format(
self._endpoint,
exc
))
except googleapiclient.errors.UnknownApiNameOrVersion as exc:
raise ValueError(
'Connected to endpoint {0} but it is not a fleet v1 API endpoint. '
'This usually means a GET request to {0}/{1}/{2}/discovery failed.'.format(
self._endpoint,
self._API,
self._VERSION
))
def _split_hostport(self, hostport, default_port=None):
"""Split a string in the format of '<host>:<port>' into it's component parts
default_port will be used if a port is not included in the string
Args:
str ('<host>' or '<host>:<port>'): A string to split into it's parts
Returns:
two item tuple: (host, port)
Raises:
ValueError: The string was in an invalid element
"""
try:
(host, port) = hostport.split(':', 1)
except ValueError: # no colon in the string so make our own port
host = hostport
if default_port is None:
raise ValueError('No port found in hostport, and default_port not provided.')
port = default_port
try:
port = int(port)
if port < 1 or port > 65535:
raise ValueError()
except ValueError:
raise ValueError("{0} is not a valid TCP port".format(port))
return (host, port)
def _endpoint_to_target(self, endpoint):
"""Convert a URL into a host / port, or into a path to a unix domain socket
Args:
endpoint (str): A URL parsable by urlparse
Returns:
3 item tuple: (host, port, path).
host and port will None, and path will be not None if a a unix domain socket URL is passed
path will be None if a normal TCP based URL is passed
"""
parsed = urlparse.urlparse(endpoint)
scheme = parsed[0]
hostport = parsed[1]
if 'unix' in scheme:
return (None, None, unquote(hostport))
if scheme == 'https':
target_port = 443
else:
target_port = 80
(target_host, target_port) = self._split_hostport(hostport, default_port=target_port)
return (target_host, target_port, None)
def _get_proxy_info(self, _=None):
"""Generate a ProxyInfo class from a connected SSH transport
Args:
_ (None): Ignored. This is just here as the ProxyInfo spec requires it.
Returns:
SSHTunnelProxyInfo: A ProxyInfo with an active socket tunneled through SSH
"""
# parse the fleet endpoint url, to establish a tunnel to that host
(target_host, target_port, target_path) = self._endpoint_to_target(self._endpoint)
# implement the proxy_info interface from httplib which requires
# that we accept a scheme, and return a ProxyInfo object
# we do :P
# This is called once per request, so we keep this here
# so that we can keep one ssh connection open, and allocate
# new channels as needed per-request
sock = None
if target_path:
sock = self._ssh_tunnel.forward_unix(path=target_path)
else:
sock = self._ssh_tunnel.forward_tcp(target_host, port=target_port)
# Return a ProxyInfo class with this socket
return SSHTunnelProxyInfo(sock=sock)
def _single_request(self, method, *args, **kwargs):
"""Make a single request to the fleet API endpoint
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Returns:
dict: The response from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# The auto generated client binding require instantiating each object you want to call a method on
# For example to make a request to /machines for the list of machines you would do:
# self._service.Machines().List(**kwargs)
# This code iterates through the tokens in `method` and instantiates each object
# Passing the `*args` and `**kwargs` to the final method listed
# Start here
_method = self._service
# iterate over each token in the requested method
for item in method.split('.'):
# if it's the end of the line, pass our argument
if method.endswith(item):
_method = getattr(_method, item)(*args, **kwargs)
else:
# otherwise, just create an instance and move on
_method = getattr(_method, item)()
# Discovered endpoints look like r'$ENDPOINT/path/to/method' which isn't a valid URI
# Per the fleet API documentation:
# "Note that this discovery document intentionally ships with an unusable rootUrl;
# clients must initialize this as appropriate."
# So we follow the documentation, and replace the token with our actual endpoint
_method.uri = _method.uri.replace('$ENDPOINT', self._endpoint)
# Execute the method and return it's output directly
try:
return _method.execute(http=self._http)
except googleapiclient.errors.HttpError as exc:
response = json.loads(exc.content.decode('utf-8'))['error']
raise APIError(code=response['code'], message=response['message'], http_error=exc)
def _request(self, method, *args, **kwargs):
"""Make a request with automatic pagination handling
Args:
method (str): A dot delimited string indicating the method to call. Example: 'Machines.List'
*args: Passed directly to the method being called.
**kwargs: Passed directly to the method being called.
Note: This method will inject the 'nextPageToken' key into `**kwargs` as needed to handle
pagination overwriting any value specified by the caller. If you wish to handle pagination
manually use the `_single_request` method
Yields:
dict: The next page of responses from the method called.
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# This is set to False and not None so that the while loop below will execute at least once
next_page_token = False
while next_page_token is not None:
# If bool(next_page_token), then include it in the request
# We do this so we don't pass it in the initial request as we set it to False above
if next_page_token:
kwargs['nextPageToken'] = next_page_token
# Make the request
response = self._single_request(method, *args, **kwargs)
# If there is a token for another page in the response, capture it for the next loop iteration
# If not, we set it to None so that the loop will terminate
next_page_token = response.get('nextPageToken', None)
# Return the current response
yield response
def create_unit(self, name, unit):
"""Create a new Unit in the cluster
Create and modify Unit entities to communicate to fleet the desired state of the cluster.
This simply declares what should be happening; the backend system still has to react to
the changes in this desired state. The actual state of the system is communicated with
UnitState entities.
Args:
name (str): The name of the unit to create
unit (Unit): The unit to submit to fleet
Returns:
Unit: The unit that was created
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
self._single_request('Units.Set', unitName=name, body={
'desiredState': unit.desiredState,
'options': unit.options
})
return self.get_unit(name)
def set_unit_desired_state(self, unit, desired_state):
"""Update the desired state of a unit running in the cluster
Args:
unit (str, Unit): The Unit, or name of the unit to update
desired_state: State the user wishes the Unit to be in
("inactive", "loaded", or "launched")
Returns:
Unit: The unit that was updated
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
ValueError: An invalid value was provided for ``desired_state``
"""
if desired_state not in self._STATES:
raise ValueError('state must be one of: {0}'.format(
self._STATES
))
# if we are given an object, grab it's name property
# otherwise, convert to unicode
if isinstance(unit, Unit):
unit = unit.name
else:
unit = str(unit)
self._single_request('Units.Set', unitName=unit, body={
'desiredState': desired_state
})
return self.get_unit(unit)
def destroy_unit(self, unit):
"""Delete a unit from the cluster
Args:
unit (str, Unit): The Unit, or name of the unit to delete
Returns:
True: The unit was deleted
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# if we are given an object, grab it's name property
# otherwise, convert to unicode
if isinstance(unit, Unit):
unit = unit.name
else:
unit = str(unit)
self._single_request('Units.Delete', unitName=unit)
return True
def list_units(self):
"""Return the current list of the Units in the fleet cluster
Yields:
Unit: The next Unit in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
for page in self._request('Units.List'):
for unit in page.get('units', []):
yield Unit(client=self, data=unit)
def get_unit(self, name):
"""Retreive a specifi unit from the fleet cluster by name
Args:
name (str): If specified, only this unit name is returned
Returns:
Unit: The unit identified by ``name`` in the fleet cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
return Unit(client=self, data=self._single_request('Units.Get', unitName=name))
def list_unit_states(self, machine_id=None, unit_name=None):
"""Return the current UnitState for the fleet cluster
Args:
machine_id (str): filter all UnitState objects to those
originating from a specific machine
unit_name (str): filter all UnitState objects to those related
to a specific unit
Yields:
UnitState: The next UnitState in the cluster
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
for page in self._request('UnitState.List', machineID=machine_id, unitName=unit_name):
for state in page.get('states', []):
yield UnitState(data=state)
|
cnelson/python-fleet | fleet/http/unix_socket.py | UnixConnectionWithTimeout.connect | python | def connect(self):
"""Connect to the unix domain socket, which is passed to us as self.host
This is in host because the format we use for the unix domain socket is:
http+unix://%2Fpath%2Fto%2Fsocket.sock
"""
try:
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
if has_timeout(self.timeout):
self.sock.settimeout(self.timeout)
self.sock.connect(unquote(self.host))
except socket.error as msg:
if self.sock:
self.sock.close()
self.sock = None
raise socket.error(msg) | Connect to the unix domain socket, which is passed to us as self.host
This is in host because the format we use for the unix domain socket is:
http+unix://%2Fpath%2Fto%2Fsocket.sock | train | https://github.com/cnelson/python-fleet/blob/a11dcd8bb3986d1d8f0af90d2da7399c9cc54b4d/fleet/http/unix_socket.py#L37-L57 | [
"def has_timeout(timeout): # pragma: no cover\n if hasattr(socket, '_GLOBAL_DEFAULT_TIMEOUT'):\n return (timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT)\n return (timeout is not None)\n"
] | class UnixConnectionWithTimeout(httplib.HTTPConnection):
"""
HTTP over UNIX Domain Sockets
"""
def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None):
httplib.HTTPConnection.__init__(self, host, port)
self.timeout = timeout
def connect(self):
"""Connect to the unix domain socket, which is passed to us as self.host
This is in host because the format we use for the unix domain socket is:
http+unix://%2Fpath%2Fto%2Fsocket.sock
"""
try:
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
if has_timeout(self.timeout):
self.sock.settimeout(self.timeout)
self.sock.connect(unquote(self.host))
except socket.error as msg:
if self.sock:
self.sock.close()
self.sock = None
raise socket.error(msg)
|
ascribe/pyspool | spool/wallet.py | Wallet.address_from_path | python | def address_from_path(self, path=None):
path = path if path else self._unique_hierarchical_string()
return path, self.wallet.subkey_for_path(path).address() | Args:
path (str): Path for the HD wallet. If path is ``None`` it
will generate a unique path based on time.
Returns:
A ``tuple`` with the path and leaf address. | train | https://github.com/ascribe/pyspool/blob/f8b10df1e7d2ea7950dde433c1cb6d5225112f4f/spool/wallet.py#L42-L53 | [
"def _unique_hierarchical_string(self):\n \"\"\"\n Returns:\n str: a representation of time such as::\n\n '2014/2/23/15/26/8/9877978'\n\n The last part (microsecond) is needed to avoid duplicates in\n rapid-fire transactions e.g. ``> 1`` edition.\n\n \"\"\"\n t = datetime.now()\n return '%s/%s/%s/%s/%s/%s/%s' % (t.year, t.month, t.day, t.hour,\n t.minute, t.second, t.microsecond)\n"
] | class Wallet(object):
"""
Represents a BIP32 wallet.
Attributes:
wallet (BIP32Node): :class:`BIP32NOde` instance.
root_address (Tuple[str]): Root address of the HD Wallet.
"""
def __init__(self, password, testnet=False):
"""
Initializes a BIP32 wallet.
Addresses returned by the wallet are of the form ``(path, address)``.
Args:
password (bytes): Master secret for the wallet. The password can
also be passed as a string (``str``).
testnet (bool): Wwether to use the bitcoin testnet or mainnet.
Defaults to ``False``.
"""
netcode = 'XTN' if testnet else 'BTC'
if isinstance(password, str):
password = password.encode()
self.wallet = BIP32Node.from_master_secret(password, netcode=netcode)
self.root_address = ('', self.wallet.address())
def _unique_hierarchical_string(self):
"""
Returns:
str: a representation of time such as::
'2014/2/23/15/26/8/9877978'
The last part (microsecond) is needed to avoid duplicates in
rapid-fire transactions e.g. ``> 1`` edition.
"""
t = datetime.now()
return '%s/%s/%s/%s/%s/%s/%s' % (t.year, t.month, t.day, t.hour,
t.minute, t.second, t.microsecond)
|
ascribe/pyspool | spool/wallet.py | Wallet._unique_hierarchical_string | python | def _unique_hierarchical_string(self):
t = datetime.now()
return '%s/%s/%s/%s/%s/%s/%s' % (t.year, t.month, t.day, t.hour,
t.minute, t.second, t.microsecond) | Returns:
str: a representation of time such as::
'2014/2/23/15/26/8/9877978'
The last part (microsecond) is needed to avoid duplicates in
rapid-fire transactions e.g. ``> 1`` edition. | train | https://github.com/ascribe/pyspool/blob/f8b10df1e7d2ea7950dde433c1cb6d5225112f4f/spool/wallet.py#L55-L68 | null | class Wallet(object):
"""
Represents a BIP32 wallet.
Attributes:
wallet (BIP32Node): :class:`BIP32NOde` instance.
root_address (Tuple[str]): Root address of the HD Wallet.
"""
def __init__(self, password, testnet=False):
"""
Initializes a BIP32 wallet.
Addresses returned by the wallet are of the form ``(path, address)``.
Args:
password (bytes): Master secret for the wallet. The password can
also be passed as a string (``str``).
testnet (bool): Wwether to use the bitcoin testnet or mainnet.
Defaults to ``False``.
"""
netcode = 'XTN' if testnet else 'BTC'
if isinstance(password, str):
password = password.encode()
self.wallet = BIP32Node.from_master_secret(password, netcode=netcode)
self.root_address = ('', self.wallet.address())
def address_from_path(self, path=None):
"""
Args:
path (str): Path for the HD wallet. If path is ``None`` it
will generate a unique path based on time.
Returns:
A ``tuple`` with the path and leaf address.
"""
path = path if path else self._unique_hierarchical_string()
return path, self.wallet.subkey_for_path(path).address()
|
ascribe/pyspool | spool/ownership.py | Ownership.can_transfer | python | def can_transfer(self):
# 1. The address needs to own the edition
chain = BlockchainSpider.chain(self._tree, self.edition_number)
if len(chain) == 0:
self.reason = 'The edition number {} does not exist in the blockchain'.format(self.edition_number)
return False
chain = BlockchainSpider.strip_loan(chain)
to_address = chain[-1]['to_address']
if to_address != self.address:
self.reason = 'Address {} does not own the edition number {}'.format(self.address, self.edition_number)
return False
return True | bool: :const:`True` if :attr:`address` can transfer the edition
:attr:`edition_number` of :attr:`piece_address` else :const:`False`. | train | https://github.com/ascribe/pyspool/blob/f8b10df1e7d2ea7950dde433c1cb6d5225112f4f/spool/ownership.py#L85-L104 | [
"def chain(tree, edition_number):\n \"\"\"\n Args:\n tree (dict): Tree history of all editions of a piece.\n edition_number (int): The edition number to check for.\n In the case of a piece (master edition), an empty\n string (``''``) or zero (``0``) can be passed.\n\n Returns:\n list: The chain of ownsership of a particular\n edition of the piece ordered by time.\n\n \"\"\"\n # return the chain for an edition_number sorted by the timestamp\n return sorted(tree.get(edition_number, []), key=lambda d: d['timestamp_utc'])\n",
"def strip_loan(chain):\n \"\"\"\n Returns the chain without loan. This way we can\n look at the last transaction to establish ownership.\n\n Args:\n chain (list): Chain for a particular edition.\n\n Returns:\n list: Chain with loan transactions striped\n from the end of the chain.\n\n \"\"\"\n while chain[-1]['action'] == 'LOAN':\n chain.pop()\n\n return chain\n"
] | class Ownership(object):
"""
Checks the actions that an address can make on a piece.
Attributes:
address (str): Bitcoin address to check ownership over
:attr:`piece_address`.
piece_address (str): Bitcoin address of the piece to check.
edition_number (int): The edition number of the piece.
testnet (bool): Bitcoin network. :const:`True` for
``testnet`` or :const:`False` for ``mainnet``.
reason (str): Message indicating the reason
for the failure of an ownership property.
"""
def __init__(self, address, piece_address, edition_number, testnet=False,
service='blockr', username='', password='', host='', port=''):
"""
Args:
address (str): Bitcoin address to check ownership over
``piece_address``.
piece_address (str): Bitcoin address of the piece to check.
edition_number (int): The edition number of the piece.
testnet (Optional[boo]l): Whether to use the testnet
(:const:`True`) or the mainnet (:const:`False`).
Defaults to :const:`False`.
service (Optional[str]): Name of service to use to connect
to the bitcoin network. Possible names are
``('blockr', 'daemon', 'regtest')``. Defaults to ``'blockr'``.
username (Optional[str]): Username to connect to a bitcoin node
via json-rpc based services: ``('daemon', 'regtest')``.
password (Optional[str]): Password to connect to a bitcoin node
via json-rpc based services: ``('daemon', 'regtest').``
host (Optional[str]): Host of the bitcoin node to connect to
via json-rpc based services: ``('daemon', 'regtest')``.
port (Optional[str]): Port of the bitcoin node to connect to
via json-rpc based services: ``('daemon', 'regtest')``.
"""
self.address = address
self.piece_address = piece_address
self.edition_number = edition_number
self.testnet = testnet
self._bcs = BlockchainSpider(service=service, testnet=testnet, username=username,
password=password, host=host, port=port)
self._tree = self._bcs.history(piece_address)
self.reason = ''
@property
@property
def can_consign(self):
"""
bool: :const:`True` if :attr:`address` can consign the edition
:attr:`edition_number` of :attr:`piece_address` else :const:`False`.
"""
return self.can_transfer
@property
def can_loan(self):
"""
bool: :const:`True` if :attr:`address` can loan the edition
:attr:`edition_number` of :attr:`piece_address` else :const:`False`.
"""
return self.can_transfer
@property
def can_unconsign(self):
"""
bool: :const:`True` if :attr:`address` can unconsign the edition
:attr:`edition_number` of :attr:`piece_address` else :const:`False`.
If the last transaction is a consignment of the edition to the user.
"""
chain = BlockchainSpider.chain(self._tree, self.edition_number)
if len(chain) == 0:
self.reason = 'Master edition not yet registered'
return False
chain = BlockchainSpider.strip_loan(chain)
action = chain[-1]['action']
piece_address = chain[-1]['piece_address']
edition_number = chain[-1]['edition_number']
to_address = chain[-1]['to_address']
if action != 'CONSIGN' or piece_address != self.piece_address or edition_number != self.edition_number or to_address != self.address:
self.reason = 'Edition number {} is not consigned to {}'.format(self.edition_number, self.address)
return False
return True
@property
def can_register(self):
"""
bool: :const:`True` if :attr:`address` can register the edition
:attr:`edition_number` of :attr:`piece_address` else :const:`False`.
In order to register an edition:
1. The master piece needs to be registered.
2. The number of editions needs to be registered.
3. The :attr:`edition_number` should not have been registered yet.
.. todo:: Also check that the root address owns the piece.
Right now we cannot do this because we only receive
the leaf address when registering an edition.
"""
chain = BlockchainSpider.chain(self._tree, REGISTERED_PIECE_CODE)
# edition 0 should only have two transactions: REGISTER and EDITIONS
if len(chain) == 0:
self.reason = 'Master edition not yet registered'
return False
chain = BlockchainSpider.strip_loan(chain)
number_editions = chain[0]['number_editions']
if number_editions == 0:
self.reason = 'Number of editions not yet registered'
return False
if self.edition_number > number_editions:
self.reason = 'You can only register {} editions. You are trying to register edition {}'.format(number_editions, self.edition_number)
return False
if self.edition_number in self._tree:
self.reason = 'Edition number {} is already registered in the blockchain'. format(self.edition_number)
return False
return True
@property
def can_register_master(self):
"""
bool: :const:`True` if :attr:`address` can register the master
edition of :attr:`piece_address` else :const:`False`.
To register a master edition the piece address cannot exist in the
bitcoin network.
"""
if self._tree != {}:
self.reason = 'Master piece already registered in the blockchain'
return False
return True
@property
def can_editions(self):
"""
bool: :const:`True` if :attr:`address` can register the number of
editions of :attr:`piece_address` else :const:`False`.
In order to register the number of editions:
1. There needs to a least one transaction for the :attr:`piece_address`
(the registration of the master edition).
2. A piece with address :attr:`piece_address` needs to be registered
with ``'ASCRIBESPOOL01PIECE'`` (master edition).
3. The number of editions should have not been set yet (no tx with
verb ``'ASCRIBESPOOLEDITIONS'``).
"""
chain = BlockchainSpider.chain(self._tree, REGISTERED_PIECE_CODE)
if len(chain) == 0:
self.reason = 'Master edition not yet registered'
return False
number_editions = chain[0]['number_editions']
if number_editions != 0:
self.reason = 'Number of editions was already registered for this piece'
return False
return True
|
ascribe/pyspool | spool/ownership.py | Ownership.can_unconsign | python | def can_unconsign(self):
chain = BlockchainSpider.chain(self._tree, self.edition_number)
if len(chain) == 0:
self.reason = 'Master edition not yet registered'
return False
chain = BlockchainSpider.strip_loan(chain)
action = chain[-1]['action']
piece_address = chain[-1]['piece_address']
edition_number = chain[-1]['edition_number']
to_address = chain[-1]['to_address']
if action != 'CONSIGN' or piece_address != self.piece_address or edition_number != self.edition_number or to_address != self.address:
self.reason = 'Edition number {} is not consigned to {}'.format(self.edition_number, self.address)
return False
return True | bool: :const:`True` if :attr:`address` can unconsign the edition
:attr:`edition_number` of :attr:`piece_address` else :const:`False`.
If the last transaction is a consignment of the edition to the user. | train | https://github.com/ascribe/pyspool/blob/f8b10df1e7d2ea7950dde433c1cb6d5225112f4f/spool/ownership.py#L125-L148 | [
"def chain(tree, edition_number):\n \"\"\"\n Args:\n tree (dict): Tree history of all editions of a piece.\n edition_number (int): The edition number to check for.\n In the case of a piece (master edition), an empty\n string (``''``) or zero (``0``) can be passed.\n\n Returns:\n list: The chain of ownsership of a particular\n edition of the piece ordered by time.\n\n \"\"\"\n # return the chain for an edition_number sorted by the timestamp\n return sorted(tree.get(edition_number, []), key=lambda d: d['timestamp_utc'])\n",
"def strip_loan(chain):\n \"\"\"\n Returns the chain without loan. This way we can\n look at the last transaction to establish ownership.\n\n Args:\n chain (list): Chain for a particular edition.\n\n Returns:\n list: Chain with loan transactions striped\n from the end of the chain.\n\n \"\"\"\n while chain[-1]['action'] == 'LOAN':\n chain.pop()\n\n return chain\n"
] | class Ownership(object):
"""
Checks the actions that an address can make on a piece.
Attributes:
address (str): Bitcoin address to check ownership over
:attr:`piece_address`.
piece_address (str): Bitcoin address of the piece to check.
edition_number (int): The edition number of the piece.
testnet (bool): Bitcoin network. :const:`True` for
``testnet`` or :const:`False` for ``mainnet``.
reason (str): Message indicating the reason
for the failure of an ownership property.
"""
def __init__(self, address, piece_address, edition_number, testnet=False,
service='blockr', username='', password='', host='', port=''):
"""
Args:
address (str): Bitcoin address to check ownership over
``piece_address``.
piece_address (str): Bitcoin address of the piece to check.
edition_number (int): The edition number of the piece.
testnet (Optional[boo]l): Whether to use the testnet
(:const:`True`) or the mainnet (:const:`False`).
Defaults to :const:`False`.
service (Optional[str]): Name of service to use to connect
to the bitcoin network. Possible names are
``('blockr', 'daemon', 'regtest')``. Defaults to ``'blockr'``.
username (Optional[str]): Username to connect to a bitcoin node
via json-rpc based services: ``('daemon', 'regtest')``.
password (Optional[str]): Password to connect to a bitcoin node
via json-rpc based services: ``('daemon', 'regtest').``
host (Optional[str]): Host of the bitcoin node to connect to
via json-rpc based services: ``('daemon', 'regtest')``.
port (Optional[str]): Port of the bitcoin node to connect to
via json-rpc based services: ``('daemon', 'regtest')``.
"""
self.address = address
self.piece_address = piece_address
self.edition_number = edition_number
self.testnet = testnet
self._bcs = BlockchainSpider(service=service, testnet=testnet, username=username,
password=password, host=host, port=port)
self._tree = self._bcs.history(piece_address)
self.reason = ''
@property
def can_transfer(self):
"""
bool: :const:`True` if :attr:`address` can transfer the edition
:attr:`edition_number` of :attr:`piece_address` else :const:`False`.
"""
# 1. The address needs to own the edition
chain = BlockchainSpider.chain(self._tree, self.edition_number)
if len(chain) == 0:
self.reason = 'The edition number {} does not exist in the blockchain'.format(self.edition_number)
return False
chain = BlockchainSpider.strip_loan(chain)
to_address = chain[-1]['to_address']
if to_address != self.address:
self.reason = 'Address {} does not own the edition number {}'.format(self.address, self.edition_number)
return False
return True
@property
def can_consign(self):
"""
bool: :const:`True` if :attr:`address` can consign the edition
:attr:`edition_number` of :attr:`piece_address` else :const:`False`.
"""
return self.can_transfer
@property
def can_loan(self):
"""
bool: :const:`True` if :attr:`address` can loan the edition
:attr:`edition_number` of :attr:`piece_address` else :const:`False`.
"""
return self.can_transfer
@property
@property
def can_register(self):
"""
bool: :const:`True` if :attr:`address` can register the edition
:attr:`edition_number` of :attr:`piece_address` else :const:`False`.
In order to register an edition:
1. The master piece needs to be registered.
2. The number of editions needs to be registered.
3. The :attr:`edition_number` should not have been registered yet.
.. todo:: Also check that the root address owns the piece.
Right now we cannot do this because we only receive
the leaf address when registering an edition.
"""
chain = BlockchainSpider.chain(self._tree, REGISTERED_PIECE_CODE)
# edition 0 should only have two transactions: REGISTER and EDITIONS
if len(chain) == 0:
self.reason = 'Master edition not yet registered'
return False
chain = BlockchainSpider.strip_loan(chain)
number_editions = chain[0]['number_editions']
if number_editions == 0:
self.reason = 'Number of editions not yet registered'
return False
if self.edition_number > number_editions:
self.reason = 'You can only register {} editions. You are trying to register edition {}'.format(number_editions, self.edition_number)
return False
if self.edition_number in self._tree:
self.reason = 'Edition number {} is already registered in the blockchain'. format(self.edition_number)
return False
return True
@property
def can_register_master(self):
"""
bool: :const:`True` if :attr:`address` can register the master
edition of :attr:`piece_address` else :const:`False`.
To register a master edition the piece address cannot exist in the
bitcoin network.
"""
if self._tree != {}:
self.reason = 'Master piece already registered in the blockchain'
return False
return True
@property
def can_editions(self):
"""
bool: :const:`True` if :attr:`address` can register the number of
editions of :attr:`piece_address` else :const:`False`.
In order to register the number of editions:
1. There needs to a least one transaction for the :attr:`piece_address`
(the registration of the master edition).
2. A piece with address :attr:`piece_address` needs to be registered
with ``'ASCRIBESPOOL01PIECE'`` (master edition).
3. The number of editions should have not been set yet (no tx with
verb ``'ASCRIBESPOOLEDITIONS'``).
"""
chain = BlockchainSpider.chain(self._tree, REGISTERED_PIECE_CODE)
if len(chain) == 0:
self.reason = 'Master edition not yet registered'
return False
number_editions = chain[0]['number_editions']
if number_editions != 0:
self.reason = 'Number of editions was already registered for this piece'
return False
return True
|
ascribe/pyspool | spool/ownership.py | Ownership.can_register | python | def can_register(self):
chain = BlockchainSpider.chain(self._tree, REGISTERED_PIECE_CODE)
# edition 0 should only have two transactions: REGISTER and EDITIONS
if len(chain) == 0:
self.reason = 'Master edition not yet registered'
return False
chain = BlockchainSpider.strip_loan(chain)
number_editions = chain[0]['number_editions']
if number_editions == 0:
self.reason = 'Number of editions not yet registered'
return False
if self.edition_number > number_editions:
self.reason = 'You can only register {} editions. You are trying to register edition {}'.format(number_editions, self.edition_number)
return False
if self.edition_number in self._tree:
self.reason = 'Edition number {} is already registered in the blockchain'. format(self.edition_number)
return False
return True | bool: :const:`True` if :attr:`address` can register the edition
:attr:`edition_number` of :attr:`piece_address` else :const:`False`.
In order to register an edition:
1. The master piece needs to be registered.
2. The number of editions needs to be registered.
3. The :attr:`edition_number` should not have been registered yet.
.. todo:: Also check that the root address owns the piece.
Right now we cannot do this because we only receive
the leaf address when registering an edition. | train | https://github.com/ascribe/pyspool/blob/f8b10df1e7d2ea7950dde433c1cb6d5225112f4f/spool/ownership.py#L151-L188 | [
"def chain(tree, edition_number):\n \"\"\"\n Args:\n tree (dict): Tree history of all editions of a piece.\n edition_number (int): The edition number to check for.\n In the case of a piece (master edition), an empty\n string (``''``) or zero (``0``) can be passed.\n\n Returns:\n list: The chain of ownsership of a particular\n edition of the piece ordered by time.\n\n \"\"\"\n # return the chain for an edition_number sorted by the timestamp\n return sorted(tree.get(edition_number, []), key=lambda d: d['timestamp_utc'])\n",
"def strip_loan(chain):\n \"\"\"\n Returns the chain without loan. This way we can\n look at the last transaction to establish ownership.\n\n Args:\n chain (list): Chain for a particular edition.\n\n Returns:\n list: Chain with loan transactions striped\n from the end of the chain.\n\n \"\"\"\n while chain[-1]['action'] == 'LOAN':\n chain.pop()\n\n return chain\n"
] | class Ownership(object):
"""
Checks the actions that an address can make on a piece.
Attributes:
address (str): Bitcoin address to check ownership over
:attr:`piece_address`.
piece_address (str): Bitcoin address of the piece to check.
edition_number (int): The edition number of the piece.
testnet (bool): Bitcoin network. :const:`True` for
``testnet`` or :const:`False` for ``mainnet``.
reason (str): Message indicating the reason
for the failure of an ownership property.
"""
def __init__(self, address, piece_address, edition_number, testnet=False,
service='blockr', username='', password='', host='', port=''):
"""
Args:
address (str): Bitcoin address to check ownership over
``piece_address``.
piece_address (str): Bitcoin address of the piece to check.
edition_number (int): The edition number of the piece.
testnet (Optional[boo]l): Whether to use the testnet
(:const:`True`) or the mainnet (:const:`False`).
Defaults to :const:`False`.
service (Optional[str]): Name of service to use to connect
to the bitcoin network. Possible names are
``('blockr', 'daemon', 'regtest')``. Defaults to ``'blockr'``.
username (Optional[str]): Username to connect to a bitcoin node
via json-rpc based services: ``('daemon', 'regtest')``.
password (Optional[str]): Password to connect to a bitcoin node
via json-rpc based services: ``('daemon', 'regtest').``
host (Optional[str]): Host of the bitcoin node to connect to
via json-rpc based services: ``('daemon', 'regtest')``.
port (Optional[str]): Port of the bitcoin node to connect to
via json-rpc based services: ``('daemon', 'regtest')``.
"""
self.address = address
self.piece_address = piece_address
self.edition_number = edition_number
self.testnet = testnet
self._bcs = BlockchainSpider(service=service, testnet=testnet, username=username,
password=password, host=host, port=port)
self._tree = self._bcs.history(piece_address)
self.reason = ''
@property
def can_transfer(self):
"""
bool: :const:`True` if :attr:`address` can transfer the edition
:attr:`edition_number` of :attr:`piece_address` else :const:`False`.
"""
# 1. The address needs to own the edition
chain = BlockchainSpider.chain(self._tree, self.edition_number)
if len(chain) == 0:
self.reason = 'The edition number {} does not exist in the blockchain'.format(self.edition_number)
return False
chain = BlockchainSpider.strip_loan(chain)
to_address = chain[-1]['to_address']
if to_address != self.address:
self.reason = 'Address {} does not own the edition number {}'.format(self.address, self.edition_number)
return False
return True
@property
def can_consign(self):
"""
bool: :const:`True` if :attr:`address` can consign the edition
:attr:`edition_number` of :attr:`piece_address` else :const:`False`.
"""
return self.can_transfer
@property
def can_loan(self):
"""
bool: :const:`True` if :attr:`address` can loan the edition
:attr:`edition_number` of :attr:`piece_address` else :const:`False`.
"""
return self.can_transfer
@property
def can_unconsign(self):
"""
bool: :const:`True` if :attr:`address` can unconsign the edition
:attr:`edition_number` of :attr:`piece_address` else :const:`False`.
If the last transaction is a consignment of the edition to the user.
"""
chain = BlockchainSpider.chain(self._tree, self.edition_number)
if len(chain) == 0:
self.reason = 'Master edition not yet registered'
return False
chain = BlockchainSpider.strip_loan(chain)
action = chain[-1]['action']
piece_address = chain[-1]['piece_address']
edition_number = chain[-1]['edition_number']
to_address = chain[-1]['to_address']
if action != 'CONSIGN' or piece_address != self.piece_address or edition_number != self.edition_number or to_address != self.address:
self.reason = 'Edition number {} is not consigned to {}'.format(self.edition_number, self.address)
return False
return True
@property
@property
def can_register_master(self):
"""
bool: :const:`True` if :attr:`address` can register the master
edition of :attr:`piece_address` else :const:`False`.
To register a master edition the piece address cannot exist in the
bitcoin network.
"""
if self._tree != {}:
self.reason = 'Master piece already registered in the blockchain'
return False
return True
@property
def can_editions(self):
"""
bool: :const:`True` if :attr:`address` can register the number of
editions of :attr:`piece_address` else :const:`False`.
In order to register the number of editions:
1. There needs to a least one transaction for the :attr:`piece_address`
(the registration of the master edition).
2. A piece with address :attr:`piece_address` needs to be registered
with ``'ASCRIBESPOOL01PIECE'`` (master edition).
3. The number of editions should have not been set yet (no tx with
verb ``'ASCRIBESPOOLEDITIONS'``).
"""
chain = BlockchainSpider.chain(self._tree, REGISTERED_PIECE_CODE)
if len(chain) == 0:
self.reason = 'Master edition not yet registered'
return False
number_editions = chain[0]['number_editions']
if number_editions != 0:
self.reason = 'Number of editions was already registered for this piece'
return False
return True
|
ascribe/pyspool | spool/ownership.py | Ownership.can_editions | python | def can_editions(self):
chain = BlockchainSpider.chain(self._tree, REGISTERED_PIECE_CODE)
if len(chain) == 0:
self.reason = 'Master edition not yet registered'
return False
number_editions = chain[0]['number_editions']
if number_editions != 0:
self.reason = 'Number of editions was already registered for this piece'
return False
return True | bool: :const:`True` if :attr:`address` can register the number of
editions of :attr:`piece_address` else :const:`False`.
In order to register the number of editions:
1. There needs to a least one transaction for the :attr:`piece_address`
(the registration of the master edition).
2. A piece with address :attr:`piece_address` needs to be registered
with ``'ASCRIBESPOOL01PIECE'`` (master edition).
3. The number of editions should have not been set yet (no tx with
verb ``'ASCRIBESPOOLEDITIONS'``). | train | https://github.com/ascribe/pyspool/blob/f8b10df1e7d2ea7950dde433c1cb6d5225112f4f/spool/ownership.py#L208-L236 | [
"def chain(tree, edition_number):\n \"\"\"\n Args:\n tree (dict): Tree history of all editions of a piece.\n edition_number (int): The edition number to check for.\n In the case of a piece (master edition), an empty\n string (``''``) or zero (``0``) can be passed.\n\n Returns:\n list: The chain of ownsership of a particular\n edition of the piece ordered by time.\n\n \"\"\"\n # return the chain for an edition_number sorted by the timestamp\n return sorted(tree.get(edition_number, []), key=lambda d: d['timestamp_utc'])\n"
] | class Ownership(object):
"""
Checks the actions that an address can make on a piece.
Attributes:
address (str): Bitcoin address to check ownership over
:attr:`piece_address`.
piece_address (str): Bitcoin address of the piece to check.
edition_number (int): The edition number of the piece.
testnet (bool): Bitcoin network. :const:`True` for
``testnet`` or :const:`False` for ``mainnet``.
reason (str): Message indicating the reason
for the failure of an ownership property.
"""
def __init__(self, address, piece_address, edition_number, testnet=False,
service='blockr', username='', password='', host='', port=''):
"""
Args:
address (str): Bitcoin address to check ownership over
``piece_address``.
piece_address (str): Bitcoin address of the piece to check.
edition_number (int): The edition number of the piece.
testnet (Optional[boo]l): Whether to use the testnet
(:const:`True`) or the mainnet (:const:`False`).
Defaults to :const:`False`.
service (Optional[str]): Name of service to use to connect
to the bitcoin network. Possible names are
``('blockr', 'daemon', 'regtest')``. Defaults to ``'blockr'``.
username (Optional[str]): Username to connect to a bitcoin node
via json-rpc based services: ``('daemon', 'regtest')``.
password (Optional[str]): Password to connect to a bitcoin node
via json-rpc based services: ``('daemon', 'regtest').``
host (Optional[str]): Host of the bitcoin node to connect to
via json-rpc based services: ``('daemon', 'regtest')``.
port (Optional[str]): Port of the bitcoin node to connect to
via json-rpc based services: ``('daemon', 'regtest')``.
"""
self.address = address
self.piece_address = piece_address
self.edition_number = edition_number
self.testnet = testnet
self._bcs = BlockchainSpider(service=service, testnet=testnet, username=username,
password=password, host=host, port=port)
self._tree = self._bcs.history(piece_address)
self.reason = ''
@property
def can_transfer(self):
"""
bool: :const:`True` if :attr:`address` can transfer the edition
:attr:`edition_number` of :attr:`piece_address` else :const:`False`.
"""
# 1. The address needs to own the edition
chain = BlockchainSpider.chain(self._tree, self.edition_number)
if len(chain) == 0:
self.reason = 'The edition number {} does not exist in the blockchain'.format(self.edition_number)
return False
chain = BlockchainSpider.strip_loan(chain)
to_address = chain[-1]['to_address']
if to_address != self.address:
self.reason = 'Address {} does not own the edition number {}'.format(self.address, self.edition_number)
return False
return True
@property
def can_consign(self):
"""
bool: :const:`True` if :attr:`address` can consign the edition
:attr:`edition_number` of :attr:`piece_address` else :const:`False`.
"""
return self.can_transfer
@property
def can_loan(self):
"""
bool: :const:`True` if :attr:`address` can loan the edition
:attr:`edition_number` of :attr:`piece_address` else :const:`False`.
"""
return self.can_transfer
@property
def can_unconsign(self):
"""
bool: :const:`True` if :attr:`address` can unconsign the edition
:attr:`edition_number` of :attr:`piece_address` else :const:`False`.
If the last transaction is a consignment of the edition to the user.
"""
chain = BlockchainSpider.chain(self._tree, self.edition_number)
if len(chain) == 0:
self.reason = 'Master edition not yet registered'
return False
chain = BlockchainSpider.strip_loan(chain)
action = chain[-1]['action']
piece_address = chain[-1]['piece_address']
edition_number = chain[-1]['edition_number']
to_address = chain[-1]['to_address']
if action != 'CONSIGN' or piece_address != self.piece_address or edition_number != self.edition_number or to_address != self.address:
self.reason = 'Edition number {} is not consigned to {}'.format(self.edition_number, self.address)
return False
return True
@property
def can_register(self):
"""
bool: :const:`True` if :attr:`address` can register the edition
:attr:`edition_number` of :attr:`piece_address` else :const:`False`.
In order to register an edition:
1. The master piece needs to be registered.
2. The number of editions needs to be registered.
3. The :attr:`edition_number` should not have been registered yet.
.. todo:: Also check that the root address owns the piece.
Right now we cannot do this because we only receive
the leaf address when registering an edition.
"""
chain = BlockchainSpider.chain(self._tree, REGISTERED_PIECE_CODE)
# edition 0 should only have two transactions: REGISTER and EDITIONS
if len(chain) == 0:
self.reason = 'Master edition not yet registered'
return False
chain = BlockchainSpider.strip_loan(chain)
number_editions = chain[0]['number_editions']
if number_editions == 0:
self.reason = 'Number of editions not yet registered'
return False
if self.edition_number > number_editions:
self.reason = 'You can only register {} editions. You are trying to register edition {}'.format(number_editions, self.edition_number)
return False
if self.edition_number in self._tree:
self.reason = 'Edition number {} is already registered in the blockchain'. format(self.edition_number)
return False
return True
@property
def can_register_master(self):
"""
bool: :const:`True` if :attr:`address` can register the master
edition of :attr:`piece_address` else :const:`False`.
To register a master edition the piece address cannot exist in the
bitcoin network.
"""
if self._tree != {}:
self.reason = 'Master piece already registered in the blockchain'
return False
return True
@property
|
ascribe/pyspool | spool/spoolverb.py | Spoolverb.from_verb | python | def from_verb(cls, verb):
pattern = r'^(?P<meta>[A-Z]+)(?P<version>\d+)(?P<action>[A-Z]+)(?P<arg1>\d+)?(\/(?P<arg2>\d+))?$'
try:
verb = verb.decode()
except AttributeError:
pass
match = re.match(pattern, verb)
if not match:
raise SpoolverbError('Invalid spoolverb: {}'.format(verb))
data = match.groupdict()
meta = data['meta']
version = data['version']
action = data['action']
if action == 'EDITIONS':
num_editions = data['arg1']
return cls(meta=meta, version=version, action=action, num_editions=int(num_editions))
elif action == 'LOAN':
# TODO Review. Workaround for piece loans
try:
edition_num = int(data['arg1'])
except TypeError:
edition_num = 0
loan_start = data['arg2'][:6]
loan_end = data['arg2'][6:]
return cls(meta=meta, version=version, action=action, edition_num=int(edition_num),
loan_start=loan_start, loan_end=loan_end)
elif action in ['FUEL', 'PIECE', 'CONSIGNEDREGISTRATION']:
# no edition number for these verbs
return cls(meta=meta, version=version, action=action)
else:
edition_num = data['arg1']
return cls(meta=meta, version=version, action=action, edition_num=int(edition_num)) | Constructs a :class:`Spoolverb` instance from the string
representation of the given verb.
Args:
verb (str): representation of the verb e.g.:
``'ASCRIBESPOOL01LOAN12/150526150528'``. Can also be in
binary format (:obj:`bytes`): ``b'ASCRIBESPOOL01PIECE'``.
Returns:
:class:`Spoolverb` instance. | train | https://github.com/ascribe/pyspool/blob/f8b10df1e7d2ea7950dde433c1cb6d5225112f4f/spool/spoolverb.py#L72-L117 | null | class Spoolverb(object):
"""
Allows for easy creation of the verb to be encoded on the
``op_return`` of all SPOOL transactions.
Attributes:
supported_actions (List[str]): Actions supported by the SPOOL
protocol.
"""
supported_actions = ['REGISTER', 'CONSIGN', 'TRANSFER', 'LOAN', 'UNCONSIGN',
'FUEL', 'EDITIONS', 'PIECE', 'MIGRATE', 'CONSIGNEDREGISTRATION']
def __init__(self, num_editions=None, edition_num=None, loan_start='',
loan_end='', meta='ASCRIBESPOOL', version='01', action=None):
"""
Initializer for the Spoolverb class.
Args:
num_editions (int): Number of editions to register.
edition_num (str): Number of the edition to use.
loan_start (str): Start of the loan in the format ``YYMMDD``.
loan_end (str): End of the loan in the format ``YYMMDD``.
meta (str): Header for the spool protocol. Defaults to
``'ASCRIBESPOOL'``.
version (str): Version of the protocol. Defaults to ``'01'``.
action (str): One of the actions in :attr:`supported_actions`.
Returns:
:class:`Spoolverb` instance.
"""
self.meta = meta
self.version = version
self.num_editions = num_editions
self.edition_number = edition_num if edition_num else ''
self.loan_start = loan_start
self.loan_end = loan_end
self.action = action
@classmethod
@property
def piece(self):
"""
str: representation of the ``PIECE`` spoolverb. E.g.:
``'ASCRIBESPOOL01PIECE'``.
"""
return '{}{}PIECE'.format(self.meta, self.version)
@property
def register(self):
"""
str: representation of the ``REGISTER`` spoolverb. E.g.:
``'ASCRIBESPOOL01REGISTER1'```.
"""
return '{}{}REGISTER{}'.format(self.meta, self.version, self.edition_number)
@property
def editions(self):
"""
str: representation of the ``EDITIONS`` spoolverb. E.g.:
``'ASCRIBESPOOL01EDITIONS10'``.
"""
return '{}{}EDITIONS{}'.format(self.meta, self.version, self.num_editions)
@property
def transfer(self):
"""
str: representation of the ``TRANSFER`` spoolverb. E.g.:
``'ASCRIBESPOOL01TRANSFER1'``.
"""
return '{}{}TRANSFER{}'.format(self.meta, self.version, self.edition_number)
@property
def consign(self):
"""
str: representation of the ``CONSIGN`` spoolverb. E.g.:
``'ASCRIBESPOOL01CONSIGN1'``.
"""
return '{}{}CONSIGN{}'.format(self.meta, self.version, self.edition_number)
@property
def unconsign(self):
"""
str: representation of the ``UNCONSIGN`` spoolverb. E.g.:
``'ASCRIBESPOOL01UNCONSIGN1'``.
"""
return '{}{}UNCONSIGN{}'.format(self.meta, self.version, self.edition_number)
@property
def loan(self):
"""
str: representation of the ``LOAN`` spoolverb. E.g.:
``'ASCRIBESPOOL01LOAN1/150526150528'``.
"""
return '{}{}LOAN{}/{}{}'.format(self.meta, self.version, self.edition_number,
self.loan_start, self.loan_end)
@property
def migrate(self):
"""
str: representation of the ``MIGRATE`` spoolverb. E.g.:
``'ASCRIBESPOOL01MIGRATE1'``.
"""
return '{}{}MIGRATE{}'.format(self.meta, self.version, self.edition_number)
@property
def consigned_registration(self):
"""
str: representation of the ``CONSIGNEDREGISTRATION`` spoolverb. E.g.:
``'ASCRIBESPOOL01CONSIGNEDREGISTRATION'``.
"""
return '{}{}CONSIGNEDREGISTRATION'.format(self.meta, self.version)
@property
def fuel(self):
"""
str: representation of the ``FUEL`` spoolverb. E.g.:
``'ASCRIBESPOOL01FUEL'``.
"""
return '{}{}FUEL'.format(self.meta, self.version)
|
ascribe/pyspool | spool/spoolverb.py | Spoolverb.loan | python | def loan(self):
return '{}{}LOAN{}/{}{}'.format(self.meta, self.version, self.edition_number,
self.loan_start, self.loan_end) | str: representation of the ``LOAN`` spoolverb. E.g.:
``'ASCRIBESPOOL01LOAN1/150526150528'``. | train | https://github.com/ascribe/pyspool/blob/f8b10df1e7d2ea7950dde433c1cb6d5225112f4f/spool/spoolverb.py#L168-L174 | null | class Spoolverb(object):
"""
Allows for easy creation of the verb to be encoded on the
``op_return`` of all SPOOL transactions.
Attributes:
supported_actions (List[str]): Actions supported by the SPOOL
protocol.
"""
supported_actions = ['REGISTER', 'CONSIGN', 'TRANSFER', 'LOAN', 'UNCONSIGN',
'FUEL', 'EDITIONS', 'PIECE', 'MIGRATE', 'CONSIGNEDREGISTRATION']
def __init__(self, num_editions=None, edition_num=None, loan_start='',
loan_end='', meta='ASCRIBESPOOL', version='01', action=None):
"""
Initializer for the Spoolverb class.
Args:
num_editions (int): Number of editions to register.
edition_num (str): Number of the edition to use.
loan_start (str): Start of the loan in the format ``YYMMDD``.
loan_end (str): End of the loan in the format ``YYMMDD``.
meta (str): Header for the spool protocol. Defaults to
``'ASCRIBESPOOL'``.
version (str): Version of the protocol. Defaults to ``'01'``.
action (str): One of the actions in :attr:`supported_actions`.
Returns:
:class:`Spoolverb` instance.
"""
self.meta = meta
self.version = version
self.num_editions = num_editions
self.edition_number = edition_num if edition_num else ''
self.loan_start = loan_start
self.loan_end = loan_end
self.action = action
@classmethod
def from_verb(cls, verb):
"""
Constructs a :class:`Spoolverb` instance from the string
representation of the given verb.
Args:
verb (str): representation of the verb e.g.:
``'ASCRIBESPOOL01LOAN12/150526150528'``. Can also be in
binary format (:obj:`bytes`): ``b'ASCRIBESPOOL01PIECE'``.
Returns:
:class:`Spoolverb` instance.
"""
pattern = r'^(?P<meta>[A-Z]+)(?P<version>\d+)(?P<action>[A-Z]+)(?P<arg1>\d+)?(\/(?P<arg2>\d+))?$'
try:
verb = verb.decode()
except AttributeError:
pass
match = re.match(pattern, verb)
if not match:
raise SpoolverbError('Invalid spoolverb: {}'.format(verb))
data = match.groupdict()
meta = data['meta']
version = data['version']
action = data['action']
if action == 'EDITIONS':
num_editions = data['arg1']
return cls(meta=meta, version=version, action=action, num_editions=int(num_editions))
elif action == 'LOAN':
# TODO Review. Workaround for piece loans
try:
edition_num = int(data['arg1'])
except TypeError:
edition_num = 0
loan_start = data['arg2'][:6]
loan_end = data['arg2'][6:]
return cls(meta=meta, version=version, action=action, edition_num=int(edition_num),
loan_start=loan_start, loan_end=loan_end)
elif action in ['FUEL', 'PIECE', 'CONSIGNEDREGISTRATION']:
# no edition number for these verbs
return cls(meta=meta, version=version, action=action)
else:
edition_num = data['arg1']
return cls(meta=meta, version=version, action=action, edition_num=int(edition_num))
@property
def piece(self):
"""
str: representation of the ``PIECE`` spoolverb. E.g.:
``'ASCRIBESPOOL01PIECE'``.
"""
return '{}{}PIECE'.format(self.meta, self.version)
@property
def register(self):
"""
str: representation of the ``REGISTER`` spoolverb. E.g.:
``'ASCRIBESPOOL01REGISTER1'```.
"""
return '{}{}REGISTER{}'.format(self.meta, self.version, self.edition_number)
@property
def editions(self):
"""
str: representation of the ``EDITIONS`` spoolverb. E.g.:
``'ASCRIBESPOOL01EDITIONS10'``.
"""
return '{}{}EDITIONS{}'.format(self.meta, self.version, self.num_editions)
@property
def transfer(self):
"""
str: representation of the ``TRANSFER`` spoolverb. E.g.:
``'ASCRIBESPOOL01TRANSFER1'``.
"""
return '{}{}TRANSFER{}'.format(self.meta, self.version, self.edition_number)
@property
def consign(self):
"""
str: representation of the ``CONSIGN`` spoolverb. E.g.:
``'ASCRIBESPOOL01CONSIGN1'``.
"""
return '{}{}CONSIGN{}'.format(self.meta, self.version, self.edition_number)
@property
def unconsign(self):
"""
str: representation of the ``UNCONSIGN`` spoolverb. E.g.:
``'ASCRIBESPOOL01UNCONSIGN1'``.
"""
return '{}{}UNCONSIGN{}'.format(self.meta, self.version, self.edition_number)
@property
@property
def migrate(self):
"""
str: representation of the ``MIGRATE`` spoolverb. E.g.:
``'ASCRIBESPOOL01MIGRATE1'``.
"""
return '{}{}MIGRATE{}'.format(self.meta, self.version, self.edition_number)
@property
def consigned_registration(self):
"""
str: representation of the ``CONSIGNEDREGISTRATION`` spoolverb. E.g.:
``'ASCRIBESPOOL01CONSIGNEDREGISTRATION'``.
"""
return '{}{}CONSIGNEDREGISTRATION'.format(self.meta, self.version)
@property
def fuel(self):
"""
str: representation of the ``FUEL`` spoolverb. E.g.:
``'ASCRIBESPOOL01FUEL'``.
"""
return '{}{}FUEL'.format(self.meta, self.version)
|
ascribe/pyspool | spool/spool.py | Spool.register_piece | python | def register_piece(self, from_address, to_address, hash, password, min_confirmations=6, sync=False, ownership=True):
file_hash, file_hash_metadata = hash
path, from_address = from_address
verb = Spoolverb()
unsigned_tx = self.simple_spool_transaction(from_address,
[file_hash, file_hash_metadata, to_address],
op_return=verb.piece,
min_confirmations=min_confirmations)
signed_tx = self._t.sign_transaction(unsigned_tx, password)
txid = self._t.push(signed_tx)
return txid | Register a piece
Args:
from_address (Tuple[str]): Federation address. All register transactions
        originate from the Federation wallet
to_address (str): Address registering the edition
hash (Tuple[str]): Hash of the piece. (file_hash, file_hash_metadata)
password (str): Federation wallet password. For signing the transaction
    edition_num (int): The number of the edition to register. Use
edition_num=0 to register the master edition
min_confirmations (int): Override the number of confirmations when
        choosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the
        function will block until there is at least one confirmation on
the blockchain. Defaults to False
    ownership (bool): Check ownership in the blockchain before pushing the
transaction. Defaults to True
Returns:
str: transaction id | train | https://github.com/ascribe/pyspool/blob/f8b10df1e7d2ea7950dde433c1cb6d5225112f4f/spool/spool.py#L99-L133 | [
"def simple_spool_transaction(self, from_address, to, op_return, min_confirmations=6):\n \"\"\"\n Utililty function to create the spool transactions. Selects the inputs,\n encodes the op_return and constructs the transaction.\n\n Args:\n from_address (str): Address originating the transaction\n to (str): list of addresses to receive tokens (file_hash, file_hash_metadata, ...)\n op_return (str): String representation of the spoolverb, as returned by the properties of Spoolverb\n min_confirmations (int): Number of confirmations when chosing the inputs of the transaction. Defaults to 6\n\n Returns:\n str: unsigned transaction\n\n \"\"\"\n # list of addresses to send\n ntokens = len(to)\n nfees = old_div(self._t.estimate_fee(ntokens, 2), self.fee)\n inputs = self.select_inputs(from_address, nfees, ntokens, min_confirmations=min_confirmations)\n # outputs\n outputs = [{'address': to_address, 'value': self.token} for to_address in to]\n outputs += [{'script': self._t._op_return_hex(op_return), 'value': 0}]\n # build transaction\n unsigned_tx = self._t.build_transaction(inputs, outputs)\n return unsigned_tx\n"
] | class Spool(object):
"""
Class that contains all Spool methods.
In the SPOOL implementation there is no notion of users only addresses.
All addresses come from BIP32 HD wallets. This makes it easier to manage all the keys
since we can retrieve everything we need from a master secret (namely the private key
to sign the transactions).
Since we are dealing with HD wallets we expect all ``from_address`` to be a
tuple of ``(path, address)`` so that we can retrieve the private key for
that particular leaf address. If we want to use the root address we can
just pass an empty string to the first element of the tuple e.g.
``('', address)``. For instance when using the federation wallet address we
have no need to create leaf addresses.
A file is represented by two hashes:
- ``file_hash``: is the hash of the digital file
- ``file_hash_metadata``: is the hash of the digital file + metadata
The hash is passed to the methods has a tuple: ``(file_hash, file_hash_metadata)``
Attributes:
FEE (int): transaction fee
TOKEN (int): token
SPENTS_QUEUE_MAXSIZE (int): spent outputs queue maximum size
"""
FEE = 30000
TOKEN = 3000
SPENTS_QUEUE_MAXSIZE = 50
def __init__(self, testnet=False, service='blockr', username='',
password='', host='', port='', fee=None, token=None):
"""
Args:
testnet (bool): Whether to use the mainnet or testnet.
Defaults to the mainnet (:const:`False`).
service (str): Bitcoin communication interface: ``'blockr'``,
``'daemon'``, or ``'regtest'``. ``'blockr'`` refers to the
public api, whereas ``'daemon'`` and ``'regtest'`` refer
to the jsonrpc inteface. Defaults to ``'blockr'``.
username (str): username for jsonrpc communications
password (str): password for jsonrpc communications
hostname (str): hostname of the bitcoin node when using jsonrpc
port (str): port number of the bitcoin node when using jsonrpc
fee (int): transaction fee
token (int): token
"""
self.testnet = testnet
self._netcode = 'XTN' if testnet else 'BTC'
self._t = Transactions(service=service, testnet=testnet, username=username,
password=password, host=host, port=port)
# simple cache for spent outputs. Useful so that rapid firing transactions don't use the same outputs
self._spents = Queue(maxsize=self.SPENTS_QUEUE_MAXSIZE)
self.fee = fee or self.FEE
self.token = token or self.TOKEN
@dispatch
@dispatch
def register(self, from_address, to_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
"""
Register an edition or master edition of a piece
Args:
from_address (Tuple[str]): Federation address. All register transactions originate from the the Federation wallet
to_address (str): Address registering the edition
hash (Tuple[str])): Hash of the piece. Tuple (file_hash, file_hash_metadata)
password (str): Federation wallet password. For signing the transaction
edition_num (int): The number of the edition to register. User edition_num=0 to register the master edition
min_confirmations (int): Override the number of confirmations when chosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
least on confirmation on the blockchain. Defaults to False
ownership (bool): Check ownsership in the blockchain before pushing the transaction. Defaults to True
Returns:
str: transaction id
"""
file_hash, file_hash_metadata = hash
path, from_address = from_address
verb = Spoolverb(edition_num=edition_num)
unsigned_tx = self.simple_spool_transaction(from_address,
[file_hash, file_hash_metadata, to_address],
op_return=verb.register,
min_confirmations=min_confirmations)
signed_tx = self._t.sign_transaction(unsigned_tx, password)
txid = self._t.push(signed_tx)
return txid
@dispatch
def consigned_registration(self, from_address, to_address, hash, password, min_confirmations=6, sync=False, ownership=True):
"""
Register an edition or master edition of a piece consigned to ``from_address``
Args:
from_address (Tuple[str])): Federation address. All register transactions originate from the the Federation wallet
to_address (str): Address registering the edition
hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
password (str): Federation wallet password. For signing the transaction
min_confirmations (int): Override the number of confirmations when chosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
least on confirmation on the blockchain. Defaults to False
ownership (bool): Check ownsership in the blockchain before pushing the transaction. Defaults to True
Returns:
str: transaction id
"""
file_hash, file_hash_metadata = hash
path, from_address = from_address
verb = Spoolverb()
unsigned_tx = self.simple_spool_transaction(from_address,
[file_hash, file_hash_metadata, to_address],
op_return=verb.consigned_registration,
min_confirmations=min_confirmations)
signed_tx = self._t.sign_transaction(unsigned_tx, password)
txid = self._t.push(signed_tx)
return txid
@dispatch
def editions(self, from_address, to_address, hash, password, num_editions, min_confirmations=6, sync=False, ownership=True):
"""
Register the number of editions of a piece
Args:
from_address (Tuple[str]): Federation address. All register transactions originate from the the Federation wallet
to_address (str): Address registering the number of editions
hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
password (str): Federation wallet password. For signing the transaction
num_editions (int): Number of editions of the piece
min_confirmations (int): Number of confirmations when chosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
least on confirmation on the blockchain. Defaults to False
ownership (bool): Check ownsership in the blockchain before pushing the transaction. Defaults to True
Returns:
str: transaction id
"""
file_hash, file_hash_metadata = hash
path, from_address = from_address
verb = Spoolverb(num_editions=num_editions)
unsigned_tx = self.simple_spool_transaction(from_address,
[file_hash, file_hash_metadata, to_address],
op_return=verb.editions,
min_confirmations=min_confirmations)
signed_tx = self._t.sign_transaction(unsigned_tx, password)
txid = self._t.push(signed_tx)
return txid
@dispatch
def transfer(self, from_address, to_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
"""
Transfer a piece between addresses
Args:
from_address (Tuple[str]): Address currently owning the edition
to_address: Address to receive the edition
hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
password (str): Password for the wallet currently owning the edition. For signing the transaction
edition_num (int): the number of the edition to transfer
min_confirmations (int): Number of confirmations when chosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
least on confirmation on the blockchain. Defaults to False
ownership (bool): Check ownsership in the blockchain before pushing the transaction. Defaults to True
Returns:
str: transaction id
"""
path, from_address = from_address
file_hash, file_hash_metadata = hash
verb = Spoolverb(edition_num=edition_num)
unsigned_tx = self.simple_spool_transaction(from_address,
[file_hash, to_address],
op_return=verb.transfer,
min_confirmations=min_confirmations)
signed_tx = self._t.sign_transaction(unsigned_tx, password, path=path)
txid = self._t.push(signed_tx)
return txid
@dispatch
def consign(self, from_address, to_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
"""
Consign a piece to an address
Args:
from_address (Tuple[str]): Address currently owning the edition
to_address (str): Address to where the piece will be consigned to
hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
password (str): Password for the wallet currently owning the edition. For signing the transaction
edition_num (int): the number of the edition to consign
min_confirmations (int): Number of confirmations when chosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
least on confirmation on the blockchain. Defaults to False
ownership (bool): Check ownsership in the blockchain before pushing the transaction. Defaults to True
Returns:
str: transaction id
"""
path, from_address = from_address
file_hash, file_hash_metadata = hash
verb = Spoolverb(edition_num=edition_num)
unsigned_tx = self.simple_spool_transaction(from_address,
[file_hash, to_address],
op_return=verb.consign,
min_confirmations=min_confirmations)
signed_tx = self._t.sign_transaction(unsigned_tx, password, path=path)
txid = self._t.push(signed_tx)
return txid
@dispatch
def unconsign(self, from_address, to_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
"""
Unconsign the edition
Args:
from_address (Tuple[str]): Address where the edition is currently consigned
to_address (str): Address that consigned the piece to from_address
hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
password (str): Password for the wallet currently holding the edition. For signing the transaction
edition_num (int): the number of the edition to unconsign
min_confirmations (int): Number of confirmations when chosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
least on confirmation on the blockchain. Defaults to False
ownership (bool): Check ownsership in the blockchain before pushing the transaction. Defaults to True
Returns:
str: transaction id
"""
# In an unconsignment the to_address needs to be the address that created the consign transaction
path, from_address = from_address
file_hash, file_hash_metadata = hash
verb = Spoolverb(edition_num=edition_num)
unsigned_tx = self.simple_spool_transaction(from_address,
[file_hash, to_address],
op_return=verb.unconsign,
min_confirmations=min_confirmations)
signed_tx = self._t.sign_transaction(unsigned_tx, password, path=path)
txid = self._t.push(signed_tx)
return txid
@dispatch
def loan(self, from_address, to_address, hash, password, edition_num, loan_start, loan_end, min_confirmations=6, sync=False, ownership=True):
"""
Loan the edition
Args:
from_address (Tuple[str]): Address currently holding the edition
to_address (str): Address to loan the edition to
hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
password (str): Password for the wallet currently holding the edition. For signing the transaction
edition_num (int): the number of the edition to loan
loan_start (str): Start date for the loan. In the form YYMMDD
loan_end (str): End date for the loan. In the form YYMMDD
min_confirmations (int): Number of confirmations when chosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
least on confirmation on the blockchain. Defaults to False
ownership (bool): Check ownsership in the blockchain before pushing the transaction. Defaults to True
Returns:
str: transaction id
"""
path, from_address = from_address
file_hash, file_hash_metadata = hash
verb = Spoolverb(edition_num=edition_num, loan_start=loan_start, loan_end=loan_end)
unsigned_tx = self.simple_spool_transaction(from_address,
[file_hash, to_address],
op_return=verb.loan,
min_confirmations=min_confirmations)
signed_tx = self._t.sign_transaction(unsigned_tx, password, path=path)
txid = self._t.push(signed_tx)
return txid
    @dispatch
    def migrate(self, from_address, prev_address, new_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
        """
        Migrate an edition of a piece from one address to another.

        Args:
            from_address (Tuple[str]): Federation address as ``(path, address)``.
                All migrate transactions originate from the Federation wallet
            prev_address (str): Address the edition is migrated away from
            new_address (str): Address the edition is migrated to
            hash (Tuple[str]): Hash of the piece: ``(file_hash, file_hash_metadata)``
            password (str): Federation wallet password. For signing the transaction
            edition_num (int): The number of the edition to migrate
            min_confirmations (int): Override the number of confirmations when
                choosing the inputs of the transaction. Defaults to 6
            sync (bool): Perform the transaction in synchronous mode: the call
                blocks until there is at least one confirmation on the
                blockchain. Defaults to False
            ownership (bool): Check ownership in the blockchain before pushing
                the transaction. Defaults to True

        Returns:
            str: transaction id
        """
        # sync/ownership are not read in this body; presumably consumed by the
        # @dispatch decorator — TODO confirm.
        file_hash, file_hash_metadata = hash
        path, from_address = from_address
        verb = Spoolverb(edition_num=edition_num)
        # Token outputs: the file hash plus the previous and new owner addresses.
        unsigned_tx = self.simple_spool_transaction(from_address,
                                                    [file_hash, prev_address, new_address],
                                                    op_return=verb.migrate,
                                                    min_confirmations=min_confirmations)
        signed_tx = self._t.sign_transaction(unsigned_tx, password)
        txid = self._t.push(signed_tx)
        return txid
@dispatch
def refill_main_wallet(self, from_address, to_address, nfees, ntokens, password, min_confirmations=6, sync=False):
"""
Refill the Federation wallet with tokens and fees. This keeps the federation wallet clean.
Dealing with exact values simplifies the transactions. No need to calculate change. Easier to keep track of the
unspents and prevent double spends that would result in transactions being rejected by the bitcoin network.
Args:
from_address (Tuple[str]): Refill wallet address. Refills the federation wallet with tokens and fees
to_address (str): Federation wallet address
nfees (int): Number of fees to transfer. Each fee is 10000 satoshi. Used to pay for the transactions
ntokens (int): Number of tokens to transfer. Each token is 600 satoshi. Used to register hashes in the blockchain
password (str): Password for the Refill wallet. Used to sign the transaction
min_confirmations (int): Number of confirmations when chosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
least on confirmation on the blockchain. Defaults to False
Returns:
str: transaction id
"""
path, from_address = from_address
unsigned_tx = self._t.simple_transaction(from_address,
[(to_address, self.fee)] * nfees + [(to_address, self.token)] * ntokens,
min_confirmations=min_confirmations)
signed_tx = self._t.sign_transaction(unsigned_tx, password)
txid = self._t.push(signed_tx)
return txid
@dispatch
def refill(self, from_address, to_address, nfees, ntokens, password, min_confirmations=6, sync=False):
"""
Refill wallets with the necessary fuel to perform spool transactions
Args:
from_address (Tuple[str]): Federation wallet address. Fuels the wallets with tokens and fees. All transactions to wallets
holding a particular piece should come from the Federation wallet
to_address (str): Wallet address that needs to perform a spool transaction
nfees (int): Number of fees to transfer. Each fee is 10000 satoshi. Used to pay for the transactions
ntokens (int): Number of tokens to transfer. Each token is 600 satoshi. Used to register hashes in the blockchain
password (str): Password for the Federation wallet. Used to sign the transaction
min_confirmations (int): Number of confirmations when chosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
least on confirmation on the blockchain. Defaults to False
Returns:
str: transaction id
"""
path, from_address = from_address
verb = Spoolverb()
# nfees + 1: nfees to refill plus one fee for the refill transaction itself
inputs = self.select_inputs(from_address, nfees + 1, ntokens, min_confirmations=min_confirmations)
outputs = [{'address': to_address, 'value': self.token}] * ntokens
outputs += [{'address': to_address, 'value': self.fee}] * nfees
outputs += [{'script': self._t._op_return_hex(verb.fuel), 'value': 0}]
unsigned_tx = self._t.build_transaction(inputs, outputs)
signed_tx = self._t.sign_transaction(unsigned_tx, password, path=path)
txid = self._t.push(signed_tx)
return txid
    def simple_spool_transaction(self, from_address, to, op_return, min_confirmations=6):
        """
        Utility function to create the spool transactions. Selects the inputs,
        encodes the op_return and constructs the unsigned transaction.

        Args:
            from_address (str): Address originating the transaction
            to (List[str]): list of addresses to receive tokens
                (file_hash, file_hash_metadata, ...)
            op_return (str): String representation of the spoolverb, as
                returned by the properties of Spoolverb
            min_confirmations (int): Number of confirmations required when
                choosing the inputs of the transaction. Defaults to 6

        Returns:
            str: unsigned transaction
        """
        # One token output is created per destination address.
        ntokens = len(to)
        # Number of fee-sized inputs needed to cover the estimated network fee
        # (integer division; old_div keeps Python 2 floor-division semantics).
        nfees = old_div(self._t.estimate_fee(ntokens, 2), self.fee)
        inputs = self.select_inputs(from_address, nfees, ntokens, min_confirmations=min_confirmations)
        # Outputs: one token per destination, plus a zero-value OP_RETURN
        # output carrying the encoded spoolverb.
        outputs = [{'address': to_address, 'value': self.token} for to_address in to]
        outputs += [{'script': self._t._op_return_hex(op_return), 'value': 0}]
        # build the unsigned transaction; the caller signs and pushes it
        unsigned_tx = self._t.build_transaction(inputs, outputs)
        return unsigned_tx
    def select_inputs(self, address, nfees, ntokens, min_confirmations=6):
        """
        Selects the inputs for the spool transaction.

        Args:
            address (str): bitcoin address to select inputs for
            nfees (int): number of fee-sized outputs (``self.fee``) to select
            ntokens (int): number of token-sized outputs (``self.token``) to select
            min_confirmations (Optional[int]): minimum number of required
                confirmations; defaults to 6

        Returns:
            List[dict]: the selected fee outputs followed by the token outputs

        Raises:
            Exception: if the address has no spendable outputs at all
            SpoolFundsError: if there are not enough exact-amount fee/token
                outputs to satisfy the request
        """
        unspents = self._t.get(address, min_confirmations=min_confirmations)['unspents']
        # Drop outputs already reserved by recent calls so rapid-fire
        # transactions do not try to double-spend the same outputs.
        unspents = [u for u in unspents if u not in self._spents.queue]
        if len(unspents) == 0:
            raise Exception("No spendable outputs found")
        # Outputs are matched by exact amount: fee-sized outputs pay the
        # network, token-sized outputs carry the ownership information.
        fees = [u for u in unspents if u['amount'] == self.fee][:nfees]
        tokens = [u for u in unspents if u['amount'] == self.token][:ntokens]
        if len(fees) != nfees or len(tokens) != ntokens:
            raise SpoolFundsError("Not enough outputs to spend. Refill your wallet")
        # Evict the oldest reservations if the bounded spents queue would
        # overflow, then reserve the outputs selected above.
        if self._spents.qsize() > self.SPENTS_QUEUE_MAXSIZE - (nfees + ntokens):
            [self._spents.get() for i in range(self._spents.qsize() + nfees + ntokens - self.SPENTS_QUEUE_MAXSIZE)]
        [self._spents.put(fee) for fee in fees]
        [self._spents.put(token) for token in tokens]
        return fees + tokens
|
ascribe/pyspool | spool/spool.py | Spool.refill_main_wallet | python | def refill_main_wallet(self, from_address, to_address, nfees, ntokens, password, min_confirmations=6, sync=False):
path, from_address = from_address
unsigned_tx = self._t.simple_transaction(from_address,
[(to_address, self.fee)] * nfees + [(to_address, self.token)] * ntokens,
min_confirmations=min_confirmations)
signed_tx = self._t.sign_transaction(unsigned_tx, password)
txid = self._t.push(signed_tx)
return txid | Refill the Federation wallet with tokens and fees. This keeps the federation wallet clean.
Dealing with exact values simplifies the transactions. No need to calculate change. Easier to keep track of the
unspents and prevent double spends that would result in transactions being rejected by the bitcoin network.
Args:
from_address (Tuple[str]): Refill wallet address. Refills the federation wallet with tokens and fees
to_address (str): Federation wallet address
nfees (int): Number of fees to transfer. Each fee is 10000 satoshi. Used to pay for the transactions
ntokens (int): Number of tokens to transfer. Each token is 600 satoshi. Used to register hashes in the blockchain
password (str): Password for the Refill wallet. Used to sign the transaction
min_confirmations (int): Number of confirmations when chosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
least on confirmation on the blockchain. Defaults to False
Returns:
str: transaction id | train | https://github.com/ascribe/pyspool/blob/f8b10df1e7d2ea7950dde433c1cb6d5225112f4f/spool/spool.py#L390-L417 | null | class Spool(object):
"""
Class that contains all Spool methods.
In the SPOOL implementation there is no notion of users only addresses.
All addresses come from BIP32 HD wallets. This makes it easier to manage all the keys
since we can retrieve everything we need from a master secret (namely the private key
to sign the transactions).
Since we are dealing with HD wallets we expect all ``from_address`` to be a
tuple of ``(path, address)`` so that we can retrieve the private key for
that particular leaf address. If we want to use the root address we can
just pass an empty string to the first element of the tuple e.g.
``('', address)``. For instance when using the federation wallet address we
have no need to create leaf addresses.
A file is represented by two hashes:
- ``file_hash``: is the hash of the digital file
- ``file_hash_metadata``: is the hash of the digital file + metadata
The hash is passed to the methods has a tuple: ``(file_hash, file_hash_metadata)``
Attributes:
FEE (int): transaction fee
TOKEN (int): token
SPENTS_QUEUE_MAXSIZE (int): spent outputs queue maximum size
"""
FEE = 30000
TOKEN = 3000
SPENTS_QUEUE_MAXSIZE = 50
def __init__(self, testnet=False, service='blockr', username='',
password='', host='', port='', fee=None, token=None):
"""
Args:
testnet (bool): Whether to use the mainnet or testnet.
Defaults to the mainnet (:const:`False`).
service (str): Bitcoin communication interface: ``'blockr'``,
``'daemon'``, or ``'regtest'``. ``'blockr'`` refers to the
public api, whereas ``'daemon'`` and ``'regtest'`` refer
to the jsonrpc inteface. Defaults to ``'blockr'``.
username (str): username for jsonrpc communications
password (str): password for jsonrpc communications
hostname (str): hostname of the bitcoin node when using jsonrpc
port (str): port number of the bitcoin node when using jsonrpc
fee (int): transaction fee
token (int): token
"""
self.testnet = testnet
self._netcode = 'XTN' if testnet else 'BTC'
self._t = Transactions(service=service, testnet=testnet, username=username,
password=password, host=host, port=port)
# simple cache for spent outputs. Useful so that rapid firing transactions don't use the same outputs
self._spents = Queue(maxsize=self.SPENTS_QUEUE_MAXSIZE)
self.fee = fee or self.FEE
self.token = token or self.TOKEN
@dispatch
def register_piece(self, from_address, to_address, hash, password, min_confirmations=6, sync=False, ownership=True):
"""
Register a piece
Args:
from_address (Tuple[str]): Federation address. All register transactions
originate from the the Federation wallet
to_address (str): Address registering the edition
hash (Tuple[str]): Hash of the piece. (file_hash, file_hash_metadata)
password (str): Federation wallet password. For signing the transaction
edition_num (int): The number of the edition to register. User
edition_num=0 to register the master edition
min_confirmations (int): Override the number of confirmations when
chosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the
function will block until there is at least on confirmation on
the blockchain. Defaults to False
ownership (bool): Check ownsership in the blockchain before pushing the
transaction. Defaults to True
Returns:
str: transaction id
"""
file_hash, file_hash_metadata = hash
path, from_address = from_address
verb = Spoolverb()
unsigned_tx = self.simple_spool_transaction(from_address,
[file_hash, file_hash_metadata, to_address],
op_return=verb.piece,
min_confirmations=min_confirmations)
signed_tx = self._t.sign_transaction(unsigned_tx, password)
txid = self._t.push(signed_tx)
return txid
@dispatch
def register(self, from_address, to_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
"""
Register an edition or master edition of a piece
Args:
from_address (Tuple[str]): Federation address. All register transactions originate from the the Federation wallet
to_address (str): Address registering the edition
hash (Tuple[str])): Hash of the piece. Tuple (file_hash, file_hash_metadata)
password (str): Federation wallet password. For signing the transaction
edition_num (int): The number of the edition to register. User edition_num=0 to register the master edition
min_confirmations (int): Override the number of confirmations when chosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
least on confirmation on the blockchain. Defaults to False
ownership (bool): Check ownsership in the blockchain before pushing the transaction. Defaults to True
Returns:
str: transaction id
"""
file_hash, file_hash_metadata = hash
path, from_address = from_address
verb = Spoolverb(edition_num=edition_num)
unsigned_tx = self.simple_spool_transaction(from_address,
[file_hash, file_hash_metadata, to_address],
op_return=verb.register,
min_confirmations=min_confirmations)
signed_tx = self._t.sign_transaction(unsigned_tx, password)
txid = self._t.push(signed_tx)
return txid
@dispatch
def consigned_registration(self, from_address, to_address, hash, password, min_confirmations=6, sync=False, ownership=True):
"""
Register an edition or master edition of a piece consigned to ``from_address``
Args:
from_address (Tuple[str])): Federation address. All register transactions originate from the the Federation wallet
to_address (str): Address registering the edition
hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
password (str): Federation wallet password. For signing the transaction
min_confirmations (int): Override the number of confirmations when chosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
least on confirmation on the blockchain. Defaults to False
ownership (bool): Check ownsership in the blockchain before pushing the transaction. Defaults to True
Returns:
str: transaction id
"""
file_hash, file_hash_metadata = hash
path, from_address = from_address
verb = Spoolverb()
unsigned_tx = self.simple_spool_transaction(from_address,
[file_hash, file_hash_metadata, to_address],
op_return=verb.consigned_registration,
min_confirmations=min_confirmations)
signed_tx = self._t.sign_transaction(unsigned_tx, password)
txid = self._t.push(signed_tx)
return txid
@dispatch
def editions(self, from_address, to_address, hash, password, num_editions, min_confirmations=6, sync=False, ownership=True):
"""
Register the number of editions of a piece
Args:
from_address (Tuple[str]): Federation address. All register transactions originate from the the Federation wallet
to_address (str): Address registering the number of editions
hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
password (str): Federation wallet password. For signing the transaction
num_editions (int): Number of editions of the piece
min_confirmations (int): Number of confirmations when chosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
least on confirmation on the blockchain. Defaults to False
ownership (bool): Check ownsership in the blockchain before pushing the transaction. Defaults to True
Returns:
str: transaction id
"""
file_hash, file_hash_metadata = hash
path, from_address = from_address
verb = Spoolverb(num_editions=num_editions)
unsigned_tx = self.simple_spool_transaction(from_address,
[file_hash, file_hash_metadata, to_address],
op_return=verb.editions,
min_confirmations=min_confirmations)
signed_tx = self._t.sign_transaction(unsigned_tx, password)
txid = self._t.push(signed_tx)
return txid
@dispatch
def transfer(self, from_address, to_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
    """
    Transfer a piece between addresses.

    Args:
        from_address (Tuple[str]): (path, address) currently owning the edition
        to_address: Address to receive the edition
        hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
        password (str): Password for the wallet currently owning the edition. For signing the transaction
        edition_num (int): the number of the edition to transfer
        min_confirmations (int): Number of confirmations when choosing the inputs of the transaction. Defaults to 6
        sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
            least one confirmation on the blockchain. Defaults to False
        ownership (bool): Check ownership in the blockchain before pushing the transaction. Defaults to True

    Returns:
        str: transaction id
    """
    path, from_address = from_address
    # file_hash_metadata is unpacked but only file_hash takes part in a
    # transfer transaction.
    file_hash, file_hash_metadata = hash
    verb = Spoolverb(edition_num=edition_num)
    unsigned_tx = self.simple_spool_transaction(from_address,
                                                [file_hash, to_address],
                                                op_return=verb.transfer,
                                                min_confirmations=min_confirmations)
    # Leaf addresses need the HD-wallet path so the signing key can be derived.
    signed_tx = self._t.sign_transaction(unsigned_tx, password, path=path)
    txid = self._t.push(signed_tx)
    return txid
@dispatch
def consign(self, from_address, to_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
    """
    Consign a piece to an address.

    Args:
        from_address (Tuple[str]): (path, address) currently owning the edition
        to_address (str): Address to where the piece will be consigned to
        hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
        password (str): Password for the wallet currently owning the edition. For signing the transaction
        edition_num (int): the number of the edition to consign
        min_confirmations (int): Number of confirmations when choosing the inputs of the transaction. Defaults to 6
        sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
            least one confirmation on the blockchain. Defaults to False
        ownership (bool): Check ownership in the blockchain before pushing the transaction. Defaults to True

    Returns:
        str: transaction id
    """
    path, from_address = from_address
    # Only file_hash participates in the consign transaction outputs.
    file_hash, file_hash_metadata = hash
    verb = Spoolverb(edition_num=edition_num)
    unsigned_tx = self.simple_spool_transaction(from_address,
                                                [file_hash, to_address],
                                                op_return=verb.consign,
                                                min_confirmations=min_confirmations)
    # Signed with the leaf key derived from the HD-wallet path.
    signed_tx = self._t.sign_transaction(unsigned_tx, password, path=path)
    txid = self._t.push(signed_tx)
    return txid
@dispatch
def unconsign(self, from_address, to_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
    """
    Unconsign the edition.

    Args:
        from_address (Tuple[str]): (path, address) where the edition is currently consigned
        to_address (str): Address that consigned the piece to from_address
        hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
        password (str): Password for the wallet currently holding the edition. For signing the transaction
        edition_num (int): the number of the edition to unconsign
        min_confirmations (int): Number of confirmations when choosing the inputs of the transaction. Defaults to 6
        sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
            least one confirmation on the blockchain. Defaults to False
        ownership (bool): Check ownership in the blockchain before pushing the transaction. Defaults to True

    Returns:
        str: transaction id
    """
    # In an unconsignment the to_address needs to be the address that created
    # the consign transaction.
    path, from_address = from_address
    file_hash, file_hash_metadata = hash
    verb = Spoolverb(edition_num=edition_num)
    unsigned_tx = self.simple_spool_transaction(from_address,
                                                [file_hash, to_address],
                                                op_return=verb.unconsign,
                                                min_confirmations=min_confirmations)
    # Signed with the leaf key derived from the HD-wallet path.
    signed_tx = self._t.sign_transaction(unsigned_tx, password, path=path)
    txid = self._t.push(signed_tx)
    return txid
@dispatch
def loan(self, from_address, to_address, hash, password, edition_num, loan_start, loan_end, min_confirmations=6, sync=False, ownership=True):
    """
    Loan the edition.

    Args:
        from_address (Tuple[str]): (path, address) currently holding the edition
        to_address (str): Address to loan the edition to
        hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
        password (str): Password for the wallet currently holding the edition. For signing the transaction
        edition_num (int): the number of the edition to loan
        loan_start (str): Start date for the loan. In the form YYMMDD
        loan_end (str): End date for the loan. In the form YYMMDD
        min_confirmations (int): Number of confirmations when choosing the inputs of the transaction. Defaults to 6
        sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
            least one confirmation on the blockchain. Defaults to False
        ownership (bool): Check ownership in the blockchain before pushing the transaction. Defaults to True

    Returns:
        str: transaction id
    """
    path, from_address = from_address
    file_hash, file_hash_metadata = hash
    # Loan dates are encoded inside the op_return payload by Spoolverb.
    verb = Spoolverb(edition_num=edition_num, loan_start=loan_start, loan_end=loan_end)
    unsigned_tx = self.simple_spool_transaction(from_address,
                                                [file_hash, to_address],
                                                op_return=verb.loan,
                                                min_confirmations=min_confirmations)
    # Signed with the leaf key derived from the HD-wallet path.
    signed_tx = self._t.sign_transaction(unsigned_tx, password, path=path)
    txid = self._t.push(signed_tx)
    return txid
@dispatch
def migrate(self, from_address, prev_address, new_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
    """
    Migrate an edition from one address to another.

    Args:
        from_address (Tuple[str]): Federation (path, address). All register transactions originate from the Federation wallet
        prev_address (str): Address the edition is migrated away from
        new_address (str): Address the edition is migrated to
        hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
        password (str): Federation wallet password. For signing the transaction
        edition_num (int): The number of the edition to migrate
        min_confirmations (int): Override the number of confirmations when choosing the inputs of the transaction. Defaults to 6
        sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
            least one confirmation on the blockchain. Defaults to False
        ownership (bool): Check ownership in the blockchain before pushing the transaction. Defaults to True

    Returns:
        str: transaction id
    """
    file_hash, file_hash_metadata = hash
    path, from_address = from_address
    verb = Spoolverb(edition_num=edition_num)
    # Token outputs go to the hash, the old holder and the new holder.
    unsigned_tx = self.simple_spool_transaction(from_address,
                                                [file_hash, prev_address, new_address],
                                                op_return=verb.migrate,
                                                min_confirmations=min_confirmations)
    # Federation transactions are signed with the root key (no HD path).
    signed_tx = self._t.sign_transaction(unsigned_tx, password)
    txid = self._t.push(signed_tx)
    return txid
@dispatch
def refill(self, from_address, to_address, nfees, ntokens, password, min_confirmations=6, sync=False):
    """
    Refill wallets with the necessary fuel to perform spool transactions.

    Note: the decorator was previously applied twice (``@dispatch`` stacked
    on itself), which wrapped the method in the dispatch logic two times;
    a single application is the intended form (as on every other verb
    method in this class).

    Args:
        from_address (Tuple[str]): Federation wallet (path, address). Fuels the
            wallets with tokens and fees. All transactions to wallets holding
            a particular piece should come from the Federation wallet.
        to_address (str): Wallet address that needs to perform a spool transaction.
        nfees (int): Number of fee outputs to transfer. Each fee output is
            ``self.fee`` satoshi (30000 by default). Used to pay for the transactions.
        ntokens (int): Number of token outputs to transfer. Each token is
            ``self.token`` satoshi (3000 by default). Used to register hashes
            in the blockchain.
        password (str): Password for the Federation wallet. Used to sign the transaction.
        min_confirmations (int): Number of confirmations when choosing the
            inputs of the transaction. Defaults to 6.
        sync (bool): Perform the transaction in synchronous mode; the call
            blocks until there is at least one confirmation on the
            blockchain. Defaults to False.

    Returns:
        str: transaction id
    """
    path, from_address = from_address
    verb = Spoolverb()
    # nfees + 1: nfees to refill plus one fee for the refill transaction itself
    inputs = self.select_inputs(from_address, nfees + 1, ntokens, min_confirmations=min_confirmations)
    outputs = [{'address': to_address, 'value': self.token}] * ntokens
    outputs += [{'address': to_address, 'value': self.fee}] * nfees
    outputs += [{'script': self._t._op_return_hex(verb.fuel), 'value': 0}]
    unsigned_tx = self._t.build_transaction(inputs, outputs)
    signed_tx = self._t.sign_transaction(unsigned_tx, password, path=path)
    txid = self._t.push(signed_tx)
    return txid
def simple_spool_transaction(self, from_address, to, op_return, min_confirmations=6):
    """
    Utility helper that assembles an (unsigned) spool transaction: selects
    the inputs, builds one token output per destination and appends the
    encoded op_return output.

    Args:
        from_address (str): Address originating the transaction.
        to (str): list of addresses to receive tokens
            (file_hash, file_hash_metadata, ...).
        op_return (str): String representation of the spoolverb, as returned
            by the properties of Spoolverb.
        min_confirmations (int): Number of confirmations when choosing the
            inputs of the transaction. Defaults to 6.

    Returns:
        str: unsigned transaction
    """
    # One token per destination address.
    ntokens = len(to)
    # Fees are paid with pre-made outputs of exactly ``self.fee`` satoshi each.
    nfees = old_div(self._t.estimate_fee(ntokens, 2), self.fee)
    selected = self.select_inputs(from_address, nfees, ntokens,
                                  min_confirmations=min_confirmations)
    outputs = []
    for recipient in to:
        outputs.append({'address': recipient, 'value': self.token})
    # Zero-value op_return output carrying the SPOOL verb.
    outputs.append({'script': self._t._op_return_hex(op_return), 'value': 0})
    return self._t.build_transaction(selected, outputs)
def select_inputs(self, address, nfees, ntokens, min_confirmations=6):
    """
    Selects the inputs for the spool transaction.

    Picks exactly ``nfees`` unspent outputs of value ``self.fee`` and
    ``ntokens`` unspent outputs of value ``self.token``, skipping outputs
    already handed out by a recent call (tracked in ``self._spents``) so
    rapid-fire transactions do not double-spend.

    Args:
        address (str): bitcoin address to select inputs for
        nfees (int): number of fees
        ntokens (int): number of tokens
        min_confirmations (Optional[int]): minimum number of required
            confirmations; defaults to 6

    Raises:
        Exception: if the address has no spendable outputs at all.
        SpoolFundsError: if there are not enough fee/token outputs to cover
            the request.
    """
    unspents = self._t.get(address, min_confirmations=min_confirmations)['unspents']
    # Exclude outputs already reserved by a previous call but possibly not
    # yet confirmed on the blockchain.
    unspents = [u for u in unspents if u not in self._spents.queue]
    if len(unspents) == 0:
        raise Exception("No spendable outputs found")

    # Only exact-value outputs are usable; change is never computed.
    fees = [u for u in unspents if u['amount'] == self.fee][:nfees]
    tokens = [u for u in unspents if u['amount'] == self.token][:ntokens]
    if len(fees) != nfees or len(tokens) != ntokens:
        raise SpoolFundsError("Not enough outputs to spend. Refill your wallet")
    # Evict the oldest entries so the queue never exceeds
    # SPENTS_QUEUE_MAXSIZE after the new reservations are added.
    if self._spents.qsize() > self.SPENTS_QUEUE_MAXSIZE - (nfees + ntokens):
        [self._spents.get() for i in range(self._spents.qsize() + nfees + ntokens - self.SPENTS_QUEUE_MAXSIZE)]
    [self._spents.put(fee) for fee in fees]
    [self._spents.put(token) for token in tokens]
    return fees + tokens
|
ascribe/pyspool | spool/spool.py | Spool.refill | python | def refill(self, from_address, to_address, nfees, ntokens, password, min_confirmations=6, sync=False):
path, from_address = from_address
verb = Spoolverb()
# nfees + 1: nfees to refill plus one fee for the refill transaction itself
inputs = self.select_inputs(from_address, nfees + 1, ntokens, min_confirmations=min_confirmations)
outputs = [{'address': to_address, 'value': self.token}] * ntokens
outputs += [{'address': to_address, 'value': self.fee}] * nfees
outputs += [{'script': self._t._op_return_hex(verb.fuel), 'value': 0}]
unsigned_tx = self._t.build_transaction(inputs, outputs)
signed_tx = self._t.sign_transaction(unsigned_tx, password, path=path)
txid = self._t.push(signed_tx)
return txid | Refill wallets with the necessary fuel to perform spool transactions
Args:
from_address (Tuple[str]): Federation wallet address. Fuels the wallets with tokens and fees. All transactions to wallets
holding a particular piece should come from the Federation wallet
to_address (str): Wallet address that needs to perform a spool transaction
nfees (int): Number of fees to transfer. Each fee is 10000 satoshi. Used to pay for the transactions
ntokens (int): Number of tokens to transfer. Each token is 600 satoshi. Used to register hashes in the blockchain
password (str): Password for the Federation wallet. Used to sign the transaction
min_confirmations (int): Number of confirmations when chosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
least on confirmation on the blockchain. Defaults to False
Returns:
str: transaction id | train | https://github.com/ascribe/pyspool/blob/f8b10df1e7d2ea7950dde433c1cb6d5225112f4f/spool/spool.py#L420-L449 | [
"def select_inputs(self, address, nfees, ntokens, min_confirmations=6):\n \"\"\"\n Selects the inputs for the spool transaction.\n\n Args:\n address (str): bitcoin address to select inputs for\n nfees (int): number of fees\n ntokens (int): number of tokens\n min_confirmations (Optional[int]): minimum number of required\n confirmations; defaults to 6\n\n \"\"\"\n unspents = self._t.get(address, min_confirmations=min_confirmations)['unspents']\n unspents = [u for u in unspents if u not in self._spents.queue]\n if len(unspents) == 0:\n raise Exception(\"No spendable outputs found\")\n\n fees = [u for u in unspents if u['amount'] == self.fee][:nfees]\n tokens = [u for u in unspents if u['amount'] == self.token][:ntokens]\n if len(fees) != nfees or len(tokens) != ntokens:\n raise SpoolFundsError(\"Not enough outputs to spend. Refill your wallet\")\n if self._spents.qsize() > self.SPENTS_QUEUE_MAXSIZE - (nfees + ntokens):\n [self._spents.get() for i in range(self._spents.qsize() + nfees + ntokens - self.SPENTS_QUEUE_MAXSIZE)]\n [self._spents.put(fee) for fee in fees]\n [self._spents.put(token) for token in tokens]\n return fees + tokens\n"
] | class Spool(object):
"""
Class that contains all Spool methods.
In the SPOOL implementation there is no notion of users only addresses.
All addresses come from BIP32 HD wallets. This makes it easier to manage all the keys
since we can retrieve everything we need from a master secret (namely the private key
to sign the transactions).
Since we are dealing with HD wallets we expect all ``from_address`` to be a
tuple of ``(path, address)`` so that we can retrieve the private key for
that particular leaf address. If we want to use the root address we can
just pass an empty string to the first element of the tuple e.g.
``('', address)``. For instance when using the federation wallet address we
have no need to create leaf addresses.
A file is represented by two hashes:
- ``file_hash``: is the hash of the digital file
- ``file_hash_metadata``: is the hash of the digital file + metadata
The hash is passed to the methods has a tuple: ``(file_hash, file_hash_metadata)``
Attributes:
FEE (int): transaction fee
TOKEN (int): token
SPENTS_QUEUE_MAXSIZE (int): spent outputs queue maximum size
"""
# Default transaction fee in satoshi; overridable per-instance via the
# ``fee`` constructor argument.
FEE = 30000
# Default token value in satoshi, used to register hashes in the
# blockchain; overridable via the ``token`` constructor argument.
TOKEN = 3000
# Maximum number of recently reserved outputs remembered in the
# ``_spents`` queue (see ``select_inputs``) to avoid double-spends on
# rapid consecutive calls.
SPENTS_QUEUE_MAXSIZE = 50
def __init__(self, testnet=False, service='blockr', username='',
             password='', host='', port='', fee=None, token=None):
    """
    Args:
        testnet (bool): Whether to use the mainnet or testnet.
            Defaults to the mainnet (:const:`False`).
        service (str): Bitcoin communication interface: ``'blockr'``,
            ``'daemon'``, or ``'regtest'``. ``'blockr'`` refers to the
            public api, whereas ``'daemon'`` and ``'regtest'`` refer
            to the jsonrpc interface. Defaults to ``'blockr'``.
        username (str): username for jsonrpc communications
        password (str): password for jsonrpc communications
        host (str): hostname of the bitcoin node when using jsonrpc
        port (str): port number of the bitcoin node when using jsonrpc
        fee (int): transaction fee; falls back to :const:`FEE` when falsy
        token (int): token value; falls back to :const:`TOKEN` when falsy
    """
    self.testnet = testnet
    # Network code used by the underlying wallet/key machinery.
    self._netcode = 'XTN' if testnet else 'BTC'
    self._t = Transactions(service=service, testnet=testnet, username=username,
                           password=password, host=host, port=port)
    # simple cache for spent outputs. Useful so that rapid firing
    # transactions don't use the same outputs
    self._spents = Queue(maxsize=self.SPENTS_QUEUE_MAXSIZE)
    self.fee = fee or self.FEE
    self.token = token or self.TOKEN
@dispatch
def register_piece(self, from_address, to_address, hash, password, min_confirmations=6, sync=False, ownership=True):
    """
    Register a piece.

    Args:
        from_address (Tuple[str]): Federation (path, address). All register
            transactions originate from the Federation wallet
        to_address (str): Address registering the piece
        hash (Tuple[str]): Hash of the piece. (file_hash, file_hash_metadata)
        password (str): Federation wallet password. For signing the transaction
        min_confirmations (int): Override the number of confirmations when
            choosing the inputs of the transaction. Defaults to 6
        sync (bool): Perform the transaction in synchronous mode, the call to the
            function will block until there is at least one confirmation on
            the blockchain. Defaults to False
        ownership (bool): Check ownership in the blockchain before pushing the
            transaction. Defaults to True

    Returns:
        str: transaction id
    """
    file_hash, file_hash_metadata = hash
    path, from_address = from_address
    verb = Spoolverb()
    unsigned_tx = self.simple_spool_transaction(from_address,
                                                [file_hash, file_hash_metadata, to_address],
                                                op_return=verb.piece,
                                                min_confirmations=min_confirmations)
    # Federation transactions are signed with the root key (no HD path).
    signed_tx = self._t.sign_transaction(unsigned_tx, password)
    txid = self._t.push(signed_tx)
    return txid
@dispatch
def register(self, from_address, to_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
    """
    Register an edition or master edition of a piece.

    Args:
        from_address (Tuple[str]): Federation (path, address). All register transactions originate from the Federation wallet
        to_address (str): Address registering the edition
        hash (Tuple[str])): Hash of the piece. Tuple (file_hash, file_hash_metadata)
        password (str): Federation wallet password. For signing the transaction
        edition_num (int): The number of the edition to register. Use edition_num=0 to register the master edition
        min_confirmations (int): Override the number of confirmations when choosing the inputs of the transaction. Defaults to 6
        sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
            least one confirmation on the blockchain. Defaults to False
        ownership (bool): Check ownership in the blockchain before pushing the transaction. Defaults to True

    Returns:
        str: transaction id
    """
    file_hash, file_hash_metadata = hash
    path, from_address = from_address
    verb = Spoolverb(edition_num=edition_num)
    unsigned_tx = self.simple_spool_transaction(from_address,
                                                [file_hash, file_hash_metadata, to_address],
                                                op_return=verb.register,
                                                min_confirmations=min_confirmations)
    # Federation transactions are signed with the root key (no HD path).
    signed_tx = self._t.sign_transaction(unsigned_tx, password)
    txid = self._t.push(signed_tx)
    return txid
@dispatch
def consigned_registration(self, from_address, to_address, hash, password, min_confirmations=6, sync=False, ownership=True):
    """
    Register an edition or master edition of a piece consigned to ``from_address``.

    Args:
        from_address (Tuple[str])): Federation (path, address). All register transactions originate from the Federation wallet
        to_address (str): Address registering the edition
        hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
        password (str): Federation wallet password. For signing the transaction
        min_confirmations (int): Override the number of confirmations when choosing the inputs of the transaction. Defaults to 6
        sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
            least one confirmation on the blockchain. Defaults to False
        ownership (bool): Check ownership in the blockchain before pushing the transaction. Defaults to True

    Returns:
        str: transaction id
    """
    file_hash, file_hash_metadata = hash
    path, from_address = from_address
    verb = Spoolverb()
    unsigned_tx = self.simple_spool_transaction(from_address,
                                                [file_hash, file_hash_metadata, to_address],
                                                op_return=verb.consigned_registration,
                                                min_confirmations=min_confirmations)
    # Federation transactions are signed with the root key (no HD path).
    signed_tx = self._t.sign_transaction(unsigned_tx, password)
    txid = self._t.push(signed_tx)
    return txid
@dispatch
def editions(self, from_address, to_address, hash, password, num_editions, min_confirmations=6, sync=False, ownership=True):
    """
    Register the number of editions of a piece.

    Args:
        from_address (Tuple[str]): Federation (path, address). All register transactions originate from the Federation wallet
        to_address (str): Address registering the number of editions
        hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
        password (str): Federation wallet password. For signing the transaction
        num_editions (int): Number of editions of the piece
        min_confirmations (int): Number of confirmations when choosing the inputs of the transaction. Defaults to 6
        sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
            least one confirmation on the blockchain. Defaults to False
        ownership (bool): Check ownership in the blockchain before pushing the transaction. Defaults to True

    Returns:
        str: transaction id
    """
    file_hash, file_hash_metadata = hash
    path, from_address = from_address
    # num_editions is encoded inside the op_return payload by Spoolverb.
    verb = Spoolverb(num_editions=num_editions)
    unsigned_tx = self.simple_spool_transaction(from_address,
                                                [file_hash, file_hash_metadata, to_address],
                                                op_return=verb.editions,
                                                min_confirmations=min_confirmations)
    # Federation transactions are signed with the root key (no HD path).
    signed_tx = self._t.sign_transaction(unsigned_tx, password)
    txid = self._t.push(signed_tx)
    return txid
@dispatch
def transfer(self, from_address, to_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
    """
    Transfer a piece between addresses.

    Args:
        from_address (Tuple[str]): (path, address) currently owning the edition
        to_address: Address to receive the edition
        hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
        password (str): Password for the wallet currently owning the edition. For signing the transaction
        edition_num (int): the number of the edition to transfer
        min_confirmations (int): Number of confirmations when choosing the inputs of the transaction. Defaults to 6
        sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
            least one confirmation on the blockchain. Defaults to False
        ownership (bool): Check ownership in the blockchain before pushing the transaction. Defaults to True

    Returns:
        str: transaction id
    """
    path, from_address = from_address
    # Only file_hash takes part in a transfer transaction.
    file_hash, file_hash_metadata = hash
    verb = Spoolverb(edition_num=edition_num)
    unsigned_tx = self.simple_spool_transaction(from_address,
                                                [file_hash, to_address],
                                                op_return=verb.transfer,
                                                min_confirmations=min_confirmations)
    # Leaf addresses need the HD-wallet path to derive the signing key.
    signed_tx = self._t.sign_transaction(unsigned_tx, password, path=path)
    txid = self._t.push(signed_tx)
    return txid
@dispatch
def consign(self, from_address, to_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
    """
    Consign a piece to an address.

    Args:
        from_address (Tuple[str]): (path, address) currently owning the edition
        to_address (str): Address to where the piece will be consigned to
        hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
        password (str): Password for the wallet currently owning the edition. For signing the transaction
        edition_num (int): the number of the edition to consign
        min_confirmations (int): Number of confirmations when choosing the inputs of the transaction. Defaults to 6
        sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
            least one confirmation on the blockchain. Defaults to False
        ownership (bool): Check ownership in the blockchain before pushing the transaction. Defaults to True

    Returns:
        str: transaction id
    """
    path, from_address = from_address
    # Only file_hash participates in the consign transaction outputs.
    file_hash, file_hash_metadata = hash
    verb = Spoolverb(edition_num=edition_num)
    unsigned_tx = self.simple_spool_transaction(from_address,
                                                [file_hash, to_address],
                                                op_return=verb.consign,
                                                min_confirmations=min_confirmations)
    # Signed with the leaf key derived from the HD-wallet path.
    signed_tx = self._t.sign_transaction(unsigned_tx, password, path=path)
    txid = self._t.push(signed_tx)
    return txid
@dispatch
def unconsign(self, from_address, to_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
    """
    Unconsign the edition.

    Args:
        from_address (Tuple[str]): (path, address) where the edition is currently consigned
        to_address (str): Address that consigned the piece to from_address
        hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
        password (str): Password for the wallet currently holding the edition. For signing the transaction
        edition_num (int): the number of the edition to unconsign
        min_confirmations (int): Number of confirmations when choosing the inputs of the transaction. Defaults to 6
        sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
            least one confirmation on the blockchain. Defaults to False
        ownership (bool): Check ownership in the blockchain before pushing the transaction. Defaults to True

    Returns:
        str: transaction id
    """
    # In an unconsignment the to_address needs to be the address that created
    # the consign transaction.
    path, from_address = from_address
    file_hash, file_hash_metadata = hash
    verb = Spoolverb(edition_num=edition_num)
    unsigned_tx = self.simple_spool_transaction(from_address,
                                                [file_hash, to_address],
                                                op_return=verb.unconsign,
                                                min_confirmations=min_confirmations)
    # Signed with the leaf key derived from the HD-wallet path.
    signed_tx = self._t.sign_transaction(unsigned_tx, password, path=path)
    txid = self._t.push(signed_tx)
    return txid
@dispatch
def loan(self, from_address, to_address, hash, password, edition_num, loan_start, loan_end, min_confirmations=6, sync=False, ownership=True):
    """
    Loan the edition.

    Args:
        from_address (Tuple[str]): (path, address) currently holding the edition
        to_address (str): Address to loan the edition to
        hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
        password (str): Password for the wallet currently holding the edition. For signing the transaction
        edition_num (int): the number of the edition to loan
        loan_start (str): Start date for the loan. In the form YYMMDD
        loan_end (str): End date for the loan. In the form YYMMDD
        min_confirmations (int): Number of confirmations when choosing the inputs of the transaction. Defaults to 6
        sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
            least one confirmation on the blockchain. Defaults to False
        ownership (bool): Check ownership in the blockchain before pushing the transaction. Defaults to True

    Returns:
        str: transaction id
    """
    path, from_address = from_address
    file_hash, file_hash_metadata = hash
    # Loan dates are encoded inside the op_return payload by Spoolverb.
    verb = Spoolverb(edition_num=edition_num, loan_start=loan_start, loan_end=loan_end)
    unsigned_tx = self.simple_spool_transaction(from_address,
                                                [file_hash, to_address],
                                                op_return=verb.loan,
                                                min_confirmations=min_confirmations)
    # Signed with the leaf key derived from the HD-wallet path.
    signed_tx = self._t.sign_transaction(unsigned_tx, password, path=path)
    txid = self._t.push(signed_tx)
    return txid
@dispatch
def migrate(self, from_address, prev_address, new_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
    """
    Migrate an edition from one address to another.

    Args:
        from_address (Tuple[str]): Federation (path, address). All register transactions originate from the Federation wallet
        prev_address (str): Address the edition is migrated away from
        new_address (str): Address the edition is migrated to
        hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
        password (str): Federation wallet password. For signing the transaction
        edition_num (int): The number of the edition to migrate
        min_confirmations (int): Override the number of confirmations when choosing the inputs of the transaction. Defaults to 6
        sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
            least one confirmation on the blockchain. Defaults to False
        ownership (bool): Check ownership in the blockchain before pushing the transaction. Defaults to True

    Returns:
        str: transaction id
    """
    file_hash, file_hash_metadata = hash
    path, from_address = from_address
    verb = Spoolverb(edition_num=edition_num)
    # Token outputs go to the hash, the old holder and the new holder.
    unsigned_tx = self.simple_spool_transaction(from_address,
                                                [file_hash, prev_address, new_address],
                                                op_return=verb.migrate,
                                                min_confirmations=min_confirmations)
    # Federation transactions are signed with the root key (no HD path).
    signed_tx = self._t.sign_transaction(unsigned_tx, password)
    txid = self._t.push(signed_tx)
    return txid
@dispatch
def refill_main_wallet(self, from_address, to_address, nfees, ntokens, password, min_confirmations=6, sync=False):
    """
    Refill the Federation wallet with tokens and fees. This keeps the federation wallet clean.

    Dealing with exact values simplifies the transactions. No need to calculate change. Easier to keep track of the
    unspents and prevent double spends that would result in transactions being rejected by the bitcoin network.

    Args:
        from_address (Tuple[str]): Refill wallet (path, address). Refills the federation wallet with tokens and fees
        to_address (str): Federation wallet address
        nfees (int): Number of fee outputs to transfer. Each fee output is ``self.fee``
            satoshi (30000 by default). Used to pay for the transactions
        ntokens (int): Number of token outputs to transfer. Each token is ``self.token``
            satoshi (3000 by default). Used to register hashes in the blockchain
        password (str): Password for the Refill wallet. Used to sign the transaction
        min_confirmations (int): Number of confirmations when choosing the inputs of the transaction. Defaults to 6
        sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
            least one confirmation on the blockchain. Defaults to False

    Returns:
        str: transaction id
    """
    path, from_address = from_address
    # Exact-value outputs only: nfees outputs of self.fee and ntokens
    # outputs of self.token, so select_inputs can match amounts precisely.
    unsigned_tx = self._t.simple_transaction(from_address,
                                             [(to_address, self.fee)] * nfees + [(to_address, self.token)] * ntokens,
                                             min_confirmations=min_confirmations)
    # Refill wallet transactions are signed with the root key (no HD path).
    signed_tx = self._t.sign_transaction(unsigned_tx, password)
    txid = self._t.push(signed_tx)
    return txid
def simple_spool_transaction(self, from_address, to, op_return, min_confirmations=6):
    """
    Utility function to create the spool transactions. Selects the inputs,
    encodes the op_return and constructs the transaction.

    Note: a stray ``@dispatch`` decorator was removed from this method.
    ``dispatch`` is applied only to the public verb methods, which take a
    ``(path, address)`` tuple plus ``sync``/``ownership`` keyword arguments;
    this internal helper takes a plain address string and none of those
    keywords, so decorating it would change its call contract.

    Args:
        from_address (str): Address originating the transaction
        to (str): list of addresses to receive tokens (file_hash, file_hash_metadata, ...)
        op_return (str): String representation of the spoolverb, as returned by the properties of Spoolverb
        min_confirmations (int): Number of confirmations when choosing the inputs of the transaction. Defaults to 6

    Returns:
        str: unsigned transaction
    """
    # One token output per destination address.
    ntokens = len(to)
    # Integer number of pre-made fee outputs needed to cover the network fee.
    nfees = old_div(self._t.estimate_fee(ntokens, 2), self.fee)
    inputs = self.select_inputs(from_address, nfees, ntokens, min_confirmations=min_confirmations)
    # outputs
    outputs = [{'address': to_address, 'value': self.token} for to_address in to]
    outputs += [{'script': self._t._op_return_hex(op_return), 'value': 0}]
    # build transaction
    unsigned_tx = self._t.build_transaction(inputs, outputs)
    return unsigned_tx
def select_inputs(self, address, nfees, ntokens, min_confirmations=6):
    """Select fee and token outputs to fund a spool transaction.

    Picks ``nfees`` unspent outputs whose amount equals the fee and
    ``ntokens`` whose amount equals the token, skipping outputs already
    reserved in the local spent-outputs queue.

    Args:
        address (str): bitcoin address to select inputs for
        nfees (int): number of fees
        ntokens (int): number of tokens
        min_confirmations (Optional[int]): minimum number of required
            confirmations; defaults to 6

    Raises:
        Exception: if the address has no spendable outputs at all
        SpoolFundsError: if there are not enough fee/token sized outputs
    """
    available = self._t.get(address, min_confirmations=min_confirmations)['unspents']
    # skip outputs we handed out recently, so rapid-fire transactions
    # do not double spend
    available = [utxo for utxo in available if utxo not in self._spents.queue]
    if not available:
        raise Exception("No spendable outputs found")

    fee_utxos = [utxo for utxo in available if utxo['amount'] == self.fee][:nfees]
    token_utxos = [utxo for utxo in available if utxo['amount'] == self.token][:ntokens]
    if len(fee_utxos) != nfees or len(token_utxos) != ntokens:
        raise SpoolFundsError("Not enough outputs to spend. Refill your wallet")
    # evict oldest cache entries so the bounded queue has room for the new ones
    if self._spents.qsize() > self.SPENTS_QUEUE_MAXSIZE - (nfees + ntokens):
        overflow = self._spents.qsize() + nfees + ntokens - self.SPENTS_QUEUE_MAXSIZE
        for _ in range(overflow):
            self._spents.get()
    for utxo in fee_utxos:
        self._spents.put(utxo)
    for utxo in token_utxos:
        self._spents.put(utxo)
    return fee_utxos + token_utxos
|
ascribe/pyspool | spool/spool.py | Spool.simple_spool_transaction | python | def simple_spool_transaction(self, from_address, to, op_return, min_confirmations=6):
# list of addresses to send
ntokens = len(to)
nfees = old_div(self._t.estimate_fee(ntokens, 2), self.fee)
inputs = self.select_inputs(from_address, nfees, ntokens, min_confirmations=min_confirmations)
# outputs
outputs = [{'address': to_address, 'value': self.token} for to_address in to]
outputs += [{'script': self._t._op_return_hex(op_return), 'value': 0}]
# build transaction
unsigned_tx = self._t.build_transaction(inputs, outputs)
return unsigned_tx | Utility function to create the spool transactions. Selects the inputs,
encodes the op_return and constructs the transaction.
Args:
from_address (str): Address originating the transaction
to (str): list of addresses to receive tokens (file_hash, file_hash_metadata, ...)
op_return (str): String representation of the spoolverb, as returned by the properties of Spoolverb
min_confirmations (int): Number of confirmations when choosing the inputs of the transaction. Defaults to 6
Returns:
str: unsigned transaction | train | https://github.com/ascribe/pyspool/blob/f8b10df1e7d2ea7950dde433c1cb6d5225112f4f/spool/spool.py#L451-L475 | [
"def select_inputs(self, address, nfees, ntokens, min_confirmations=6):\n \"\"\"\n Selects the inputs for the spool transaction.\n\n Args:\n address (str): bitcoin address to select inputs for\n nfees (int): number of fees\n ntokens (int): number of tokens\n min_confirmations (Optional[int]): minimum number of required\n confirmations; defaults to 6\n\n \"\"\"\n unspents = self._t.get(address, min_confirmations=min_confirmations)['unspents']\n unspents = [u for u in unspents if u not in self._spents.queue]\n if len(unspents) == 0:\n raise Exception(\"No spendable outputs found\")\n\n fees = [u for u in unspents if u['amount'] == self.fee][:nfees]\n tokens = [u for u in unspents if u['amount'] == self.token][:ntokens]\n if len(fees) != nfees or len(tokens) != ntokens:\n raise SpoolFundsError(\"Not enough outputs to spend. Refill your wallet\")\n if self._spents.qsize() > self.SPENTS_QUEUE_MAXSIZE - (nfees + ntokens):\n [self._spents.get() for i in range(self._spents.qsize() + nfees + ntokens - self.SPENTS_QUEUE_MAXSIZE)]\n [self._spents.put(fee) for fee in fees]\n [self._spents.put(token) for token in tokens]\n return fees + tokens\n"
] | class Spool(object):
"""
Class that contains all Spool methods.
In the SPOOL implementation there is no notion of users only addresses.
All addresses come from BIP32 HD wallets. This makes it easier to manage all the keys
since we can retrieve everything we need from a master secret (namely the private key
to sign the transactions).
Since we are dealing with HD wallets we expect all ``from_address`` to be a
tuple of ``(path, address)`` so that we can retrieve the private key for
that particular leaf address. If we want to use the root address we can
just pass an empty string to the first element of the tuple e.g.
``('', address)``. For instance when using the federation wallet address we
have no need to create leaf addresses.
A file is represented by two hashes:
- ``file_hash``: is the hash of the digital file
- ``file_hash_metadata``: is the hash of the digital file + metadata
The hash is passed to the methods has a tuple: ``(file_hash, file_hash_metadata)``
Attributes:
FEE (int): transaction fee
TOKEN (int): token
SPENTS_QUEUE_MAXSIZE (int): spent outputs queue maximum size
"""
FEE = 30000
TOKEN = 3000
SPENTS_QUEUE_MAXSIZE = 50
def __init__(self, testnet=False, service='blockr', username='',
             password='', host='', port='', fee=None, token=None):
    """Set up the bitcoin interface and the spent-outputs cache.

    Args:
        testnet (bool): whether to use the mainnet or testnet; defaults
            to the mainnet (False)
        service (str): bitcoin communication interface: 'blockr',
            'daemon', or 'regtest'; defaults to 'blockr'
        username (str): username for jsonrpc communications
        password (str): password for jsonrpc communications
        host (str): hostname of the bitcoin node when using jsonrpc
        port (str): port number of the bitcoin node when using jsonrpc
        fee (int): transaction fee; defaults to Spool.FEE
        token (int): token amount; defaults to Spool.TOKEN
    """
    self.testnet = testnet
    if testnet:
        self._netcode = 'XTN'
    else:
        self._netcode = 'BTC'
    self.fee = fee if fee else self.FEE
    self.token = token if token else self.TOKEN
    self._t = Transactions(service=service, testnet=testnet, username=username,
                           password=password, host=host, port=port)
    # bounded cache of recently spent outputs so rapid-fire transactions
    # don't reuse the same inputs
    self._spents = Queue(maxsize=self.SPENTS_QUEUE_MAXSIZE)
@dispatch
def register_piece(self, from_address, to_address, hash, password, min_confirmations=6, sync=False, ownership=True):
    """Register a piece.

    Args:
        from_address (Tuple[str]): Federation wallet (path, address); all
            register transactions originate from the Federation wallet
        to_address (str): address registering the piece
        hash (Tuple[str]): hash of the piece as (file_hash, file_hash_metadata)
        password (str): Federation wallet password, used to sign the transaction
        min_confirmations (int): number of confirmations required on the
            inputs of the transaction; defaults to 6
        sync (bool): perform the transaction in synchronous mode; the call
            blocks until there is at least one confirmation on the
            blockchain; defaults to False
        ownership (bool): check ownership in the blockchain before pushing
            the transaction; defaults to True

    Returns:
        str: transaction id
    """
    piece_hash, piece_hash_metadata = hash
    _, sender = from_address
    spoolverb = Spoolverb()
    raw_tx = self.simple_spool_transaction(
        sender, [piece_hash, piece_hash_metadata, to_address],
        op_return=spoolverb.piece, min_confirmations=min_confirmations)
    signed = self._t.sign_transaction(raw_tx, password)
    return self._t.push(signed)
@dispatch
def register(self, from_address, to_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
    """Register an edition or the master edition of a piece.

    Args:
        from_address (Tuple[str]): Federation wallet (path, address); all
            register transactions originate from the Federation wallet
        to_address (str): address registering the edition
        hash (Tuple[str]): hash of the piece as (file_hash, file_hash_metadata)
        password (str): Federation wallet password, used to sign the transaction
        edition_num (int): number of the edition to register; use
            edition_num=0 to register the master edition
        min_confirmations (int): number of confirmations required on the
            inputs of the transaction; defaults to 6
        sync (bool): perform the transaction in synchronous mode; the call
            blocks until there is at least one confirmation on the
            blockchain; defaults to False
        ownership (bool): check ownership in the blockchain before pushing
            the transaction; defaults to True

    Returns:
        str: transaction id
    """
    piece_hash, piece_hash_metadata = hash
    _, sender = from_address
    spoolverb = Spoolverb(edition_num=edition_num)
    raw_tx = self.simple_spool_transaction(
        sender, [piece_hash, piece_hash_metadata, to_address],
        op_return=spoolverb.register, min_confirmations=min_confirmations)
    signed = self._t.sign_transaction(raw_tx, password)
    return self._t.push(signed)
@dispatch
def consigned_registration(self, from_address, to_address, hash, password, min_confirmations=6, sync=False, ownership=True):
    """Register a piece consigned to ``from_address``.

    Args:
        from_address (Tuple[str]): Federation wallet (path, address); all
            register transactions originate from the Federation wallet
        to_address (str): address registering the piece
        hash (Tuple[str]): hash of the piece as (file_hash, file_hash_metadata)
        password (str): Federation wallet password, used to sign the transaction
        min_confirmations (int): number of confirmations required on the
            inputs of the transaction; defaults to 6
        sync (bool): perform the transaction in synchronous mode; the call
            blocks until there is at least one confirmation on the
            blockchain; defaults to False
        ownership (bool): check ownership in the blockchain before pushing
            the transaction; defaults to True

    Returns:
        str: transaction id
    """
    piece_hash, piece_hash_metadata = hash
    _, sender = from_address
    spoolverb = Spoolverb()
    raw_tx = self.simple_spool_transaction(
        sender, [piece_hash, piece_hash_metadata, to_address],
        op_return=spoolverb.consigned_registration,
        min_confirmations=min_confirmations)
    signed = self._t.sign_transaction(raw_tx, password)
    return self._t.push(signed)
@dispatch
def editions(self, from_address, to_address, hash, password, num_editions, min_confirmations=6, sync=False, ownership=True):
    """Register the number of editions of a piece.

    Args:
        from_address (Tuple[str]): Federation wallet (path, address); all
            register transactions originate from the Federation wallet
        to_address (str): address registering the number of editions
        hash (Tuple[str]): hash of the piece as (file_hash, file_hash_metadata)
        password (str): Federation wallet password, used to sign the transaction
        num_editions (int): number of editions of the piece
        min_confirmations (int): number of confirmations required on the
            inputs of the transaction; defaults to 6
        sync (bool): perform the transaction in synchronous mode; the call
            blocks until there is at least one confirmation on the
            blockchain; defaults to False
        ownership (bool): check ownership in the blockchain before pushing
            the transaction; defaults to True

    Returns:
        str: transaction id
    """
    piece_hash, piece_hash_metadata = hash
    _, sender = from_address
    spoolverb = Spoolverb(num_editions=num_editions)
    raw_tx = self.simple_spool_transaction(
        sender, [piece_hash, piece_hash_metadata, to_address],
        op_return=spoolverb.editions, min_confirmations=min_confirmations)
    signed = self._t.sign_transaction(raw_tx, password)
    return self._t.push(signed)
@dispatch
def transfer(self, from_address, to_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
    """Transfer an edition of a piece between addresses.

    Args:
        from_address (Tuple[str]): (path, address) currently owning the edition
        to_address (str): address to receive the edition
        hash (Tuple[str]): hash of the piece as (file_hash, file_hash_metadata)
        password (str): password for the wallet currently owning the
            edition, used to sign the transaction
        edition_num (int): the number of the edition to transfer
        min_confirmations (int): number of confirmations required on the
            inputs of the transaction; defaults to 6
        sync (bool): perform the transaction in synchronous mode; the call
            blocks until there is at least one confirmation on the
            blockchain; defaults to False
        ownership (bool): check ownership in the blockchain before pushing
            the transaction; defaults to True

    Returns:
        str: transaction id
    """
    leaf_path, sender = from_address
    piece_hash, _ = hash
    spoolverb = Spoolverb(edition_num=edition_num)
    raw_tx = self.simple_spool_transaction(
        sender, [piece_hash, to_address],
        op_return=spoolverb.transfer, min_confirmations=min_confirmations)
    # sign with the leaf key of the owning HD-wallet address
    signed = self._t.sign_transaction(raw_tx, password, path=leaf_path)
    return self._t.push(signed)
@dispatch
def consign(self, from_address, to_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
    """Consign an edition of a piece to an address.

    Args:
        from_address (Tuple[str]): (path, address) currently owning the edition
        to_address (str): address to where the piece will be consigned
        hash (Tuple[str]): hash of the piece as (file_hash, file_hash_metadata)
        password (str): password for the wallet currently owning the
            edition, used to sign the transaction
        edition_num (int): the number of the edition to consign
        min_confirmations (int): number of confirmations required on the
            inputs of the transaction; defaults to 6
        sync (bool): perform the transaction in synchronous mode; the call
            blocks until there is at least one confirmation on the
            blockchain; defaults to False
        ownership (bool): check ownership in the blockchain before pushing
            the transaction; defaults to True

    Returns:
        str: transaction id
    """
    leaf_path, sender = from_address
    piece_hash, _ = hash
    spoolverb = Spoolverb(edition_num=edition_num)
    raw_tx = self.simple_spool_transaction(
        sender, [piece_hash, to_address],
        op_return=spoolverb.consign, min_confirmations=min_confirmations)
    # sign with the leaf key of the owning HD-wallet address
    signed = self._t.sign_transaction(raw_tx, password, path=leaf_path)
    return self._t.push(signed)
@dispatch
def unconsign(self, from_address, to_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
    """Unconsign an edition of a piece.

    Args:
        from_address (Tuple[str]): (path, address) where the edition is
            currently consigned
        to_address (str): address that consigned the piece to from_address
        hash (Tuple[str]): hash of the piece as (file_hash, file_hash_metadata)
        password (str): password for the wallet currently holding the
            edition, used to sign the transaction
        edition_num (int): the number of the edition to unconsign
        min_confirmations (int): number of confirmations required on the
            inputs of the transaction; defaults to 6
        sync (bool): perform the transaction in synchronous mode; the call
            blocks until there is at least one confirmation on the
            blockchain; defaults to False
        ownership (bool): check ownership in the blockchain before pushing
            the transaction; defaults to True

    Returns:
        str: transaction id
    """
    # In an unconsignment, to_address has to be the address that created
    # the original consign transaction.
    leaf_path, sender = from_address
    piece_hash, _ = hash
    spoolverb = Spoolverb(edition_num=edition_num)
    raw_tx = self.simple_spool_transaction(
        sender, [piece_hash, to_address],
        op_return=spoolverb.unconsign, min_confirmations=min_confirmations)
    signed = self._t.sign_transaction(raw_tx, password, path=leaf_path)
    return self._t.push(signed)
@dispatch
def loan(self, from_address, to_address, hash, password, edition_num, loan_start, loan_end, min_confirmations=6, sync=False, ownership=True):
    """Loan an edition of a piece to an address for a period of time.

    Args:
        from_address (Tuple[str]): (path, address) currently holding the edition
        to_address (str): address to loan the edition to
        hash (Tuple[str]): hash of the piece as (file_hash, file_hash_metadata)
        password (str): password for the wallet currently holding the
            edition, used to sign the transaction
        edition_num (int): the number of the edition to loan
        loan_start (str): start date for the loan, in the form YYMMDD
        loan_end (str): end date for the loan, in the form YYMMDD
        min_confirmations (int): number of confirmations required on the
            inputs of the transaction; defaults to 6
        sync (bool): perform the transaction in synchronous mode; the call
            blocks until there is at least one confirmation on the
            blockchain; defaults to False
        ownership (bool): check ownership in the blockchain before pushing
            the transaction; defaults to True

    Returns:
        str: transaction id
    """
    leaf_path, sender = from_address
    piece_hash, _ = hash
    spoolverb = Spoolverb(edition_num=edition_num,
                          loan_start=loan_start, loan_end=loan_end)
    raw_tx = self.simple_spool_transaction(
        sender, [piece_hash, to_address],
        op_return=spoolverb.loan, min_confirmations=min_confirmations)
    signed = self._t.sign_transaction(raw_tx, password, path=leaf_path)
    return self._t.push(signed)
@dispatch
def migrate(self, from_address, prev_address, new_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
    """Migrate an edition of a piece to a new address.

    Args:
        from_address (Tuple[str]): Federation wallet (path, address); the
            migrate transaction originates from the Federation wallet
        prev_address (str): address the edition is migrated from
            (receives a token output)
        new_address (str): address the edition is migrated to
            (receives a token output)
        hash (Tuple[str]): hash of the piece as (file_hash, file_hash_metadata)
        password (str): Federation wallet password, used to sign the transaction
        edition_num (int): the number of the edition to migrate
        min_confirmations (int): number of confirmations required on the
            inputs of the transaction; defaults to 6
        sync (bool): perform the transaction in synchronous mode; the call
            blocks until there is at least one confirmation on the
            blockchain; defaults to False
        ownership (bool): check ownership in the blockchain before pushing
            the transaction; defaults to True

    Returns:
        str: transaction id
    """
    piece_hash, piece_hash_metadata = hash
    _, sender = from_address
    spoolverb = Spoolverb(edition_num=edition_num)
    raw_tx = self.simple_spool_transaction(
        sender, [piece_hash, prev_address, new_address],
        op_return=spoolverb.migrate, min_confirmations=min_confirmations)
    signed = self._t.sign_transaction(raw_tx, password)
    return self._t.push(signed)
@dispatch
def refill_main_wallet(self, from_address, to_address, nfees, ntokens, password, min_confirmations=6, sync=False):
    """Refill the Federation wallet with fee and token outputs.

    Dealing with exact fee/token denominations keeps the Federation
    wallet clean: no change has to be calculated, which makes unspents
    easier to track and helps prevent double spends that would be
    rejected by the bitcoin network.

    Args:
        from_address (Tuple[str]): refill wallet (path, address); refills
            the Federation wallet with tokens and fees
        to_address (str): Federation wallet address
        nfees (int): number of fee outputs to transfer (self.fee satoshi
            each), used to pay for the transactions
        ntokens (int): number of token outputs to transfer (self.token
            satoshi each), used to register hashes in the blockchain
        password (str): password for the refill wallet, used to sign the
            transaction
        min_confirmations (int): number of confirmations required on the
            inputs of the transaction; defaults to 6
        sync (bool): perform the transaction in synchronous mode; the call
            blocks until there is at least one confirmation on the
            blockchain; defaults to False

    Returns:
        str: transaction id
    """
    _, sender = from_address
    payouts = [(to_address, self.fee)] * nfees
    payouts += [(to_address, self.token)] * ntokens
    raw_tx = self._t.simple_transaction(sender, payouts,
                                        min_confirmations=min_confirmations)
    signed = self._t.sign_transaction(raw_tx, password)
    return self._t.push(signed)
@dispatch
def refill(self, from_address, to_address, nfees, ntokens, password, min_confirmations=6, sync=False):
    """Refill a wallet with the fuel needed to perform spool transactions.

    Args:
        from_address (Tuple[str]): Federation wallet (path, address); all
            transactions to wallets holding a particular piece come from
            the Federation wallet
        to_address (str): wallet address that needs to perform a spool
            transaction
        nfees (int): number of fee outputs to transfer (self.fee satoshi
            each), used to pay for the transactions
        ntokens (int): number of token outputs to transfer (self.token
            satoshi each), used to register hashes in the blockchain
        password (str): password for the Federation wallet, used to sign
            the transaction
        min_confirmations (int): number of confirmations required on the
            inputs of the transaction; defaults to 6
        sync (bool): perform the transaction in synchronous mode; the call
            blocks until there is at least one confirmation on the
            blockchain; defaults to False

    Returns:
        str: transaction id
    """
    leaf_path, sender = from_address
    spoolverb = Spoolverb()
    # nfees + 1: the fees being refilled plus one extra fee paying for
    # this refill transaction itself
    selected = self.select_inputs(sender, nfees + 1, ntokens,
                                  min_confirmations=min_confirmations)
    outs = [{'address': to_address, 'value': self.token}] * ntokens
    outs += [{'address': to_address, 'value': self.fee}] * nfees
    outs += [{'script': self._t._op_return_hex(spoolverb.fuel), 'value': 0}]
    raw_tx = self._t.build_transaction(selected, outs)
    signed = self._t.sign_transaction(raw_tx, password, path=leaf_path)
    return self._t.push(signed)
def select_inputs(self, address, nfees, ntokens, min_confirmations=6):
    """Select fee and token outputs to fund a spool transaction.

    Picks ``nfees`` unspent outputs whose amount equals the fee and
    ``ntokens`` whose amount equals the token, skipping outputs already
    reserved in the local spent-outputs queue.

    Args:
        address (str): bitcoin address to select inputs for
        nfees (int): number of fees
        ntokens (int): number of tokens
        min_confirmations (Optional[int]): minimum number of required
            confirmations; defaults to 6

    Raises:
        Exception: if the address has no spendable outputs at all
        SpoolFundsError: if there are not enough fee/token sized outputs
    """
    available = self._t.get(address, min_confirmations=min_confirmations)['unspents']
    # skip outputs we handed out recently, so rapid-fire transactions
    # do not double spend
    available = [utxo for utxo in available if utxo not in self._spents.queue]
    if not available:
        raise Exception("No spendable outputs found")

    fee_utxos = [utxo for utxo in available if utxo['amount'] == self.fee][:nfees]
    token_utxos = [utxo for utxo in available if utxo['amount'] == self.token][:ntokens]
    if len(fee_utxos) != nfees or len(token_utxos) != ntokens:
        raise SpoolFundsError("Not enough outputs to spend. Refill your wallet")
    # evict oldest cache entries so the bounded queue has room for the new ones
    if self._spents.qsize() > self.SPENTS_QUEUE_MAXSIZE - (nfees + ntokens):
        overflow = self._spents.qsize() + nfees + ntokens - self.SPENTS_QUEUE_MAXSIZE
        for _ in range(overflow):
            self._spents.get()
    for utxo in fee_utxos:
        self._spents.put(utxo)
    for utxo in token_utxos:
        self._spents.put(utxo)
    return fee_utxos + token_utxos
|
ascribe/pyspool | spool/spool.py | Spool.select_inputs | python | def select_inputs(self, address, nfees, ntokens, min_confirmations=6):
unspents = self._t.get(address, min_confirmations=min_confirmations)['unspents']
unspents = [u for u in unspents if u not in self._spents.queue]
if len(unspents) == 0:
raise Exception("No spendable outputs found")
fees = [u for u in unspents if u['amount'] == self.fee][:nfees]
tokens = [u for u in unspents if u['amount'] == self.token][:ntokens]
if len(fees) != nfees or len(tokens) != ntokens:
raise SpoolFundsError("Not enough outputs to spend. Refill your wallet")
if self._spents.qsize() > self.SPENTS_QUEUE_MAXSIZE - (nfees + ntokens):
[self._spents.get() for i in range(self._spents.qsize() + nfees + ntokens - self.SPENTS_QUEUE_MAXSIZE)]
[self._spents.put(fee) for fee in fees]
[self._spents.put(token) for token in tokens]
return fees + tokens | Selects the inputs for the spool transaction.
Args:
address (str): bitcoin address to select inputs for
nfees (int): number of fees
ntokens (int): number of tokens
min_confirmations (Optional[int]): minimum number of required
confirmations; defaults to 6 | train | https://github.com/ascribe/pyspool/blob/f8b10df1e7d2ea7950dde433c1cb6d5225112f4f/spool/spool.py#L477-L502 | null | class Spool(object):
"""
Class that contains all Spool methods.
In the SPOOL implementation there is no notion of users only addresses.
All addresses come from BIP32 HD wallets. This makes it easier to manage all the keys
since we can retrieve everything we need from a master secret (namely the private key
to sign the transactions).
Since we are dealing with HD wallets we expect all ``from_address`` to be a
tuple of ``(path, address)`` so that we can retrieve the private key for
that particular leaf address. If we want to use the root address we can
just pass an empty string to the first element of the tuple e.g.
``('', address)``. For instance when using the federation wallet address we
have no need to create leaf addresses.
A file is represented by two hashes:
- ``file_hash``: is the hash of the digital file
- ``file_hash_metadata``: is the hash of the digital file + metadata
The hash is passed to the methods has a tuple: ``(file_hash, file_hash_metadata)``
Attributes:
FEE (int): transaction fee
TOKEN (int): token
SPENTS_QUEUE_MAXSIZE (int): spent outputs queue maximum size
"""
FEE = 30000
TOKEN = 3000
SPENTS_QUEUE_MAXSIZE = 50
def __init__(self, testnet=False, service='blockr', username='',
             password='', host='', port='', fee=None, token=None):
    """Set up the bitcoin interface and the spent-outputs cache.

    Args:
        testnet (bool): whether to use the mainnet or testnet; defaults
            to the mainnet (False)
        service (str): bitcoin communication interface: 'blockr',
            'daemon', or 'regtest'; defaults to 'blockr'
        username (str): username for jsonrpc communications
        password (str): password for jsonrpc communications
        host (str): hostname of the bitcoin node when using jsonrpc
        port (str): port number of the bitcoin node when using jsonrpc
        fee (int): transaction fee; defaults to Spool.FEE
        token (int): token amount; defaults to Spool.TOKEN
    """
    self.testnet = testnet
    if testnet:
        self._netcode = 'XTN'
    else:
        self._netcode = 'BTC'
    self.fee = fee if fee else self.FEE
    self.token = token if token else self.TOKEN
    self._t = Transactions(service=service, testnet=testnet, username=username,
                           password=password, host=host, port=port)
    # bounded cache of recently spent outputs so rapid-fire transactions
    # don't reuse the same inputs
    self._spents = Queue(maxsize=self.SPENTS_QUEUE_MAXSIZE)
@dispatch
def register_piece(self, from_address, to_address, hash, password, min_confirmations=6, sync=False, ownership=True):
    """Register a piece.

    Args:
        from_address (Tuple[str]): Federation wallet (path, address); all
            register transactions originate from the Federation wallet
        to_address (str): address registering the piece
        hash (Tuple[str]): hash of the piece as (file_hash, file_hash_metadata)
        password (str): Federation wallet password, used to sign the transaction
        min_confirmations (int): number of confirmations required on the
            inputs of the transaction; defaults to 6
        sync (bool): perform the transaction in synchronous mode; the call
            blocks until there is at least one confirmation on the
            blockchain; defaults to False
        ownership (bool): check ownership in the blockchain before pushing
            the transaction; defaults to True

    Returns:
        str: transaction id
    """
    piece_hash, piece_hash_metadata = hash
    _, sender = from_address
    spoolverb = Spoolverb()
    raw_tx = self.simple_spool_transaction(
        sender, [piece_hash, piece_hash_metadata, to_address],
        op_return=spoolverb.piece, min_confirmations=min_confirmations)
    signed = self._t.sign_transaction(raw_tx, password)
    return self._t.push(signed)
@dispatch
def register(self, from_address, to_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
    """Register an edition or the master edition of a piece.

    Args:
        from_address (Tuple[str]): Federation wallet (path, address); all
            register transactions originate from the Federation wallet
        to_address (str): address registering the edition
        hash (Tuple[str]): hash of the piece as (file_hash, file_hash_metadata)
        password (str): Federation wallet password, used to sign the transaction
        edition_num (int): number of the edition to register; use
            edition_num=0 to register the master edition
        min_confirmations (int): number of confirmations required on the
            inputs of the transaction; defaults to 6
        sync (bool): perform the transaction in synchronous mode; the call
            blocks until there is at least one confirmation on the
            blockchain; defaults to False
        ownership (bool): check ownership in the blockchain before pushing
            the transaction; defaults to True

    Returns:
        str: transaction id
    """
    piece_hash, piece_hash_metadata = hash
    _, sender = from_address
    spoolverb = Spoolverb(edition_num=edition_num)
    raw_tx = self.simple_spool_transaction(
        sender, [piece_hash, piece_hash_metadata, to_address],
        op_return=spoolverb.register, min_confirmations=min_confirmations)
    signed = self._t.sign_transaction(raw_tx, password)
    return self._t.push(signed)
@dispatch
def consigned_registration(self, from_address, to_address, hash, password, min_confirmations=6, sync=False, ownership=True):
    """Register a piece consigned to ``from_address``.

    Args:
        from_address (Tuple[str]): Federation wallet (path, address); all
            register transactions originate from the Federation wallet
        to_address (str): address registering the piece
        hash (Tuple[str]): hash of the piece as (file_hash, file_hash_metadata)
        password (str): Federation wallet password, used to sign the transaction
        min_confirmations (int): number of confirmations required on the
            inputs of the transaction; defaults to 6
        sync (bool): perform the transaction in synchronous mode; the call
            blocks until there is at least one confirmation on the
            blockchain; defaults to False
        ownership (bool): check ownership in the blockchain before pushing
            the transaction; defaults to True

    Returns:
        str: transaction id
    """
    piece_hash, piece_hash_metadata = hash
    _, sender = from_address
    spoolverb = Spoolverb()
    raw_tx = self.simple_spool_transaction(
        sender, [piece_hash, piece_hash_metadata, to_address],
        op_return=spoolverb.consigned_registration,
        min_confirmations=min_confirmations)
    signed = self._t.sign_transaction(raw_tx, password)
    return self._t.push(signed)
@dispatch
def editions(self, from_address, to_address, hash, password, num_editions, min_confirmations=6, sync=False, ownership=True):
    """Register the number of editions of a piece.

    Args:
        from_address (Tuple[str]): Federation wallet (path, address); all
            register transactions originate from the Federation wallet
        to_address (str): address registering the number of editions
        hash (Tuple[str]): hash of the piece as (file_hash, file_hash_metadata)
        password (str): Federation wallet password, used to sign the transaction
        num_editions (int): number of editions of the piece
        min_confirmations (int): number of confirmations required on the
            inputs of the transaction; defaults to 6
        sync (bool): perform the transaction in synchronous mode; the call
            blocks until there is at least one confirmation on the
            blockchain; defaults to False
        ownership (bool): check ownership in the blockchain before pushing
            the transaction; defaults to True

    Returns:
        str: transaction id
    """
    piece_hash, piece_hash_metadata = hash
    _, sender = from_address
    spoolverb = Spoolverb(num_editions=num_editions)
    raw_tx = self.simple_spool_transaction(
        sender, [piece_hash, piece_hash_metadata, to_address],
        op_return=spoolverb.editions, min_confirmations=min_confirmations)
    signed = self._t.sign_transaction(raw_tx, password)
    return self._t.push(signed)
@dispatch
def transfer(self, from_address, to_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
    """Transfer an edition of a piece between addresses.

    Args:
        from_address (Tuple[str]): (path, address) currently owning the edition
        to_address (str): address to receive the edition
        hash (Tuple[str]): hash of the piece as (file_hash, file_hash_metadata)
        password (str): password for the wallet currently owning the
            edition, used to sign the transaction
        edition_num (int): the number of the edition to transfer
        min_confirmations (int): number of confirmations required on the
            inputs of the transaction; defaults to 6
        sync (bool): perform the transaction in synchronous mode; the call
            blocks until there is at least one confirmation on the
            blockchain; defaults to False
        ownership (bool): check ownership in the blockchain before pushing
            the transaction; defaults to True

    Returns:
        str: transaction id
    """
    leaf_path, sender = from_address
    piece_hash, _ = hash
    spoolverb = Spoolverb(edition_num=edition_num)
    raw_tx = self.simple_spool_transaction(
        sender, [piece_hash, to_address],
        op_return=spoolverb.transfer, min_confirmations=min_confirmations)
    # sign with the leaf key of the owning HD-wallet address
    signed = self._t.sign_transaction(raw_tx, password, path=leaf_path)
    return self._t.push(signed)
@dispatch
def consign(self, from_address, to_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
"""
Consign a piece to an address
Args:
from_address (Tuple[str]): Address currently owning the edition
to_address (str): Address to where the piece will be consigned to
hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
password (str): Password for the wallet currently owning the edition. For signing the transaction
edition_num (int): the number of the edition to consign
min_confirmations (int): Number of confirmations when chosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
least on confirmation on the blockchain. Defaults to False
ownership (bool): Check ownsership in the blockchain before pushing the transaction. Defaults to True
Returns:
str: transaction id
"""
path, from_address = from_address
file_hash, file_hash_metadata = hash
verb = Spoolverb(edition_num=edition_num)
unsigned_tx = self.simple_spool_transaction(from_address,
[file_hash, to_address],
op_return=verb.consign,
min_confirmations=min_confirmations)
signed_tx = self._t.sign_transaction(unsigned_tx, password, path=path)
txid = self._t.push(signed_tx)
return txid
@dispatch
def unconsign(self, from_address, to_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
"""
Unconsign the edition
Args:
from_address (Tuple[str]): Address where the edition is currently consigned
to_address (str): Address that consigned the piece to from_address
hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
password (str): Password for the wallet currently holding the edition. For signing the transaction
edition_num (int): the number of the edition to unconsign
min_confirmations (int): Number of confirmations when chosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
least on confirmation on the blockchain. Defaults to False
ownership (bool): Check ownsership in the blockchain before pushing the transaction. Defaults to True
Returns:
str: transaction id
"""
# In an unconsignment the to_address needs to be the address that created the consign transaction
path, from_address = from_address
file_hash, file_hash_metadata = hash
verb = Spoolverb(edition_num=edition_num)
unsigned_tx = self.simple_spool_transaction(from_address,
[file_hash, to_address],
op_return=verb.unconsign,
min_confirmations=min_confirmations)
signed_tx = self._t.sign_transaction(unsigned_tx, password, path=path)
txid = self._t.push(signed_tx)
return txid
@dispatch
def loan(self, from_address, to_address, hash, password, edition_num, loan_start, loan_end, min_confirmations=6, sync=False, ownership=True):
"""
Loan the edition
Args:
from_address (Tuple[str]): Address currently holding the edition
to_address (str): Address to loan the edition to
hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
password (str): Password for the wallet currently holding the edition. For signing the transaction
edition_num (int): the number of the edition to loan
loan_start (str): Start date for the loan. In the form YYMMDD
loan_end (str): End date for the loan. In the form YYMMDD
min_confirmations (int): Number of confirmations when chosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
least on confirmation on the blockchain. Defaults to False
ownership (bool): Check ownsership in the blockchain before pushing the transaction. Defaults to True
Returns:
str: transaction id
"""
path, from_address = from_address
file_hash, file_hash_metadata = hash
verb = Spoolverb(edition_num=edition_num, loan_start=loan_start, loan_end=loan_end)
unsigned_tx = self.simple_spool_transaction(from_address,
[file_hash, to_address],
op_return=verb.loan,
min_confirmations=min_confirmations)
signed_tx = self._t.sign_transaction(unsigned_tx, password, path=path)
txid = self._t.push(signed_tx)
return txid
@dispatch
def migrate(self, from_address, prev_address, new_address, hash, password, edition_num, min_confirmations=6, sync=False, ownership=True):
"""
Migrate an edition
Args:
from_address (Tuple[str]): Federation address. All register transactions originate from the the Federation wallet
to_address (str): Address registering the edition
hash (Tuple[str]): Hash of the piece. Tuple (file_hash, file_hash_metadata)
password (str): Federation wallet password. For signing the transaction
edition_num (int): The number of the edition to register. User edition_num=0 to register the master edition
min_confirmations (int): Override the number of confirmations when chosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
least on confirmation on the blockchain. Defaults to False
ownership (bool): Check ownsership in the blockchain before pushing the transaction. Defaults to True
Returns:
str: transaction id
"""
file_hash, file_hash_metadata = hash
path, from_address = from_address
verb = Spoolverb(edition_num=edition_num)
unsigned_tx = self.simple_spool_transaction(from_address,
[file_hash, prev_address, new_address],
op_return=verb.migrate,
min_confirmations=min_confirmations)
signed_tx = self._t.sign_transaction(unsigned_tx, password)
txid = self._t.push(signed_tx)
return txid
@dispatch
def refill_main_wallet(self, from_address, to_address, nfees, ntokens, password, min_confirmations=6, sync=False):
"""
Refill the Federation wallet with tokens and fees. This keeps the federation wallet clean.
Dealing with exact values simplifies the transactions. No need to calculate change. Easier to keep track of the
unspents and prevent double spends that would result in transactions being rejected by the bitcoin network.
Args:
from_address (Tuple[str]): Refill wallet address. Refills the federation wallet with tokens and fees
to_address (str): Federation wallet address
nfees (int): Number of fees to transfer. Each fee is 10000 satoshi. Used to pay for the transactions
ntokens (int): Number of tokens to transfer. Each token is 600 satoshi. Used to register hashes in the blockchain
password (str): Password for the Refill wallet. Used to sign the transaction
min_confirmations (int): Number of confirmations when chosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
least on confirmation on the blockchain. Defaults to False
Returns:
str: transaction id
"""
path, from_address = from_address
unsigned_tx = self._t.simple_transaction(from_address,
[(to_address, self.fee)] * nfees + [(to_address, self.token)] * ntokens,
min_confirmations=min_confirmations)
signed_tx = self._t.sign_transaction(unsigned_tx, password)
txid = self._t.push(signed_tx)
return txid
@dispatch
def refill(self, from_address, to_address, nfees, ntokens, password, min_confirmations=6, sync=False):
"""
Refill wallets with the necessary fuel to perform spool transactions
Args:
from_address (Tuple[str]): Federation wallet address. Fuels the wallets with tokens and fees. All transactions to wallets
holding a particular piece should come from the Federation wallet
to_address (str): Wallet address that needs to perform a spool transaction
nfees (int): Number of fees to transfer. Each fee is 10000 satoshi. Used to pay for the transactions
ntokens (int): Number of tokens to transfer. Each token is 600 satoshi. Used to register hashes in the blockchain
password (str): Password for the Federation wallet. Used to sign the transaction
min_confirmations (int): Number of confirmations when chosing the inputs of the transaction. Defaults to 6
sync (bool): Perform the transaction in synchronous mode, the call to the function will block until there is at
least on confirmation on the blockchain. Defaults to False
Returns:
str: transaction id
"""
path, from_address = from_address
verb = Spoolverb()
# nfees + 1: nfees to refill plus one fee for the refill transaction itself
inputs = self.select_inputs(from_address, nfees + 1, ntokens, min_confirmations=min_confirmations)
outputs = [{'address': to_address, 'value': self.token}] * ntokens
outputs += [{'address': to_address, 'value': self.fee}] * nfees
outputs += [{'script': self._t._op_return_hex(verb.fuel), 'value': 0}]
unsigned_tx = self._t.build_transaction(inputs, outputs)
signed_tx = self._t.sign_transaction(unsigned_tx, password, path=path)
txid = self._t.push(signed_tx)
return txid
def simple_spool_transaction(self, from_address, to, op_return, min_confirmations=6):
"""
Utililty function to create the spool transactions. Selects the inputs,
encodes the op_return and constructs the transaction.
Args:
from_address (str): Address originating the transaction
to (str): list of addresses to receive tokens (file_hash, file_hash_metadata, ...)
op_return (str): String representation of the spoolverb, as returned by the properties of Spoolverb
min_confirmations (int): Number of confirmations when chosing the inputs of the transaction. Defaults to 6
Returns:
str: unsigned transaction
"""
# list of addresses to send
ntokens = len(to)
nfees = old_div(self._t.estimate_fee(ntokens, 2), self.fee)
inputs = self.select_inputs(from_address, nfees, ntokens, min_confirmations=min_confirmations)
# outputs
outputs = [{'address': to_address, 'value': self.token} for to_address in to]
outputs += [{'script': self._t._op_return_hex(op_return), 'value': 0}]
# build transaction
unsigned_tx = self._t.build_transaction(inputs, outputs)
return unsigned_tx
|
ascribe/pyspool | spool/spoolex.py | BlockchainSpider.history | python | def history(self, hash):
txs = self._t.get(hash, max_transactions=10000)['transactions']
tree = defaultdict(list)
number_editions = 0
for tx in txs:
_tx = self._t.get(tx['txid'])
txid = _tx['txid']
verb_str = BlockchainSpider.check_script(_tx['vouts'])
verb = Spoolverb.from_verb(verb_str)
from_address, to_address, piece_address = BlockchainSpider._get_addresses(_tx)
timestamp_utc = _tx['time']
action = verb.action
edition_number = 0
if action != 'EDITIONS':
edition_number = verb.edition_number
else:
number_editions = verb.num_editions
tree[edition_number].append({'txid': txid,
'verb': verb_str,
'from_address': from_address,
'to_address': to_address,
'piece_address': piece_address,
'timestamp_utc': timestamp_utc,
'action': action,
'number_editions': number_editions,
'edition_number': edition_number})
# lets update the records with the number of editions of the piece since we do not know
# this information before the EDITIONS transaction
for edition, chain in tree.items():
[d.update({'number_editions': number_editions}) for d in chain]
return dict(tree) | Retrieve the ownership tree of all editions of a piece given the hash.
Args:
hash (str): Hash of the file to check. Can be created with the
:class:`File` class
Returns:
dict: Ownsership tree of all editions of a piece.
.. note:: For now we only support searching the blockchain by
the piece hash. | train | https://github.com/ascribe/pyspool/blob/f8b10df1e7d2ea7950dde433c1cb6d5225112f4f/spool/spoolex.py#L70-L118 | [
"def check_script(vouts):\n \"\"\"\n Looks into the vouts list of a transaction\n and returns the ``op_return`` if one exists.\n\n Args;\n vouts (list): List of outputs of a transaction.\n\n Returns:\n str: String representation of the ``op_return``.\n\n Raises:\n Exception: If no ``vout`` having a supported\n verb (:attr:`supported_actions`) is found.\n\n \"\"\"\n for vout in [v for v in vouts[::-1] if v['hex'].startswith('6a')]:\n verb = BlockchainSpider.decode_op_return(vout['hex'])\n action = Spoolverb.from_verb(verb).action\n if action in Spoolverb.supported_actions:\n return verb\n raise Exception(\"Invalid ascribe transaction\")\n",
"def _get_addresses(tx):\n \"\"\"\n Checks for the from, to, and piece address of a SPOOL transaction.\n\n Args:\n tx (dict): Transaction payload, as returned by\n :meth:`transactions.Transactions.get()`.\n\n .. note:: Formats as returned by JSON-RPC API\n ``decoderawtransaction`` have yet to be supported.\n\n Returns:\n Tuple([str]): Sender, receiver, and piece addresses.\n\n \"\"\"\n from_address = set([vin['address'] for vin in tx['vins']])\n if len(from_address) != 1:\n raise InvalidTransactionError(\"Transaction should have inputs \" \\\n \"from only one address {}\".format(from_address))\n\n # order vouts. discard the last vout since it's the op_return\n vouts = sorted(tx['vouts'], key=lambda d: d['n'])[:-1]\n piece_address = vouts[0]['address']\n to_address = vouts[-1]['address']\n from_address = from_address.pop()\n\n return from_address, to_address, piece_address\n",
"def from_verb(cls, verb):\n \"\"\"\n Constructs a :class:`Spoolverb` instance from the string\n representation of the given verb.\n\n Args:\n verb (str): representation of the verb e.g.:\n ``'ASCRIBESPOOL01LOAN12/150526150528'``. Can also be in\n binary format (:obj:`bytes`): ``b'ASCRIBESPOOL01PIECE'``.\n\n Returns:\n :class:`Spoolverb` instance.\n\n \"\"\"\n pattern = r'^(?P<meta>[A-Z]+)(?P<version>\\d+)(?P<action>[A-Z]+)(?P<arg1>\\d+)?(\\/(?P<arg2>\\d+))?$'\n try:\n verb = verb.decode()\n except AttributeError:\n pass\n match = re.match(pattern, verb)\n if not match:\n raise SpoolverbError('Invalid spoolverb: {}'.format(verb))\n\n data = match.groupdict()\n meta = data['meta']\n version = data['version']\n action = data['action']\n if action == 'EDITIONS':\n num_editions = data['arg1']\n return cls(meta=meta, version=version, action=action, num_editions=int(num_editions))\n elif action == 'LOAN':\n # TODO Review. Workaround for piece loans\n try:\n edition_num = int(data['arg1'])\n except TypeError:\n edition_num = 0\n loan_start = data['arg2'][:6]\n loan_end = data['arg2'][6:]\n return cls(meta=meta, version=version, action=action, edition_num=int(edition_num),\n loan_start=loan_start, loan_end=loan_end)\n elif action in ['FUEL', 'PIECE', 'CONSIGNEDREGISTRATION']:\n # no edition number for these verbs\n return cls(meta=meta, version=version, action=action)\n else:\n edition_num = data['arg1']\n return cls(meta=meta, version=version, action=action, edition_num=int(edition_num))\n"
] | class BlockchainSpider(object):
"""
Spool blockchain explorer. Retrieves from the blockchain
the chain of ownership of a hash created with the
`SPOOL <https://github.com/ascribe/spool>`_ protocol.
"""
def __init__(self, testnet=False, service='blockr', username='', password='', host='', port=''):
"""
Args:
testnet (bool): Whether to use the mainnet or testnet.
Defaults to the mainnet (:const:`False`).
service (str): Bitcoin communication interface: ``'blockr'``,
``'daemon'``, or ``'regtest'``. ``'blockr'`` refers to the
public api, whereas ``'daemon'`` and ``'regtest'`` refer
to the jsonrpc inteface. Defaults to ``'blockr'``.
username (str): username for jsonrpc communications
password (str): password for jsonrpc communications
hostname (str): hostname of the bitcoin node when using jsonrpc
port (str): port number of the bitcoin node when using jsonrpc
"""
self._t = Transactions(service=service, testnet=testnet, username=username,
password=password, host=host, port=port)
@staticmethod
def chain(tree, edition_number):
"""
Args:
tree (dict): Tree history of all editions of a piece.
edition_number (int): The edition number to check for.
In the case of a piece (master edition), an empty
string (``''``) or zero (``0``) can be passed.
Returns:
list: The chain of ownsership of a particular
edition of the piece ordered by time.
"""
# return the chain for an edition_number sorted by the timestamp
return sorted(tree.get(edition_number, []), key=lambda d: d['timestamp_utc'])
@staticmethod
def strip_loan(chain):
"""
Returns the chain without loan. This way we can
look at the last transaction to establish ownership.
Args:
chain (list): Chain for a particular edition.
Returns:
list: Chain with loan transactions striped
from the end of the chain.
"""
while chain[-1]['action'] == 'LOAN':
chain.pop()
return chain
@staticmethod
def pprint(tree):
"""
Utility function to pretty print the history tree of a piece.
Args:
tree (dict): History tree of a piece.
"""
p = PrettyPrinter(indent=2)
p.pprint(tree)
@staticmethod
def decode_op_return(op_return_hex):
"""
Decodes the given ``op_return`` hexadecimal
string representation into a string (:obj:`str`).
Args:
op_return_hex (str): Hexadecimal string
representation of the ``op_return``.
Returns:
str: String representation of the ``op_return``.
"""
return binascii.unhexlify(op_return_hex[4:])
@staticmethod
def check_script(vouts):
"""
Looks into the vouts list of a transaction
and returns the ``op_return`` if one exists.
Args;
vouts (list): List of outputs of a transaction.
Returns:
str: String representation of the ``op_return``.
Raises:
Exception: If no ``vout`` having a supported
verb (:attr:`supported_actions`) is found.
"""
for vout in [v for v in vouts[::-1] if v['hex'].startswith('6a')]:
verb = BlockchainSpider.decode_op_return(vout['hex'])
action = Spoolverb.from_verb(verb).action
if action in Spoolverb.supported_actions:
return verb
raise Exception("Invalid ascribe transaction")
@staticmethod
def _get_addresses(tx):
"""
Checks for the from, to, and piece address of a SPOOL transaction.
Args:
tx (dict): Transaction payload, as returned by
:meth:`transactions.Transactions.get()`.
.. note:: Formats as returned by JSON-RPC API
``decoderawtransaction`` have yet to be supported.
Returns:
Tuple([str]): Sender, receiver, and piece addresses.
"""
from_address = set([vin['address'] for vin in tx['vins']])
if len(from_address) != 1:
raise InvalidTransactionError("Transaction should have inputs " \
"from only one address {}".format(from_address))
# order vouts. discard the last vout since it's the op_return
vouts = sorted(tx['vouts'], key=lambda d: d['n'])[:-1]
piece_address = vouts[0]['address']
to_address = vouts[-1]['address']
from_address = from_address.pop()
return from_address, to_address, piece_address
@staticmethod
def _get_time_utc(time_utc_str):
"""
Convert a string representation of the time (as returned by
blockr.io api) into unix timestamp.
Args:
time_utc_str (str): String representation of the time, with the
format: `'%Y-%m-%dT%H:%M:%S %Z'`.
Returns:
int: Unix timestamp.
"""
dt = datetime.strptime(time_utc_str, TIME_FORMAT)
return int(calendar.timegm(dt.utctimetuple()))
|
ascribe/pyspool | spool/spoolex.py | BlockchainSpider.chain | python | def chain(tree, edition_number):
# return the chain for an edition_number sorted by the timestamp
return sorted(tree.get(edition_number, []), key=lambda d: d['timestamp_utc']) | Args:
tree (dict): Tree history of all editions of a piece.
edition_number (int): The edition number to check for.
In the case of a piece (master edition), an empty
string (``''``) or zero (``0``) can be passed.
Returns:
list: The chain of ownsership of a particular
edition of the piece ordered by time. | train | https://github.com/ascribe/pyspool/blob/f8b10df1e7d2ea7950dde433c1cb6d5225112f4f/spool/spoolex.py#L121-L135 | null | class BlockchainSpider(object):
"""
Spool blockchain explorer. Retrieves from the blockchain
the chain of ownership of a hash created with the
`SPOOL <https://github.com/ascribe/spool>`_ protocol.
"""
def __init__(self, testnet=False, service='blockr', username='', password='', host='', port=''):
"""
Args:
testnet (bool): Whether to use the mainnet or testnet.
Defaults to the mainnet (:const:`False`).
service (str): Bitcoin communication interface: ``'blockr'``,
``'daemon'``, or ``'regtest'``. ``'blockr'`` refers to the
public api, whereas ``'daemon'`` and ``'regtest'`` refer
to the jsonrpc inteface. Defaults to ``'blockr'``.
username (str): username for jsonrpc communications
password (str): password for jsonrpc communications
hostname (str): hostname of the bitcoin node when using jsonrpc
port (str): port number of the bitcoin node when using jsonrpc
"""
self._t = Transactions(service=service, testnet=testnet, username=username,
password=password, host=host, port=port)
def history(self, hash):
"""
Retrieve the ownership tree of all editions of a piece given the hash.
Args:
hash (str): Hash of the file to check. Can be created with the
:class:`File` class
Returns:
dict: Ownsership tree of all editions of a piece.
.. note:: For now we only support searching the blockchain by
the piece hash.
"""
txs = self._t.get(hash, max_transactions=10000)['transactions']
tree = defaultdict(list)
number_editions = 0
for tx in txs:
_tx = self._t.get(tx['txid'])
txid = _tx['txid']
verb_str = BlockchainSpider.check_script(_tx['vouts'])
verb = Spoolverb.from_verb(verb_str)
from_address, to_address, piece_address = BlockchainSpider._get_addresses(_tx)
timestamp_utc = _tx['time']
action = verb.action
edition_number = 0
if action != 'EDITIONS':
edition_number = verb.edition_number
else:
number_editions = verb.num_editions
tree[edition_number].append({'txid': txid,
'verb': verb_str,
'from_address': from_address,
'to_address': to_address,
'piece_address': piece_address,
'timestamp_utc': timestamp_utc,
'action': action,
'number_editions': number_editions,
'edition_number': edition_number})
# lets update the records with the number of editions of the piece since we do not know
# this information before the EDITIONS transaction
for edition, chain in tree.items():
[d.update({'number_editions': number_editions}) for d in chain]
return dict(tree)
@staticmethod
@staticmethod
def strip_loan(chain):
"""
Returns the chain without loan. This way we can
look at the last transaction to establish ownership.
Args:
chain (list): Chain for a particular edition.
Returns:
list: Chain with loan transactions striped
from the end of the chain.
"""
while chain[-1]['action'] == 'LOAN':
chain.pop()
return chain
@staticmethod
def pprint(tree):
"""
Utility function to pretty print the history tree of a piece.
Args:
tree (dict): History tree of a piece.
"""
p = PrettyPrinter(indent=2)
p.pprint(tree)
@staticmethod
def decode_op_return(op_return_hex):
"""
Decodes the given ``op_return`` hexadecimal
string representation into a string (:obj:`str`).
Args:
op_return_hex (str): Hexadecimal string
representation of the ``op_return``.
Returns:
str: String representation of the ``op_return``.
"""
return binascii.unhexlify(op_return_hex[4:])
@staticmethod
def check_script(vouts):
"""
Looks into the vouts list of a transaction
and returns the ``op_return`` if one exists.
Args;
vouts (list): List of outputs of a transaction.
Returns:
str: String representation of the ``op_return``.
Raises:
Exception: If no ``vout`` having a supported
verb (:attr:`supported_actions`) is found.
"""
for vout in [v for v in vouts[::-1] if v['hex'].startswith('6a')]:
verb = BlockchainSpider.decode_op_return(vout['hex'])
action = Spoolverb.from_verb(verb).action
if action in Spoolverb.supported_actions:
return verb
raise Exception("Invalid ascribe transaction")
@staticmethod
def _get_addresses(tx):
"""
Checks for the from, to, and piece address of a SPOOL transaction.
Args:
tx (dict): Transaction payload, as returned by
:meth:`transactions.Transactions.get()`.
.. note:: Formats as returned by JSON-RPC API
``decoderawtransaction`` have yet to be supported.
Returns:
Tuple([str]): Sender, receiver, and piece addresses.
"""
from_address = set([vin['address'] for vin in tx['vins']])
if len(from_address) != 1:
raise InvalidTransactionError("Transaction should have inputs " \
"from only one address {}".format(from_address))
# order vouts. discard the last vout since it's the op_return
vouts = sorted(tx['vouts'], key=lambda d: d['n'])[:-1]
piece_address = vouts[0]['address']
to_address = vouts[-1]['address']
from_address = from_address.pop()
return from_address, to_address, piece_address
@staticmethod
def _get_time_utc(time_utc_str):
"""
Convert a string representation of the time (as returned by
blockr.io api) into unix timestamp.
Args:
time_utc_str (str): String representation of the time, with the
format: `'%Y-%m-%dT%H:%M:%S %Z'`.
Returns:
int: Unix timestamp.
"""
dt = datetime.strptime(time_utc_str, TIME_FORMAT)
return int(calendar.timegm(dt.utctimetuple()))
|
ascribe/pyspool | spool/spoolex.py | BlockchainSpider.check_script | python | def check_script(vouts):
for vout in [v for v in vouts[::-1] if v['hex'].startswith('6a')]:
verb = BlockchainSpider.decode_op_return(vout['hex'])
action = Spoolverb.from_verb(verb).action
if action in Spoolverb.supported_actions:
return verb
raise Exception("Invalid ascribe transaction") | Looks into the vouts list of a transaction
and returns the ``op_return`` if one exists.
Args;
vouts (list): List of outputs of a transaction.
Returns:
str: String representation of the ``op_return``.
Raises:
Exception: If no ``vout`` having a supported
verb (:attr:`supported_actions`) is found. | train | https://github.com/ascribe/pyspool/blob/f8b10df1e7d2ea7950dde433c1cb6d5225112f4f/spool/spoolex.py#L185-L206 | [
"def decode_op_return(op_return_hex):\n \"\"\"\n Decodes the given ``op_return`` hexadecimal\n string representation into a string (:obj:`str`).\n\n Args:\n op_return_hex (str): Hexadecimal string\n representation of the ``op_return``.\n\n Returns:\n str: String representation of the ``op_return``.\n\n \"\"\"\n return binascii.unhexlify(op_return_hex[4:])\n",
"def from_verb(cls, verb):\n \"\"\"\n Constructs a :class:`Spoolverb` instance from the string\n representation of the given verb.\n\n Args:\n verb (str): representation of the verb e.g.:\n ``'ASCRIBESPOOL01LOAN12/150526150528'``. Can also be in\n binary format (:obj:`bytes`): ``b'ASCRIBESPOOL01PIECE'``.\n\n Returns:\n :class:`Spoolverb` instance.\n\n \"\"\"\n pattern = r'^(?P<meta>[A-Z]+)(?P<version>\\d+)(?P<action>[A-Z]+)(?P<arg1>\\d+)?(\\/(?P<arg2>\\d+))?$'\n try:\n verb = verb.decode()\n except AttributeError:\n pass\n match = re.match(pattern, verb)\n if not match:\n raise SpoolverbError('Invalid spoolverb: {}'.format(verb))\n\n data = match.groupdict()\n meta = data['meta']\n version = data['version']\n action = data['action']\n if action == 'EDITIONS':\n num_editions = data['arg1']\n return cls(meta=meta, version=version, action=action, num_editions=int(num_editions))\n elif action == 'LOAN':\n # TODO Review. Workaround for piece loans\n try:\n edition_num = int(data['arg1'])\n except TypeError:\n edition_num = 0\n loan_start = data['arg2'][:6]\n loan_end = data['arg2'][6:]\n return cls(meta=meta, version=version, action=action, edition_num=int(edition_num),\n loan_start=loan_start, loan_end=loan_end)\n elif action in ['FUEL', 'PIECE', 'CONSIGNEDREGISTRATION']:\n # no edition number for these verbs\n return cls(meta=meta, version=version, action=action)\n else:\n edition_num = data['arg1']\n return cls(meta=meta, version=version, action=action, edition_num=int(edition_num))\n"
] | class BlockchainSpider(object):
"""
Spool blockchain explorer. Retrieves from the blockchain
the chain of ownership of a hash created with the
`SPOOL <https://github.com/ascribe/spool>`_ protocol.
"""
def __init__(self, testnet=False, service='blockr', username='', password='', host='', port=''):
"""
Args:
testnet (bool): Whether to use the mainnet or testnet.
Defaults to the mainnet (:const:`False`).
service (str): Bitcoin communication interface: ``'blockr'``,
``'daemon'``, or ``'regtest'``. ``'blockr'`` refers to the
public api, whereas ``'daemon'`` and ``'regtest'`` refer
to the jsonrpc inteface. Defaults to ``'blockr'``.
username (str): username for jsonrpc communications
password (str): password for jsonrpc communications
hostname (str): hostname of the bitcoin node when using jsonrpc
port (str): port number of the bitcoin node when using jsonrpc
"""
self._t = Transactions(service=service, testnet=testnet, username=username,
password=password, host=host, port=port)
def history(self, hash):
"""
Retrieve the ownership tree of all editions of a piece given the hash.
Args:
hash (str): Hash of the file to check. Can be created with the
:class:`File` class
Returns:
dict: Ownsership tree of all editions of a piece.
.. note:: For now we only support searching the blockchain by
the piece hash.
"""
txs = self._t.get(hash, max_transactions=10000)['transactions']
tree = defaultdict(list)
number_editions = 0
for tx in txs:
_tx = self._t.get(tx['txid'])
txid = _tx['txid']
verb_str = BlockchainSpider.check_script(_tx['vouts'])
verb = Spoolverb.from_verb(verb_str)
from_address, to_address, piece_address = BlockchainSpider._get_addresses(_tx)
timestamp_utc = _tx['time']
action = verb.action
edition_number = 0
if action != 'EDITIONS':
edition_number = verb.edition_number
else:
number_editions = verb.num_editions
tree[edition_number].append({'txid': txid,
'verb': verb_str,
'from_address': from_address,
'to_address': to_address,
'piece_address': piece_address,
'timestamp_utc': timestamp_utc,
'action': action,
'number_editions': number_editions,
'edition_number': edition_number})
# lets update the records with the number of editions of the piece since we do not know
# this information before the EDITIONS transaction
for edition, chain in tree.items():
[d.update({'number_editions': number_editions}) for d in chain]
return dict(tree)
@staticmethod
def chain(tree, edition_number):
"""
Args:
tree (dict): Tree history of all editions of a piece.
edition_number (int): The edition number to check for.
In the case of a piece (master edition), an empty
string (``''``) or zero (``0``) can be passed.
Returns:
list: The chain of ownsership of a particular
edition of the piece ordered by time.
"""
# return the chain for an edition_number sorted by the timestamp
return sorted(tree.get(edition_number, []), key=lambda d: d['timestamp_utc'])
@staticmethod
def strip_loan(chain):
"""
Returns the chain without loan. This way we can
look at the last transaction to establish ownership.
Args:
chain (list): Chain for a particular edition.
Returns:
list: Chain with loan transactions striped
from the end of the chain.
"""
while chain[-1]['action'] == 'LOAN':
chain.pop()
return chain
@staticmethod
def pprint(tree):
"""
Utility function to pretty print the history tree of a piece.
Args:
tree (dict): History tree of a piece.
"""
p = PrettyPrinter(indent=2)
p.pprint(tree)
@staticmethod
def decode_op_return(op_return_hex):
"""
Decodes the given ``op_return`` hexadecimal
string representation into a string (:obj:`str`).
Args:
op_return_hex (str): Hexadecimal string
representation of the ``op_return``.
Returns:
str: String representation of the ``op_return``.
"""
return binascii.unhexlify(op_return_hex[4:])
@staticmethod
@staticmethod
def _get_addresses(tx):
"""
Checks for the from, to, and piece address of a SPOOL transaction.
Args:
tx (dict): Transaction payload, as returned by
:meth:`transactions.Transactions.get()`.
.. note:: Formats as returned by JSON-RPC API
``decoderawtransaction`` have yet to be supported.
Returns:
Tuple([str]): Sender, receiver, and piece addresses.
"""
from_address = set([vin['address'] for vin in tx['vins']])
if len(from_address) != 1:
raise InvalidTransactionError("Transaction should have inputs " \
"from only one address {}".format(from_address))
# order vouts. discard the last vout since it's the op_return
vouts = sorted(tx['vouts'], key=lambda d: d['n'])[:-1]
piece_address = vouts[0]['address']
to_address = vouts[-1]['address']
from_address = from_address.pop()
return from_address, to_address, piece_address
@staticmethod
def _get_time_utc(time_utc_str):
    """Convert a blockr.io-style time string into a unix timestamp.

    Args:
        time_utc_str (str): String representation of the time, formatted
            as ``'%Y-%m-%dT%H:%M:%S %Z'`` (module constant ``TIME_FORMAT``).

    Returns:
        int: Unix timestamp.
    """
    parsed = datetime.strptime(time_utc_str, TIME_FORMAT)
    return int(calendar.timegm(parsed.utctimetuple()))
|
ascribe/pyspool | spool/spoolex.py | BlockchainSpider._get_addresses | python | def _get_addresses(tx):
from_address = set([vin['address'] for vin in tx['vins']])
if len(from_address) != 1:
raise InvalidTransactionError("Transaction should have inputs " \
"from only one address {}".format(from_address))
# order vouts. discard the last vout since it's the op_return
vouts = sorted(tx['vouts'], key=lambda d: d['n'])[:-1]
piece_address = vouts[0]['address']
to_address = vouts[-1]['address']
from_address = from_address.pop()
return from_address, to_address, piece_address | Checks for the from, to, and piece address of a SPOOL transaction.
Args:
tx (dict): Transaction payload, as returned by
:meth:`transactions.Transactions.get()`.
.. note:: Formats as returned by JSON-RPC API
``decoderawtransaction`` have yet to be supported.
Returns:
Tuple([str]): Sender, receiver, and piece addresses. | train | https://github.com/ascribe/pyspool/blob/f8b10df1e7d2ea7950dde433c1cb6d5225112f4f/spool/spoolex.py#L209-L235 | null | class BlockchainSpider(object):
"""
Spool blockchain explorer. Retrieves from the blockchain
the chain of ownership of a hash created with the
`SPOOL <https://github.com/ascribe/spool>`_ protocol.
"""
def __init__(self, testnet=False, service='blockr', username='', password='', host='', port=''):
"""
Args:
testnet (bool): Whether to use the mainnet or testnet.
Defaults to the mainnet (:const:`False`).
service (str): Bitcoin communication interface: ``'blockr'``,
``'daemon'``, or ``'regtest'``. ``'blockr'`` refers to the
public api, whereas ``'daemon'`` and ``'regtest'`` refer
to the jsonrpc inteface. Defaults to ``'blockr'``.
username (str): username for jsonrpc communications
password (str): password for jsonrpc communications
hostname (str): hostname of the bitcoin node when using jsonrpc
port (str): port number of the bitcoin node when using jsonrpc
"""
self._t = Transactions(service=service, testnet=testnet, username=username,
password=password, host=host, port=port)
def history(self, hash):
"""
Retrieve the ownership tree of all editions of a piece given the hash.
Args:
hash (str): Hash of the file to check. Can be created with the
:class:`File` class
Returns:
dict: Ownsership tree of all editions of a piece.
.. note:: For now we only support searching the blockchain by
the piece hash.
"""
txs = self._t.get(hash, max_transactions=10000)['transactions']
tree = defaultdict(list)
number_editions = 0
for tx in txs:
_tx = self._t.get(tx['txid'])
txid = _tx['txid']
verb_str = BlockchainSpider.check_script(_tx['vouts'])
verb = Spoolverb.from_verb(verb_str)
from_address, to_address, piece_address = BlockchainSpider._get_addresses(_tx)
timestamp_utc = _tx['time']
action = verb.action
edition_number = 0
if action != 'EDITIONS':
edition_number = verb.edition_number
else:
number_editions = verb.num_editions
tree[edition_number].append({'txid': txid,
'verb': verb_str,
'from_address': from_address,
'to_address': to_address,
'piece_address': piece_address,
'timestamp_utc': timestamp_utc,
'action': action,
'number_editions': number_editions,
'edition_number': edition_number})
# lets update the records with the number of editions of the piece since we do not know
# this information before the EDITIONS transaction
for edition, chain in tree.items():
[d.update({'number_editions': number_editions}) for d in chain]
return dict(tree)
@staticmethod
def chain(tree, edition_number):
"""
Args:
tree (dict): Tree history of all editions of a piece.
edition_number (int): The edition number to check for.
In the case of a piece (master edition), an empty
string (``''``) or zero (``0``) can be passed.
Returns:
list: The chain of ownsership of a particular
edition of the piece ordered by time.
"""
# return the chain for an edition_number sorted by the timestamp
return sorted(tree.get(edition_number, []), key=lambda d: d['timestamp_utc'])
@staticmethod
def strip_loan(chain):
"""
Returns the chain without loan. This way we can
look at the last transaction to establish ownership.
Args:
chain (list): Chain for a particular edition.
Returns:
list: Chain with loan transactions striped
from the end of the chain.
"""
while chain[-1]['action'] == 'LOAN':
chain.pop()
return chain
@staticmethod
def pprint(tree):
"""
Utility function to pretty print the history tree of a piece.
Args:
tree (dict): History tree of a piece.
"""
p = PrettyPrinter(indent=2)
p.pprint(tree)
@staticmethod
def decode_op_return(op_return_hex):
"""
Decodes the given ``op_return`` hexadecimal
string representation into a string (:obj:`str`).
Args:
op_return_hex (str): Hexadecimal string
representation of the ``op_return``.
Returns:
str: String representation of the ``op_return``.
"""
return binascii.unhexlify(op_return_hex[4:])
@staticmethod
def check_script(vouts):
"""
Looks into the vouts list of a transaction
and returns the ``op_return`` if one exists.
Args;
vouts (list): List of outputs of a transaction.
Returns:
str: String representation of the ``op_return``.
Raises:
Exception: If no ``vout`` having a supported
verb (:attr:`supported_actions`) is found.
"""
for vout in [v for v in vouts[::-1] if v['hex'].startswith('6a')]:
verb = BlockchainSpider.decode_op_return(vout['hex'])
action = Spoolverb.from_verb(verb).action
if action in Spoolverb.supported_actions:
return verb
raise Exception("Invalid ascribe transaction")
@staticmethod
@staticmethod
def _get_time_utc(time_utc_str):
"""
Convert a string representation of the time (as returned by
blockr.io api) into unix timestamp.
Args:
time_utc_str (str): String representation of the time, with the
format: `'%Y-%m-%dT%H:%M:%S %Z'`.
Returns:
int: Unix timestamp.
"""
dt = datetime.strptime(time_utc_str, TIME_FORMAT)
return int(calendar.timegm(dt.utctimetuple()))
|
ascribe/pyspool | spool/spoolex.py | BlockchainSpider._get_time_utc | python | def _get_time_utc(time_utc_str):
dt = datetime.strptime(time_utc_str, TIME_FORMAT)
return int(calendar.timegm(dt.utctimetuple())) | Convert a string representation of the time (as returned by
blockr.io api) into unix timestamp.
Args:
time_utc_str (str): String representation of the time, with the
format: `'%Y-%m-%dT%H:%M:%S %Z'`.
Returns:
int: Unix timestamp. | train | https://github.com/ascribe/pyspool/blob/f8b10df1e7d2ea7950dde433c1cb6d5225112f4f/spool/spoolex.py#L238-L252 | null | class BlockchainSpider(object):
"""
Spool blockchain explorer. Retrieves from the blockchain
the chain of ownership of a hash created with the
`SPOOL <https://github.com/ascribe/spool>`_ protocol.
"""
def __init__(self, testnet=False, service='blockr', username='', password='', host='', port=''):
"""
Args:
testnet (bool): Whether to use the mainnet or testnet.
Defaults to the mainnet (:const:`False`).
service (str): Bitcoin communication interface: ``'blockr'``,
``'daemon'``, or ``'regtest'``. ``'blockr'`` refers to the
public api, whereas ``'daemon'`` and ``'regtest'`` refer
to the jsonrpc inteface. Defaults to ``'blockr'``.
username (str): username for jsonrpc communications
password (str): password for jsonrpc communications
hostname (str): hostname of the bitcoin node when using jsonrpc
port (str): port number of the bitcoin node when using jsonrpc
"""
self._t = Transactions(service=service, testnet=testnet, username=username,
password=password, host=host, port=port)
def history(self, hash):
"""
Retrieve the ownership tree of all editions of a piece given the hash.
Args:
hash (str): Hash of the file to check. Can be created with the
:class:`File` class
Returns:
dict: Ownsership tree of all editions of a piece.
.. note:: For now we only support searching the blockchain by
the piece hash.
"""
txs = self._t.get(hash, max_transactions=10000)['transactions']
tree = defaultdict(list)
number_editions = 0
for tx in txs:
_tx = self._t.get(tx['txid'])
txid = _tx['txid']
verb_str = BlockchainSpider.check_script(_tx['vouts'])
verb = Spoolverb.from_verb(verb_str)
from_address, to_address, piece_address = BlockchainSpider._get_addresses(_tx)
timestamp_utc = _tx['time']
action = verb.action
edition_number = 0
if action != 'EDITIONS':
edition_number = verb.edition_number
else:
number_editions = verb.num_editions
tree[edition_number].append({'txid': txid,
'verb': verb_str,
'from_address': from_address,
'to_address': to_address,
'piece_address': piece_address,
'timestamp_utc': timestamp_utc,
'action': action,
'number_editions': number_editions,
'edition_number': edition_number})
# lets update the records with the number of editions of the piece since we do not know
# this information before the EDITIONS transaction
for edition, chain in tree.items():
[d.update({'number_editions': number_editions}) for d in chain]
return dict(tree)
@staticmethod
def chain(tree, edition_number):
"""
Args:
tree (dict): Tree history of all editions of a piece.
edition_number (int): The edition number to check for.
In the case of a piece (master edition), an empty
string (``''``) or zero (``0``) can be passed.
Returns:
list: The chain of ownsership of a particular
edition of the piece ordered by time.
"""
# return the chain for an edition_number sorted by the timestamp
return sorted(tree.get(edition_number, []), key=lambda d: d['timestamp_utc'])
@staticmethod
def strip_loan(chain):
"""
Returns the chain without loan. This way we can
look at the last transaction to establish ownership.
Args:
chain (list): Chain for a particular edition.
Returns:
list: Chain with loan transactions striped
from the end of the chain.
"""
while chain[-1]['action'] == 'LOAN':
chain.pop()
return chain
@staticmethod
def pprint(tree):
"""
Utility function to pretty print the history tree of a piece.
Args:
tree (dict): History tree of a piece.
"""
p = PrettyPrinter(indent=2)
p.pprint(tree)
@staticmethod
def decode_op_return(op_return_hex):
"""
Decodes the given ``op_return`` hexadecimal
string representation into a string (:obj:`str`).
Args:
op_return_hex (str): Hexadecimal string
representation of the ``op_return``.
Returns:
str: String representation of the ``op_return``.
"""
return binascii.unhexlify(op_return_hex[4:])
@staticmethod
def check_script(vouts):
"""
Looks into the vouts list of a transaction
and returns the ``op_return`` if one exists.
Args;
vouts (list): List of outputs of a transaction.
Returns:
str: String representation of the ``op_return``.
Raises:
Exception: If no ``vout`` having a supported
verb (:attr:`supported_actions`) is found.
"""
for vout in [v for v in vouts[::-1] if v['hex'].startswith('6a')]:
verb = BlockchainSpider.decode_op_return(vout['hex'])
action = Spoolverb.from_verb(verb).action
if action in Spoolverb.supported_actions:
return verb
raise Exception("Invalid ascribe transaction")
@staticmethod
def _get_addresses(tx):
"""
Checks for the from, to, and piece address of a SPOOL transaction.
Args:
tx (dict): Transaction payload, as returned by
:meth:`transactions.Transactions.get()`.
.. note:: Formats as returned by JSON-RPC API
``decoderawtransaction`` have yet to be supported.
Returns:
Tuple([str]): Sender, receiver, and piece addresses.
"""
from_address = set([vin['address'] for vin in tx['vins']])
if len(from_address) != 1:
raise InvalidTransactionError("Transaction should have inputs " \
"from only one address {}".format(from_address))
# order vouts. discard the last vout since it's the op_return
vouts = sorted(tx['vouts'], key=lambda d: d['n'])[:-1]
piece_address = vouts[0]['address']
to_address = vouts[-1]['address']
from_address = from_address.pop()
return from_address, to_address, piece_address
@staticmethod
|
ascribe/pyspool | spool/file.py | File._calculate_hash | python | def _calculate_hash(self, filename, **kwargs):
with open(filename, 'rb') as f:
file_hash = hashlib.md5(f.read()).hexdigest()
if kwargs:
data = str(
[urepr(kwargs[k]) for k in sorted(kwargs)] + [file_hash])
else:
data = file_hash
address_piece_with_metadata = str(
bin_to_b58check(bin_hash160(data.encode()),
magicbyte=self._magicbyte)
)
address_piece = str(bin_to_b58check(bin_hash160(file_hash.encode()),
magicbyte=self._magicbyte))
return address_piece, address_piece_with_metadata | Calculates the hash of the file and the hash of the file + metadata
(passed in ``kwargs``).
Args:
filename (str): Name of the file
testnet (bool): testnet flag. Defaults to False
**kwargs: Additional metadata to be encoded with the file. Only
the values are used to compute the hash. Values are
ordered using their keys, so that the computation of the
hash is consistent. As an example, given::
File('filename', title='piece title', artist='artist')
the values ``('artist', 'piece title')`` would be used in that
order for the computation of the hash. | train | https://github.com/ascribe/pyspool/blob/f8b10df1e7d2ea7950dde433c1cb6d5225112f4f/spool/file.py#L67-L101 | null | class File(object):
"""
Class used to calculate the hash of a file and the hash of the file +
metadata to be included on the blockchain
"""
def __init__(self, filename, testnet=False, **kwargs):
"""
Args:
filename (str): Name of the file
testnet (bool): testnet flag. Defaults to False
**kwargs: Additional metadata to be encoded with the file. Only
the values are used to compute the hash. Values are
ordered using their keys, so that the computation of the
hash is consistent. As an example, given::
File('filename', title='piece title', artist='artist')
the values ``('artist', 'piece title')`` would be used in that
order for the computation of the hash.
Returns:
:class:`File` instance
"""
self.testnet = testnet
# prefix of the addresses to distinguish between mainnet and testnet
self._magicbyte = 111 if testnet else 0
self.file_hash, self.file_hash_metadata = self._calculate_hash(filename, **kwargs)
@classmethod
def from_hash(cls, hash):
"""
Args:
hash (str): hash of the file
Returns:
:class:`File` instance
"""
cls.hash = hash
return cls
|
joequant/cryptoexchange | cryptoexchange/util/bitmex-generate-api-key.py | BitMEX.create_key | python | def create_key(self):
print("Creating key. Please input the following options:")
name = input("Key name (optional): ")
print("To make this key more secure, you should restrict the IP addresses that can use it. ")
print("To use with all IPs, leave blank or use 0.0.0.0/0.")
print("To use with a single IP, append '/32', such as 207.39.29.22/32. ")
print("See this reference on CIDR blocks: http://software77.net/cidr-101.html")
cidr = input("CIDR (optional): ")
key = self._curl_bitmex("/apiKey",
postdict={"name": name, "cidr": cidr, "enabled": True})
print("Key created. Details:\n")
print("API Key: " + key["id"])
print("Secret: " + key["secret"])
print("\nSafeguard your secret key! If somebody gets a hold of your API key and secret,")
print("your account can be taken over completely.")
print("\nKey generation complete.") | Create an API key. | train | https://github.com/joequant/cryptoexchange/blob/6690fbd9a2ba00e40d7484425808c84d44233f0c/cryptoexchange/util/bitmex-generate-api-key.py#L74-L91 | [
"def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None):\n url = self.base_url + api\n if query:\n url = url + \"?\" + urlencode(query)\n if postdict:\n postdata = urlencode(postdict).encode(\"utf-8\")\n request = Request(url, postdata)\n else:\n request = Request(url)\n\n if verb:\n request.get_method = lambda: verb\n\n request.add_header('user-agent', 'BitMEX-generate-api-key')\n if self.accessToken:\n request.add_header('accessToken', self.accessToken)\n\n try:\n response = urlopen(request, timeout=timeout)\n except HTTPError as e:\n if e.code == 401:\n print(\"Login information incorrect, please check and restart.\")\n exit(1)\n # 503 - BitMEX temporary downtime, likely due to a deploy. Try again\n elif e.code == 503:\n print(\"Unable to contact the BitMEX API (503). Please try again later.\" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n exit(1)\n else:\n print(\"Error:\", e)\n print(\"Endpoint was: \" + api)\n print(\"Please try again.\")\n raise e\n except (URLError, ssl.SSLError) as e:\n print(\"Unable to contact the BitMEX API (URLError). Please check the URL. Please try again later. \" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n exit(1)\n\n return json.loads(response.read().decode(\"utf-8\"))\n"
] | class BitMEX(object):
def __init__(self, email=None, password=None, otpToken=None):
self.base_url = (BITMEX_TESTNET if USE_TESTNET else BITMEX_PRODUCTION) + "/api/v1"
self.accessToken = None
self.accessToken = self._curl_bitmex("/user/login",
postdict={"email": email, "password": password, "token": otpToken})["id"]
def list_keys(self):
"""List your API Keys."""
keys = self._curl_bitmex("/apiKey/")
print(json.dumps(keys, sort_keys=True, indent=4))
def enable_key(self):
"""Enable an existing API Key."""
print("This command will enable a disabled key.")
apiKeyID = input("API Key ID: ")
try:
key = self._curl_bitmex("/apiKey/enable",
postdict={"apiKeyID": apiKeyID})
print("Key with ID %s enabled." % key["id"])
except:
print("Unable to enable key, please try again.")
self.enable_key()
def disable_key(self):
"""Disable an existing API Key."""
print("This command will disable a enabled key.")
apiKeyID = input("API Key ID: ")
try:
key = self._curl_bitmex("/apiKey/disable",
postdict={"apiKeyID": apiKeyID})
print("Key with ID %s disabled." % key["id"])
except:
print("Unable to disable key, please try again.")
self.disable_key()
def delete_key(self):
"""Delete an existing API Key."""
print("This command will delete an API key.")
apiKeyID = input("API Key ID: ")
try:
self._curl_bitmex("/apiKey/",
postdict={"apiKeyID": apiKeyID}, verb='DELETE')
print("Key with ID %s disabled." % apiKeyID)
except:
print("Unable to delete key, please try again.")
self.delete_key()
def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None):
url = self.base_url + api
if query:
url = url + "?" + urlencode(query)
if postdict:
postdata = urlencode(postdict).encode("utf-8")
request = Request(url, postdata)
else:
request = Request(url)
if verb:
request.get_method = lambda: verb
request.add_header('user-agent', 'BitMEX-generate-api-key')
if self.accessToken:
request.add_header('accessToken', self.accessToken)
try:
response = urlopen(request, timeout=timeout)
except HTTPError as e:
if e.code == 401:
print("Login information incorrect, please check and restart.")
exit(1)
# 503 - BitMEX temporary downtime, likely due to a deploy. Try again
elif e.code == 503:
print("Unable to contact the BitMEX API (503). Please try again later." +
"Request: %s \n %s" % (url, json.dumps(postdict)))
exit(1)
else:
print("Error:", e)
print("Endpoint was: " + api)
print("Please try again.")
raise e
except (URLError, ssl.SSLError) as e:
print("Unable to contact the BitMEX API (URLError). Please check the URL. Please try again later. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
exit(1)
return json.loads(response.read().decode("utf-8"))
|
joequant/cryptoexchange | cryptoexchange/util/bitmex-generate-api-key.py | BitMEX.list_keys | python | def list_keys(self):
keys = self._curl_bitmex("/apiKey/")
print(json.dumps(keys, sort_keys=True, indent=4)) | List your API Keys. | train | https://github.com/joequant/cryptoexchange/blob/6690fbd9a2ba00e40d7484425808c84d44233f0c/cryptoexchange/util/bitmex-generate-api-key.py#L93-L96 | [
"def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None):\n url = self.base_url + api\n if query:\n url = url + \"?\" + urlencode(query)\n if postdict:\n postdata = urlencode(postdict).encode(\"utf-8\")\n request = Request(url, postdata)\n else:\n request = Request(url)\n\n if verb:\n request.get_method = lambda: verb\n\n request.add_header('user-agent', 'BitMEX-generate-api-key')\n if self.accessToken:\n request.add_header('accessToken', self.accessToken)\n\n try:\n response = urlopen(request, timeout=timeout)\n except HTTPError as e:\n if e.code == 401:\n print(\"Login information incorrect, please check and restart.\")\n exit(1)\n # 503 - BitMEX temporary downtime, likely due to a deploy. Try again\n elif e.code == 503:\n print(\"Unable to contact the BitMEX API (503). Please try again later.\" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n exit(1)\n else:\n print(\"Error:\", e)\n print(\"Endpoint was: \" + api)\n print(\"Please try again.\")\n raise e\n except (URLError, ssl.SSLError) as e:\n print(\"Unable to contact the BitMEX API (URLError). Please check the URL. Please try again later. \" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n exit(1)\n\n return json.loads(response.read().decode(\"utf-8\"))\n"
] | class BitMEX(object):
def __init__(self, email=None, password=None, otpToken=None):
self.base_url = (BITMEX_TESTNET if USE_TESTNET else BITMEX_PRODUCTION) + "/api/v1"
self.accessToken = None
self.accessToken = self._curl_bitmex("/user/login",
postdict={"email": email, "password": password, "token": otpToken})["id"]
def create_key(self):
"""Create an API key."""
print("Creating key. Please input the following options:")
name = input("Key name (optional): ")
print("To make this key more secure, you should restrict the IP addresses that can use it. ")
print("To use with all IPs, leave blank or use 0.0.0.0/0.")
print("To use with a single IP, append '/32', such as 207.39.29.22/32. ")
print("See this reference on CIDR blocks: http://software77.net/cidr-101.html")
cidr = input("CIDR (optional): ")
key = self._curl_bitmex("/apiKey",
postdict={"name": name, "cidr": cidr, "enabled": True})
print("Key created. Details:\n")
print("API Key: " + key["id"])
print("Secret: " + key["secret"])
print("\nSafeguard your secret key! If somebody gets a hold of your API key and secret,")
print("your account can be taken over completely.")
print("\nKey generation complete.")
def enable_key(self):
"""Enable an existing API Key."""
print("This command will enable a disabled key.")
apiKeyID = input("API Key ID: ")
try:
key = self._curl_bitmex("/apiKey/enable",
postdict={"apiKeyID": apiKeyID})
print("Key with ID %s enabled." % key["id"])
except:
print("Unable to enable key, please try again.")
self.enable_key()
def disable_key(self):
"""Disable an existing API Key."""
print("This command will disable a enabled key.")
apiKeyID = input("API Key ID: ")
try:
key = self._curl_bitmex("/apiKey/disable",
postdict={"apiKeyID": apiKeyID})
print("Key with ID %s disabled." % key["id"])
except:
print("Unable to disable key, please try again.")
self.disable_key()
def delete_key(self):
"""Delete an existing API Key."""
print("This command will delete an API key.")
apiKeyID = input("API Key ID: ")
try:
self._curl_bitmex("/apiKey/",
postdict={"apiKeyID": apiKeyID}, verb='DELETE')
print("Key with ID %s disabled." % apiKeyID)
except:
print("Unable to delete key, please try again.")
self.delete_key()
def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None):
url = self.base_url + api
if query:
url = url + "?" + urlencode(query)
if postdict:
postdata = urlencode(postdict).encode("utf-8")
request = Request(url, postdata)
else:
request = Request(url)
if verb:
request.get_method = lambda: verb
request.add_header('user-agent', 'BitMEX-generate-api-key')
if self.accessToken:
request.add_header('accessToken', self.accessToken)
try:
response = urlopen(request, timeout=timeout)
except HTTPError as e:
if e.code == 401:
print("Login information incorrect, please check and restart.")
exit(1)
# 503 - BitMEX temporary downtime, likely due to a deploy. Try again
elif e.code == 503:
print("Unable to contact the BitMEX API (503). Please try again later." +
"Request: %s \n %s" % (url, json.dumps(postdict)))
exit(1)
else:
print("Error:", e)
print("Endpoint was: " + api)
print("Please try again.")
raise e
except (URLError, ssl.SSLError) as e:
print("Unable to contact the BitMEX API (URLError). Please check the URL. Please try again later. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
exit(1)
return json.loads(response.read().decode("utf-8"))
|
joequant/cryptoexchange | cryptoexchange/util/bitmex-generate-api-key.py | BitMEX.enable_key | python | def enable_key(self):
print("This command will enable a disabled key.")
apiKeyID = input("API Key ID: ")
try:
key = self._curl_bitmex("/apiKey/enable",
postdict={"apiKeyID": apiKeyID})
print("Key with ID %s enabled." % key["id"])
except:
print("Unable to enable key, please try again.")
self.enable_key() | Enable an existing API Key. | train | https://github.com/joequant/cryptoexchange/blob/6690fbd9a2ba00e40d7484425808c84d44233f0c/cryptoexchange/util/bitmex-generate-api-key.py#L98-L108 | [
"def enable_key(self):\n \"\"\"Enable an existing API Key.\"\"\"\n print(\"This command will enable a disabled key.\")\n apiKeyID = input(\"API Key ID: \")\n try:\n key = self._curl_bitmex(\"/apiKey/enable\",\n postdict={\"apiKeyID\": apiKeyID})\n print(\"Key with ID %s enabled.\" % key[\"id\"])\n except:\n print(\"Unable to enable key, please try again.\")\n self.enable_key()\n",
"def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None):\n url = self.base_url + api\n if query:\n url = url + \"?\" + urlencode(query)\n if postdict:\n postdata = urlencode(postdict).encode(\"utf-8\")\n request = Request(url, postdata)\n else:\n request = Request(url)\n\n if verb:\n request.get_method = lambda: verb\n\n request.add_header('user-agent', 'BitMEX-generate-api-key')\n if self.accessToken:\n request.add_header('accessToken', self.accessToken)\n\n try:\n response = urlopen(request, timeout=timeout)\n except HTTPError as e:\n if e.code == 401:\n print(\"Login information incorrect, please check and restart.\")\n exit(1)\n # 503 - BitMEX temporary downtime, likely due to a deploy. Try again\n elif e.code == 503:\n print(\"Unable to contact the BitMEX API (503). Please try again later.\" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n exit(1)\n else:\n print(\"Error:\", e)\n print(\"Endpoint was: \" + api)\n print(\"Please try again.\")\n raise e\n except (URLError, ssl.SSLError) as e:\n print(\"Unable to contact the BitMEX API (URLError). Please check the URL. Please try again later. \" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n exit(1)\n\n return json.loads(response.read().decode(\"utf-8\"))\n"
] | class BitMEX(object):
def __init__(self, email=None, password=None, otpToken=None):
self.base_url = (BITMEX_TESTNET if USE_TESTNET else BITMEX_PRODUCTION) + "/api/v1"
self.accessToken = None
self.accessToken = self._curl_bitmex("/user/login",
postdict={"email": email, "password": password, "token": otpToken})["id"]
def create_key(self):
"""Create an API key."""
print("Creating key. Please input the following options:")
name = input("Key name (optional): ")
print("To make this key more secure, you should restrict the IP addresses that can use it. ")
print("To use with all IPs, leave blank or use 0.0.0.0/0.")
print("To use with a single IP, append '/32', such as 207.39.29.22/32. ")
print("See this reference on CIDR blocks: http://software77.net/cidr-101.html")
cidr = input("CIDR (optional): ")
key = self._curl_bitmex("/apiKey",
postdict={"name": name, "cidr": cidr, "enabled": True})
print("Key created. Details:\n")
print("API Key: " + key["id"])
print("Secret: " + key["secret"])
print("\nSafeguard your secret key! If somebody gets a hold of your API key and secret,")
print("your account can be taken over completely.")
print("\nKey generation complete.")
def list_keys(self):
"""List your API Keys."""
keys = self._curl_bitmex("/apiKey/")
print(json.dumps(keys, sort_keys=True, indent=4))
def disable_key(self):
"""Disable an existing API Key."""
print("This command will disable a enabled key.")
apiKeyID = input("API Key ID: ")
try:
key = self._curl_bitmex("/apiKey/disable",
postdict={"apiKeyID": apiKeyID})
print("Key with ID %s disabled." % key["id"])
except:
print("Unable to disable key, please try again.")
self.disable_key()
def delete_key(self):
"""Delete an existing API Key."""
print("This command will delete an API key.")
apiKeyID = input("API Key ID: ")
try:
self._curl_bitmex("/apiKey/",
postdict={"apiKeyID": apiKeyID}, verb='DELETE')
print("Key with ID %s disabled." % apiKeyID)
except:
print("Unable to delete key, please try again.")
self.delete_key()
def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None):
url = self.base_url + api
if query:
url = url + "?" + urlencode(query)
if postdict:
postdata = urlencode(postdict).encode("utf-8")
request = Request(url, postdata)
else:
request = Request(url)
if verb:
request.get_method = lambda: verb
request.add_header('user-agent', 'BitMEX-generate-api-key')
if self.accessToken:
request.add_header('accessToken', self.accessToken)
try:
response = urlopen(request, timeout=timeout)
except HTTPError as e:
if e.code == 401:
print("Login information incorrect, please check and restart.")
exit(1)
# 503 - BitMEX temporary downtime, likely due to a deploy. Try again
elif e.code == 503:
print("Unable to contact the BitMEX API (503). Please try again later." +
"Request: %s \n %s" % (url, json.dumps(postdict)))
exit(1)
else:
print("Error:", e)
print("Endpoint was: " + api)
print("Please try again.")
raise e
except (URLError, ssl.SSLError) as e:
print("Unable to contact the BitMEX API (URLError). Please check the URL. Please try again later. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
exit(1)
return json.loads(response.read().decode("utf-8"))
|
joequant/cryptoexchange | cryptoexchange/util/bitmex-generate-api-key.py | BitMEX.disable_key | python | def disable_key(self):
print("This command will disable a enabled key.")
apiKeyID = input("API Key ID: ")
try:
key = self._curl_bitmex("/apiKey/disable",
postdict={"apiKeyID": apiKeyID})
print("Key with ID %s disabled." % key["id"])
except:
print("Unable to disable key, please try again.")
self.disable_key() | Disable an existing API Key. | train | https://github.com/joequant/cryptoexchange/blob/6690fbd9a2ba00e40d7484425808c84d44233f0c/cryptoexchange/util/bitmex-generate-api-key.py#L110-L120 | [
"def disable_key(self):\n \"\"\"Disable an existing API Key.\"\"\"\n print(\"This command will disable a enabled key.\")\n apiKeyID = input(\"API Key ID: \")\n try:\n key = self._curl_bitmex(\"/apiKey/disable\",\n postdict={\"apiKeyID\": apiKeyID})\n print(\"Key with ID %s disabled.\" % key[\"id\"])\n except:\n print(\"Unable to disable key, please try again.\")\n self.disable_key()\n",
"def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None):\n url = self.base_url + api\n if query:\n url = url + \"?\" + urlencode(query)\n if postdict:\n postdata = urlencode(postdict).encode(\"utf-8\")\n request = Request(url, postdata)\n else:\n request = Request(url)\n\n if verb:\n request.get_method = lambda: verb\n\n request.add_header('user-agent', 'BitMEX-generate-api-key')\n if self.accessToken:\n request.add_header('accessToken', self.accessToken)\n\n try:\n response = urlopen(request, timeout=timeout)\n except HTTPError as e:\n if e.code == 401:\n print(\"Login information incorrect, please check and restart.\")\n exit(1)\n # 503 - BitMEX temporary downtime, likely due to a deploy. Try again\n elif e.code == 503:\n print(\"Unable to contact the BitMEX API (503). Please try again later.\" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n exit(1)\n else:\n print(\"Error:\", e)\n print(\"Endpoint was: \" + api)\n print(\"Please try again.\")\n raise e\n except (URLError, ssl.SSLError) as e:\n print(\"Unable to contact the BitMEX API (URLError). Please check the URL. Please try again later. \" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n exit(1)\n\n return json.loads(response.read().decode(\"utf-8\"))\n"
] | class BitMEX(object):
def __init__(self, email=None, password=None, otpToken=None):
self.base_url = (BITMEX_TESTNET if USE_TESTNET else BITMEX_PRODUCTION) + "/api/v1"
self.accessToken = None
self.accessToken = self._curl_bitmex("/user/login",
postdict={"email": email, "password": password, "token": otpToken})["id"]
def create_key(self):
"""Create an API key."""
print("Creating key. Please input the following options:")
name = input("Key name (optional): ")
print("To make this key more secure, you should restrict the IP addresses that can use it. ")
print("To use with all IPs, leave blank or use 0.0.0.0/0.")
print("To use with a single IP, append '/32', such as 207.39.29.22/32. ")
print("See this reference on CIDR blocks: http://software77.net/cidr-101.html")
cidr = input("CIDR (optional): ")
key = self._curl_bitmex("/apiKey",
postdict={"name": name, "cidr": cidr, "enabled": True})
print("Key created. Details:\n")
print("API Key: " + key["id"])
print("Secret: " + key["secret"])
print("\nSafeguard your secret key! If somebody gets a hold of your API key and secret,")
print("your account can be taken over completely.")
print("\nKey generation complete.")
def list_keys(self):
"""List your API Keys."""
keys = self._curl_bitmex("/apiKey/")
print(json.dumps(keys, sort_keys=True, indent=4))
def enable_key(self):
"""Enable an existing API Key."""
print("This command will enable a disabled key.")
apiKeyID = input("API Key ID: ")
try:
key = self._curl_bitmex("/apiKey/enable",
postdict={"apiKeyID": apiKeyID})
print("Key with ID %s enabled." % key["id"])
except:
print("Unable to enable key, please try again.")
self.enable_key()
def delete_key(self):
"""Delete an existing API Key."""
print("This command will delete an API key.")
apiKeyID = input("API Key ID: ")
try:
self._curl_bitmex("/apiKey/",
postdict={"apiKeyID": apiKeyID}, verb='DELETE')
print("Key with ID %s disabled." % apiKeyID)
except:
print("Unable to delete key, please try again.")
self.delete_key()
def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None):
url = self.base_url + api
if query:
url = url + "?" + urlencode(query)
if postdict:
postdata = urlencode(postdict).encode("utf-8")
request = Request(url, postdata)
else:
request = Request(url)
if verb:
request.get_method = lambda: verb
request.add_header('user-agent', 'BitMEX-generate-api-key')
if self.accessToken:
request.add_header('accessToken', self.accessToken)
try:
response = urlopen(request, timeout=timeout)
except HTTPError as e:
if e.code == 401:
print("Login information incorrect, please check and restart.")
exit(1)
# 503 - BitMEX temporary downtime, likely due to a deploy. Try again
elif e.code == 503:
print("Unable to contact the BitMEX API (503). Please try again later." +
"Request: %s \n %s" % (url, json.dumps(postdict)))
exit(1)
else:
print("Error:", e)
print("Endpoint was: " + api)
print("Please try again.")
raise e
except (URLError, ssl.SSLError) as e:
print("Unable to contact the BitMEX API (URLError). Please check the URL. Please try again later. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
exit(1)
return json.loads(response.read().decode("utf-8"))
|
joequant/cryptoexchange | cryptoexchange/util/bitmex-generate-api-key.py | BitMEX.delete_key | python | def delete_key(self):
print("This command will delete an API key.")
apiKeyID = input("API Key ID: ")
try:
self._curl_bitmex("/apiKey/",
postdict={"apiKeyID": apiKeyID}, verb='DELETE')
print("Key with ID %s disabled." % apiKeyID)
except:
print("Unable to delete key, please try again.")
self.delete_key() | Delete an existing API Key. | train | https://github.com/joequant/cryptoexchange/blob/6690fbd9a2ba00e40d7484425808c84d44233f0c/cryptoexchange/util/bitmex-generate-api-key.py#L122-L132 | [
"def delete_key(self):\n \"\"\"Delete an existing API Key.\"\"\"\n print(\"This command will delete an API key.\")\n apiKeyID = input(\"API Key ID: \")\n try:\n self._curl_bitmex(\"/apiKey/\",\n postdict={\"apiKeyID\": apiKeyID}, verb='DELETE')\n print(\"Key with ID %s disabled.\" % apiKeyID)\n except:\n print(\"Unable to delete key, please try again.\")\n self.delete_key()\n",
"def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None):\n url = self.base_url + api\n if query:\n url = url + \"?\" + urlencode(query)\n if postdict:\n postdata = urlencode(postdict).encode(\"utf-8\")\n request = Request(url, postdata)\n else:\n request = Request(url)\n\n if verb:\n request.get_method = lambda: verb\n\n request.add_header('user-agent', 'BitMEX-generate-api-key')\n if self.accessToken:\n request.add_header('accessToken', self.accessToken)\n\n try:\n response = urlopen(request, timeout=timeout)\n except HTTPError as e:\n if e.code == 401:\n print(\"Login information incorrect, please check and restart.\")\n exit(1)\n # 503 - BitMEX temporary downtime, likely due to a deploy. Try again\n elif e.code == 503:\n print(\"Unable to contact the BitMEX API (503). Please try again later.\" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n exit(1)\n else:\n print(\"Error:\", e)\n print(\"Endpoint was: \" + api)\n print(\"Please try again.\")\n raise e\n except (URLError, ssl.SSLError) as e:\n print(\"Unable to contact the BitMEX API (URLError). Please check the URL. Please try again later. \" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n exit(1)\n\n return json.loads(response.read().decode(\"utf-8\"))\n"
] | class BitMEX(object):
def __init__(self, email=None, password=None, otpToken=None):
self.base_url = (BITMEX_TESTNET if USE_TESTNET else BITMEX_PRODUCTION) + "/api/v1"
self.accessToken = None
self.accessToken = self._curl_bitmex("/user/login",
postdict={"email": email, "password": password, "token": otpToken})["id"]
def create_key(self):
"""Create an API key."""
print("Creating key. Please input the following options:")
name = input("Key name (optional): ")
print("To make this key more secure, you should restrict the IP addresses that can use it. ")
print("To use with all IPs, leave blank or use 0.0.0.0/0.")
print("To use with a single IP, append '/32', such as 207.39.29.22/32. ")
print("See this reference on CIDR blocks: http://software77.net/cidr-101.html")
cidr = input("CIDR (optional): ")
key = self._curl_bitmex("/apiKey",
postdict={"name": name, "cidr": cidr, "enabled": True})
print("Key created. Details:\n")
print("API Key: " + key["id"])
print("Secret: " + key["secret"])
print("\nSafeguard your secret key! If somebody gets a hold of your API key and secret,")
print("your account can be taken over completely.")
print("\nKey generation complete.")
def list_keys(self):
"""List your API Keys."""
keys = self._curl_bitmex("/apiKey/")
print(json.dumps(keys, sort_keys=True, indent=4))
def enable_key(self):
"""Enable an existing API Key."""
print("This command will enable a disabled key.")
apiKeyID = input("API Key ID: ")
try:
key = self._curl_bitmex("/apiKey/enable",
postdict={"apiKeyID": apiKeyID})
print("Key with ID %s enabled." % key["id"])
except:
print("Unable to enable key, please try again.")
self.enable_key()
def disable_key(self):
"""Disable an existing API Key."""
print("This command will disable a enabled key.")
apiKeyID = input("API Key ID: ")
try:
key = self._curl_bitmex("/apiKey/disable",
postdict={"apiKeyID": apiKeyID})
print("Key with ID %s disabled." % key["id"])
except:
print("Unable to disable key, please try again.")
self.disable_key()
def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None):
url = self.base_url + api
if query:
url = url + "?" + urlencode(query)
if postdict:
postdata = urlencode(postdict).encode("utf-8")
request = Request(url, postdata)
else:
request = Request(url)
if verb:
request.get_method = lambda: verb
request.add_header('user-agent', 'BitMEX-generate-api-key')
if self.accessToken:
request.add_header('accessToken', self.accessToken)
try:
response = urlopen(request, timeout=timeout)
except HTTPError as e:
if e.code == 401:
print("Login information incorrect, please check and restart.")
exit(1)
# 503 - BitMEX temporary downtime, likely due to a deploy. Try again
elif e.code == 503:
print("Unable to contact the BitMEX API (503). Please try again later." +
"Request: %s \n %s" % (url, json.dumps(postdict)))
exit(1)
else:
print("Error:", e)
print("Endpoint was: " + api)
print("Please try again.")
raise e
except (URLError, ssl.SSLError) as e:
print("Unable to contact the BitMEX API (URLError). Please check the URL. Please try again later. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
exit(1)
return json.loads(response.read().decode("utf-8"))
|
joequant/cryptoexchange | cryptoexchange/bitmex.py | BitMEX.authenticate | python | def authenticate(self):
if self.apiKey:
return
loginResponse = self._curl_bitmex(
api="user/login",
postdict={'email': self.login, 'password': self.password, 'token': self.otpToken})
self.token = loginResponse['id']
self.session.headers.update({'access-token': self.token}) | Set BitMEX authentication information. | train | https://github.com/joequant/cryptoexchange/blob/6690fbd9a2ba00e40d7484425808c84d44233f0c/cryptoexchange/bitmex.py#L115-L123 | [
" def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None):\n \"\"\"Send a request to BitMEX Servers.\"\"\"\n # Handle URL\n url = self.base_url + api\n\n # Default to POST if data is attached, GET otherwise\n if not verb:\n verb = 'POST' if postdict else 'GET'\n\n # Auth: Use Access Token by default, API Key/Secret if provided\n auth = AccessTokenAuth(self.token)\n if self.apiKey:\n auth = APIKeyAuthWithExpires(self.apiKey, self.apiSecret)\n\n # Make the request\n try:\n# url = \"http://httpbin.org/post\"\n req = requests.Request(verb, url, data=postdict, auth=auth, params=query)\n prepped = self.session.prepare_request(req)\n response = self.session.send(prepped, timeout=timeout)\n # Make non-200s throw\n response.raise_for_status()\n\n except requests.exceptions.HTTPError as e:\n # 401 - Auth error. Re-auth and re-run this request.\n if response.status_code == 401:\n if self.token is None:\n self.logger.error(\"Login information or API Key incorrect, please check and restart.\")\n self.logger.error(\"Error: \" + response.text)\n if postdict:\n self.logger.error(postdict)\n self.logger.warning(\"Token expired, reauthenticating...\")\n sleep(1)\n self.authenticate()\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n\n # 404, can be thrown if order canceled does not exist.\n elif response.status_code == 404:\n if verb == 'DELETE':\n self.logger.error(\"Order not found: %s\" % postdict['orderID'])\n return\n self.logger.error(\"Unable to contact the BitMEX API (404). \" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n # 429, ratelimit\n elif response.status_code == 429:\n self.logger.error(\"Ratelimited on current request. Sleeping, then trying again. Try fewer \" +\n \"order pairs or contact support@bitmex.com to raise your limits. 
\" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n sleep(1)\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n\n # 503 - BitMEX temporary downtime, likely due to a deploy. Try again\n elif response.status_code == 503:\n self.logger.warning(\"Unable to contact the BitMEX API (503), retrying. \" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n sleep(1)\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n # Unknown Error\n else:\n self.logger.error(\"Unhandled Error: %s: %s %s\" % (e, response.text, json.dumps(response.json(), indent=4)))\n self.logger.error(\"Endpoint was: %s %s\" % (verb, api))\n except requests.exceptions.Timeout as e:\n # Timeout, re-run this request\n self.logger.warning(\"Timed out, retrying...\")\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n\n except requests.exceptions.ConnectionError as e:\n self.logger.warning(\"Unable to contact the BitMEX API (ConnectionError). Please check the URL. Retrying. \" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n sleep(1)\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n\n return response.json()\n"
] | class BitMEX(object):
"""BitMEX API Connector."""
def __init__(self, base_url=None, login=None, password=None, otpToken=None,
apiKey=None, apiSecret=None, orderIDPrefix='mm_bitmex_'):
"""Init connector."""
self.logger = logging.getLogger('root')
self.base_url = base_url
self.token = None
self.login = login
self.password = password
self.otpToken = otpToken
self.apiKey = apiKey
self.apiSecret = apiSecret
if len(orderIDPrefix) > 13:
raise ValueError("settings.ORDERID_PREFIX must be at most 13 characters long!")
self.orderIDPrefix = orderIDPrefix
# Prepare HTTPS session
self.session = requests.Session()
# These headers are always sent
self.session.headers.update({'user-agent': 'bitmex-robot'})
#
# Authentication required methods
#
def authentication_required(function):
"""Annotation for methods that require auth."""
def wrapped(self, *args, **kwargs):
if not (self.token or self.apiKey):
msg = "You must be authenticated to use this method"
raise AuthenticationError(msg)
else:
return function(self, *args, **kwargs)
return wrapped
@authentication_required
def position(self):
return self._curl_bitmex(api="position", verb="GET")
@authentication_required
def place_order(self, quantity, symbol, price):
"""Place an order."""
if price < 0:
raise Exception("Price must be positive.")
endpoint = "order"
# Generate a unique clOrdID with our prefix so we can identify it.
clOrdID = self.orderIDPrefix + base64.b64encode(uuid.uuid4().bytes).decode('ascii').rstrip('=\n')
postdict = {
'symbol': symbol,
'quantity': quantity,
'price': price,
'clOrdID': clOrdID
}
return self._curl_bitmex(api=endpoint, postdict=postdict, verb="POST")
@authentication_required
def open_orders(self, symbol=None):
"""Get open orders via HTTP. Used on close to ensure we catch them all."""
api = "order"
query = {'ordStatus.isTerminated': False }
if symbol != None:
query['symbol'] =symbol
orders = self._curl_bitmex(
api=api,
query={'filter': json.dumps(query)},
verb="GET"
)
return orders
# # Only return orders that start with our clOrdID prefix.
# return [o for o in orders if str(o['clOrdID']).startswith(self.orderIDPrefix)]
@authentication_required
def cancel(self, orderID):
"""Cancel an existing order."""
api = "order"
postdict = {
'orderID': orderID,
}
return self._curl_bitmex(api=api, postdict=postdict, verb="DELETE")
def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None):
"""Send a request to BitMEX Servers."""
# Handle URL
url = self.base_url + api
# Default to POST if data is attached, GET otherwise
if not verb:
verb = 'POST' if postdict else 'GET'
# Auth: Use Access Token by default, API Key/Secret if provided
auth = AccessTokenAuth(self.token)
if self.apiKey:
auth = APIKeyAuthWithExpires(self.apiKey, self.apiSecret)
# Make the request
try:
# url = "http://httpbin.org/post"
req = requests.Request(verb, url, data=postdict, auth=auth, params=query)
prepped = self.session.prepare_request(req)
response = self.session.send(prepped, timeout=timeout)
# Make non-200s throw
response.raise_for_status()
except requests.exceptions.HTTPError as e:
# 401 - Auth error. Re-auth and re-run this request.
if response.status_code == 401:
if self.token is None:
self.logger.error("Login information or API Key incorrect, please check and restart.")
self.logger.error("Error: " + response.text)
if postdict:
self.logger.error(postdict)
self.logger.warning("Token expired, reauthenticating...")
sleep(1)
self.authenticate()
return self._curl_bitmex(api, query, postdict, timeout, verb)
# 404, can be thrown if order canceled does not exist.
elif response.status_code == 404:
if verb == 'DELETE':
self.logger.error("Order not found: %s" % postdict['orderID'])
return
self.logger.error("Unable to contact the BitMEX API (404). " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
# 429, ratelimit
elif response.status_code == 429:
self.logger.error("Ratelimited on current request. Sleeping, then trying again. Try fewer " +
"order pairs or contact support@bitmex.com to raise your limits. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
sleep(1)
return self._curl_bitmex(api, query, postdict, timeout, verb)
# 503 - BitMEX temporary downtime, likely due to a deploy. Try again
elif response.status_code == 503:
self.logger.warning("Unable to contact the BitMEX API (503), retrying. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
sleep(1)
return self._curl_bitmex(api, query, postdict, timeout, verb)
# Unknown Error
else:
self.logger.error("Unhandled Error: %s: %s %s" % (e, response.text, json.dumps(response.json(), indent=4)))
self.logger.error("Endpoint was: %s %s" % (verb, api))
except requests.exceptions.Timeout as e:
# Timeout, re-run this request
self.logger.warning("Timed out, retrying...")
return self._curl_bitmex(api, query, postdict, timeout, verb)
except requests.exceptions.ConnectionError as e:
self.logger.warning("Unable to contact the BitMEX API (ConnectionError). Please check the URL. Retrying. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
sleep(1)
return self._curl_bitmex(api, query, postdict, timeout, verb)
return response.json()
|
joequant/cryptoexchange | cryptoexchange/bitmex.py | BitMEX.authentication_required | python | def authentication_required(function):
def wrapped(self, *args, **kwargs):
if not (self.token or self.apiKey):
msg = "You must be authenticated to use this method"
raise AuthenticationError(msg)
else:
return function(self, *args, **kwargs)
return wrapped | Annotation for methods that require auth. | train | https://github.com/joequant/cryptoexchange/blob/6690fbd9a2ba00e40d7484425808c84d44233f0c/cryptoexchange/bitmex.py#L125-L133 | null | class BitMEX(object):
"""BitMEX API Connector."""
def __init__(self, base_url=None, login=None, password=None, otpToken=None,
apiKey=None, apiSecret=None, orderIDPrefix='mm_bitmex_'):
"""Init connector."""
self.logger = logging.getLogger('root')
self.base_url = base_url
self.token = None
self.login = login
self.password = password
self.otpToken = otpToken
self.apiKey = apiKey
self.apiSecret = apiSecret
if len(orderIDPrefix) > 13:
raise ValueError("settings.ORDERID_PREFIX must be at most 13 characters long!")
self.orderIDPrefix = orderIDPrefix
# Prepare HTTPS session
self.session = requests.Session()
# These headers are always sent
self.session.headers.update({'user-agent': 'bitmex-robot'})
#
# Authentication required methods
#
def authenticate(self):
"""Set BitMEX authentication information."""
if self.apiKey:
return
loginResponse = self._curl_bitmex(
api="user/login",
postdict={'email': self.login, 'password': self.password, 'token': self.otpToken})
self.token = loginResponse['id']
self.session.headers.update({'access-token': self.token})
@authentication_required
def position(self):
return self._curl_bitmex(api="position", verb="GET")
@authentication_required
def place_order(self, quantity, symbol, price):
"""Place an order."""
if price < 0:
raise Exception("Price must be positive.")
endpoint = "order"
# Generate a unique clOrdID with our prefix so we can identify it.
clOrdID = self.orderIDPrefix + base64.b64encode(uuid.uuid4().bytes).decode('ascii').rstrip('=\n')
postdict = {
'symbol': symbol,
'quantity': quantity,
'price': price,
'clOrdID': clOrdID
}
return self._curl_bitmex(api=endpoint, postdict=postdict, verb="POST")
@authentication_required
def open_orders(self, symbol=None):
"""Get open orders via HTTP. Used on close to ensure we catch them all."""
api = "order"
query = {'ordStatus.isTerminated': False }
if symbol != None:
query['symbol'] =symbol
orders = self._curl_bitmex(
api=api,
query={'filter': json.dumps(query)},
verb="GET"
)
return orders
# # Only return orders that start with our clOrdID prefix.
# return [o for o in orders if str(o['clOrdID']).startswith(self.orderIDPrefix)]
@authentication_required
def cancel(self, orderID):
"""Cancel an existing order."""
api = "order"
postdict = {
'orderID': orderID,
}
return self._curl_bitmex(api=api, postdict=postdict, verb="DELETE")
def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None):
"""Send a request to BitMEX Servers."""
# Handle URL
url = self.base_url + api
# Default to POST if data is attached, GET otherwise
if not verb:
verb = 'POST' if postdict else 'GET'
# Auth: Use Access Token by default, API Key/Secret if provided
auth = AccessTokenAuth(self.token)
if self.apiKey:
auth = APIKeyAuthWithExpires(self.apiKey, self.apiSecret)
# Make the request
try:
# url = "http://httpbin.org/post"
req = requests.Request(verb, url, data=postdict, auth=auth, params=query)
prepped = self.session.prepare_request(req)
response = self.session.send(prepped, timeout=timeout)
# Make non-200s throw
response.raise_for_status()
except requests.exceptions.HTTPError as e:
# 401 - Auth error. Re-auth and re-run this request.
if response.status_code == 401:
if self.token is None:
self.logger.error("Login information or API Key incorrect, please check and restart.")
self.logger.error("Error: " + response.text)
if postdict:
self.logger.error(postdict)
self.logger.warning("Token expired, reauthenticating...")
sleep(1)
self.authenticate()
return self._curl_bitmex(api, query, postdict, timeout, verb)
# 404, can be thrown if order canceled does not exist.
elif response.status_code == 404:
if verb == 'DELETE':
self.logger.error("Order not found: %s" % postdict['orderID'])
return
self.logger.error("Unable to contact the BitMEX API (404). " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
# 429, ratelimit
elif response.status_code == 429:
self.logger.error("Ratelimited on current request. Sleeping, then trying again. Try fewer " +
"order pairs or contact support@bitmex.com to raise your limits. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
sleep(1)
return self._curl_bitmex(api, query, postdict, timeout, verb)
# 503 - BitMEX temporary downtime, likely due to a deploy. Try again
elif response.status_code == 503:
self.logger.warning("Unable to contact the BitMEX API (503), retrying. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
sleep(1)
return self._curl_bitmex(api, query, postdict, timeout, verb)
# Unknown Error
else:
self.logger.error("Unhandled Error: %s: %s %s" % (e, response.text, json.dumps(response.json(), indent=4)))
self.logger.error("Endpoint was: %s %s" % (verb, api))
except requests.exceptions.Timeout as e:
# Timeout, re-run this request
self.logger.warning("Timed out, retrying...")
return self._curl_bitmex(api, query, postdict, timeout, verb)
except requests.exceptions.ConnectionError as e:
self.logger.warning("Unable to contact the BitMEX API (ConnectionError). Please check the URL. Retrying. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
sleep(1)
return self._curl_bitmex(api, query, postdict, timeout, verb)
return response.json()
|
joequant/cryptoexchange | cryptoexchange/bitmex.py | BitMEX.place_order | python | def place_order(self, quantity, symbol, price):
if price < 0:
raise Exception("Price must be positive.")
endpoint = "order"
# Generate a unique clOrdID with our prefix so we can identify it.
clOrdID = self.orderIDPrefix + base64.b64encode(uuid.uuid4().bytes).decode('ascii').rstrip('=\n')
postdict = {
'symbol': symbol,
'quantity': quantity,
'price': price,
'clOrdID': clOrdID
}
return self._curl_bitmex(api=endpoint, postdict=postdict, verb="POST") | Place an order. | train | https://github.com/joequant/cryptoexchange/blob/6690fbd9a2ba00e40d7484425808c84d44233f0c/cryptoexchange/bitmex.py#L140-L154 | [
" def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None):\n \"\"\"Send a request to BitMEX Servers.\"\"\"\n # Handle URL\n url = self.base_url + api\n\n # Default to POST if data is attached, GET otherwise\n if not verb:\n verb = 'POST' if postdict else 'GET'\n\n # Auth: Use Access Token by default, API Key/Secret if provided\n auth = AccessTokenAuth(self.token)\n if self.apiKey:\n auth = APIKeyAuthWithExpires(self.apiKey, self.apiSecret)\n\n # Make the request\n try:\n# url = \"http://httpbin.org/post\"\n req = requests.Request(verb, url, data=postdict, auth=auth, params=query)\n prepped = self.session.prepare_request(req)\n response = self.session.send(prepped, timeout=timeout)\n # Make non-200s throw\n response.raise_for_status()\n\n except requests.exceptions.HTTPError as e:\n # 401 - Auth error. Re-auth and re-run this request.\n if response.status_code == 401:\n if self.token is None:\n self.logger.error(\"Login information or API Key incorrect, please check and restart.\")\n self.logger.error(\"Error: \" + response.text)\n if postdict:\n self.logger.error(postdict)\n self.logger.warning(\"Token expired, reauthenticating...\")\n sleep(1)\n self.authenticate()\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n\n # 404, can be thrown if order canceled does not exist.\n elif response.status_code == 404:\n if verb == 'DELETE':\n self.logger.error(\"Order not found: %s\" % postdict['orderID'])\n return\n self.logger.error(\"Unable to contact the BitMEX API (404). \" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n # 429, ratelimit\n elif response.status_code == 429:\n self.logger.error(\"Ratelimited on current request. Sleeping, then trying again. Try fewer \" +\n \"order pairs or contact support@bitmex.com to raise your limits. 
\" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n sleep(1)\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n\n # 503 - BitMEX temporary downtime, likely due to a deploy. Try again\n elif response.status_code == 503:\n self.logger.warning(\"Unable to contact the BitMEX API (503), retrying. \" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n sleep(1)\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n # Unknown Error\n else:\n self.logger.error(\"Unhandled Error: %s: %s %s\" % (e, response.text, json.dumps(response.json(), indent=4)))\n self.logger.error(\"Endpoint was: %s %s\" % (verb, api))\n except requests.exceptions.Timeout as e:\n # Timeout, re-run this request\n self.logger.warning(\"Timed out, retrying...\")\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n\n except requests.exceptions.ConnectionError as e:\n self.logger.warning(\"Unable to contact the BitMEX API (ConnectionError). Please check the URL. Retrying. \" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n sleep(1)\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n\n return response.json()\n"
] | class BitMEX(object):
"""BitMEX API Connector."""
def __init__(self, base_url=None, login=None, password=None, otpToken=None,
apiKey=None, apiSecret=None, orderIDPrefix='mm_bitmex_'):
"""Init connector."""
self.logger = logging.getLogger('root')
self.base_url = base_url
self.token = None
self.login = login
self.password = password
self.otpToken = otpToken
self.apiKey = apiKey
self.apiSecret = apiSecret
if len(orderIDPrefix) > 13:
raise ValueError("settings.ORDERID_PREFIX must be at most 13 characters long!")
self.orderIDPrefix = orderIDPrefix
# Prepare HTTPS session
self.session = requests.Session()
# These headers are always sent
self.session.headers.update({'user-agent': 'bitmex-robot'})
#
# Authentication required methods
#
def authenticate(self):
"""Set BitMEX authentication information."""
if self.apiKey:
return
loginResponse = self._curl_bitmex(
api="user/login",
postdict={'email': self.login, 'password': self.password, 'token': self.otpToken})
self.token = loginResponse['id']
self.session.headers.update({'access-token': self.token})
def authentication_required(function):
"""Annotation for methods that require auth."""
def wrapped(self, *args, **kwargs):
if not (self.token or self.apiKey):
msg = "You must be authenticated to use this method"
raise AuthenticationError(msg)
else:
return function(self, *args, **kwargs)
return wrapped
@authentication_required
def position(self):
return self._curl_bitmex(api="position", verb="GET")
@authentication_required
@authentication_required
def open_orders(self, symbol=None):
"""Get open orders via HTTP. Used on close to ensure we catch them all."""
api = "order"
query = {'ordStatus.isTerminated': False }
if symbol != None:
query['symbol'] =symbol
orders = self._curl_bitmex(
api=api,
query={'filter': json.dumps(query)},
verb="GET"
)
return orders
# # Only return orders that start with our clOrdID prefix.
# return [o for o in orders if str(o['clOrdID']).startswith(self.orderIDPrefix)]
@authentication_required
def cancel(self, orderID):
"""Cancel an existing order."""
api = "order"
postdict = {
'orderID': orderID,
}
return self._curl_bitmex(api=api, postdict=postdict, verb="DELETE")
def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None):
"""Send a request to BitMEX Servers."""
# Handle URL
url = self.base_url + api
# Default to POST if data is attached, GET otherwise
if not verb:
verb = 'POST' if postdict else 'GET'
# Auth: Use Access Token by default, API Key/Secret if provided
auth = AccessTokenAuth(self.token)
if self.apiKey:
auth = APIKeyAuthWithExpires(self.apiKey, self.apiSecret)
# Make the request
try:
# url = "http://httpbin.org/post"
req = requests.Request(verb, url, data=postdict, auth=auth, params=query)
prepped = self.session.prepare_request(req)
response = self.session.send(prepped, timeout=timeout)
# Make non-200s throw
response.raise_for_status()
except requests.exceptions.HTTPError as e:
# 401 - Auth error. Re-auth and re-run this request.
if response.status_code == 401:
if self.token is None:
self.logger.error("Login information or API Key incorrect, please check and restart.")
self.logger.error("Error: " + response.text)
if postdict:
self.logger.error(postdict)
self.logger.warning("Token expired, reauthenticating...")
sleep(1)
self.authenticate()
return self._curl_bitmex(api, query, postdict, timeout, verb)
# 404, can be thrown if order canceled does not exist.
elif response.status_code == 404:
if verb == 'DELETE':
self.logger.error("Order not found: %s" % postdict['orderID'])
return
self.logger.error("Unable to contact the BitMEX API (404). " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
# 429, ratelimit
elif response.status_code == 429:
self.logger.error("Ratelimited on current request. Sleeping, then trying again. Try fewer " +
"order pairs or contact support@bitmex.com to raise your limits. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
sleep(1)
return self._curl_bitmex(api, query, postdict, timeout, verb)
# 503 - BitMEX temporary downtime, likely due to a deploy. Try again
elif response.status_code == 503:
self.logger.warning("Unable to contact the BitMEX API (503), retrying. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
sleep(1)
return self._curl_bitmex(api, query, postdict, timeout, verb)
# Unknown Error
else:
self.logger.error("Unhandled Error: %s: %s %s" % (e, response.text, json.dumps(response.json(), indent=4)))
self.logger.error("Endpoint was: %s %s" % (verb, api))
except requests.exceptions.Timeout as e:
# Timeout, re-run this request
self.logger.warning("Timed out, retrying...")
return self._curl_bitmex(api, query, postdict, timeout, verb)
except requests.exceptions.ConnectionError as e:
self.logger.warning("Unable to contact the BitMEX API (ConnectionError). Please check the URL. Retrying. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
sleep(1)
return self._curl_bitmex(api, query, postdict, timeout, verb)
return response.json()
|
joequant/cryptoexchange | cryptoexchange/bitmex.py | BitMEX.open_orders | python | def open_orders(self, symbol=None):
api = "order"
query = {'ordStatus.isTerminated': False }
if symbol != None:
query['symbol'] =symbol
orders = self._curl_bitmex(
api=api,
query={'filter': json.dumps(query)},
verb="GET"
)
return orders | Get open orders via HTTP. Used on close to ensure we catch them all. | train | https://github.com/joequant/cryptoexchange/blob/6690fbd9a2ba00e40d7484425808c84d44233f0c/cryptoexchange/bitmex.py#L157-L168 | [
" def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None):\n \"\"\"Send a request to BitMEX Servers.\"\"\"\n # Handle URL\n url = self.base_url + api\n\n # Default to POST if data is attached, GET otherwise\n if not verb:\n verb = 'POST' if postdict else 'GET'\n\n # Auth: Use Access Token by default, API Key/Secret if provided\n auth = AccessTokenAuth(self.token)\n if self.apiKey:\n auth = APIKeyAuthWithExpires(self.apiKey, self.apiSecret)\n\n # Make the request\n try:\n# url = \"http://httpbin.org/post\"\n req = requests.Request(verb, url, data=postdict, auth=auth, params=query)\n prepped = self.session.prepare_request(req)\n response = self.session.send(prepped, timeout=timeout)\n # Make non-200s throw\n response.raise_for_status()\n\n except requests.exceptions.HTTPError as e:\n # 401 - Auth error. Re-auth and re-run this request.\n if response.status_code == 401:\n if self.token is None:\n self.logger.error(\"Login information or API Key incorrect, please check and restart.\")\n self.logger.error(\"Error: \" + response.text)\n if postdict:\n self.logger.error(postdict)\n self.logger.warning(\"Token expired, reauthenticating...\")\n sleep(1)\n self.authenticate()\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n\n # 404, can be thrown if order canceled does not exist.\n elif response.status_code == 404:\n if verb == 'DELETE':\n self.logger.error(\"Order not found: %s\" % postdict['orderID'])\n return\n self.logger.error(\"Unable to contact the BitMEX API (404). \" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n # 429, ratelimit\n elif response.status_code == 429:\n self.logger.error(\"Ratelimited on current request. Sleeping, then trying again. Try fewer \" +\n \"order pairs or contact support@bitmex.com to raise your limits. 
\" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n sleep(1)\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n\n # 503 - BitMEX temporary downtime, likely due to a deploy. Try again\n elif response.status_code == 503:\n self.logger.warning(\"Unable to contact the BitMEX API (503), retrying. \" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n sleep(1)\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n # Unknown Error\n else:\n self.logger.error(\"Unhandled Error: %s: %s %s\" % (e, response.text, json.dumps(response.json(), indent=4)))\n self.logger.error(\"Endpoint was: %s %s\" % (verb, api))\n except requests.exceptions.Timeout as e:\n # Timeout, re-run this request\n self.logger.warning(\"Timed out, retrying...\")\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n\n except requests.exceptions.ConnectionError as e:\n self.logger.warning(\"Unable to contact the BitMEX API (ConnectionError). Please check the URL. Retrying. \" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n sleep(1)\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n\n return response.json()\n"
] | class BitMEX(object):
"""BitMEX API Connector."""
def __init__(self, base_url=None, login=None, password=None, otpToken=None,
apiKey=None, apiSecret=None, orderIDPrefix='mm_bitmex_'):
"""Init connector."""
self.logger = logging.getLogger('root')
self.base_url = base_url
self.token = None
self.login = login
self.password = password
self.otpToken = otpToken
self.apiKey = apiKey
self.apiSecret = apiSecret
if len(orderIDPrefix) > 13:
raise ValueError("settings.ORDERID_PREFIX must be at most 13 characters long!")
self.orderIDPrefix = orderIDPrefix
# Prepare HTTPS session
self.session = requests.Session()
# These headers are always sent
self.session.headers.update({'user-agent': 'bitmex-robot'})
#
# Authentication required methods
#
def authenticate(self):
"""Set BitMEX authentication information."""
if self.apiKey:
return
loginResponse = self._curl_bitmex(
api="user/login",
postdict={'email': self.login, 'password': self.password, 'token': self.otpToken})
self.token = loginResponse['id']
self.session.headers.update({'access-token': self.token})
def authentication_required(function):
"""Annotation for methods that require auth."""
def wrapped(self, *args, **kwargs):
if not (self.token or self.apiKey):
msg = "You must be authenticated to use this method"
raise AuthenticationError(msg)
else:
return function(self, *args, **kwargs)
return wrapped
@authentication_required
def position(self):
return self._curl_bitmex(api="position", verb="GET")
@authentication_required
def place_order(self, quantity, symbol, price):
"""Place an order."""
if price < 0:
raise Exception("Price must be positive.")
endpoint = "order"
# Generate a unique clOrdID with our prefix so we can identify it.
clOrdID = self.orderIDPrefix + base64.b64encode(uuid.uuid4().bytes).decode('ascii').rstrip('=\n')
postdict = {
'symbol': symbol,
'quantity': quantity,
'price': price,
'clOrdID': clOrdID
}
return self._curl_bitmex(api=endpoint, postdict=postdict, verb="POST")
@authentication_required
# # Only return orders that start with our clOrdID prefix.
# return [o for o in orders if str(o['clOrdID']).startswith(self.orderIDPrefix)]
@authentication_required
def cancel(self, orderID):
"""Cancel an existing order."""
api = "order"
postdict = {
'orderID': orderID,
}
return self._curl_bitmex(api=api, postdict=postdict, verb="DELETE")
def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None):
"""Send a request to BitMEX Servers."""
# Handle URL
url = self.base_url + api
# Default to POST if data is attached, GET otherwise
if not verb:
verb = 'POST' if postdict else 'GET'
# Auth: Use Access Token by default, API Key/Secret if provided
auth = AccessTokenAuth(self.token)
if self.apiKey:
auth = APIKeyAuthWithExpires(self.apiKey, self.apiSecret)
# Make the request
try:
# url = "http://httpbin.org/post"
req = requests.Request(verb, url, data=postdict, auth=auth, params=query)
prepped = self.session.prepare_request(req)
response = self.session.send(prepped, timeout=timeout)
# Make non-200s throw
response.raise_for_status()
except requests.exceptions.HTTPError as e:
# 401 - Auth error. Re-auth and re-run this request.
if response.status_code == 401:
if self.token is None:
self.logger.error("Login information or API Key incorrect, please check and restart.")
self.logger.error("Error: " + response.text)
if postdict:
self.logger.error(postdict)
self.logger.warning("Token expired, reauthenticating...")
sleep(1)
self.authenticate()
return self._curl_bitmex(api, query, postdict, timeout, verb)
# 404, can be thrown if order canceled does not exist.
elif response.status_code == 404:
if verb == 'DELETE':
self.logger.error("Order not found: %s" % postdict['orderID'])
return
self.logger.error("Unable to contact the BitMEX API (404). " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
# 429, ratelimit
elif response.status_code == 429:
self.logger.error("Ratelimited on current request. Sleeping, then trying again. Try fewer " +
"order pairs or contact support@bitmex.com to raise your limits. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
sleep(1)
return self._curl_bitmex(api, query, postdict, timeout, verb)
# 503 - BitMEX temporary downtime, likely due to a deploy. Try again
elif response.status_code == 503:
self.logger.warning("Unable to contact the BitMEX API (503), retrying. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
sleep(1)
return self._curl_bitmex(api, query, postdict, timeout, verb)
# Unknown Error
else:
self.logger.error("Unhandled Error: %s: %s %s" % (e, response.text, json.dumps(response.json(), indent=4)))
self.logger.error("Endpoint was: %s %s" % (verb, api))
except requests.exceptions.Timeout as e:
# Timeout, re-run this request
self.logger.warning("Timed out, retrying...")
return self._curl_bitmex(api, query, postdict, timeout, verb)
except requests.exceptions.ConnectionError as e:
self.logger.warning("Unable to contact the BitMEX API (ConnectionError). Please check the URL. Retrying. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
sleep(1)
return self._curl_bitmex(api, query, postdict, timeout, verb)
return response.json()
|
joequant/cryptoexchange | cryptoexchange/bitmex.py | BitMEX.cancel | python | def cancel(self, orderID):
api = "order"
postdict = {
'orderID': orderID,
}
return self._curl_bitmex(api=api, postdict=postdict, verb="DELETE") | Cancel an existing order. | train | https://github.com/joequant/cryptoexchange/blob/6690fbd9a2ba00e40d7484425808c84d44233f0c/cryptoexchange/bitmex.py#L173-L179 | [
" def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None):\n \"\"\"Send a request to BitMEX Servers.\"\"\"\n # Handle URL\n url = self.base_url + api\n\n # Default to POST if data is attached, GET otherwise\n if not verb:\n verb = 'POST' if postdict else 'GET'\n\n # Auth: Use Access Token by default, API Key/Secret if provided\n auth = AccessTokenAuth(self.token)\n if self.apiKey:\n auth = APIKeyAuthWithExpires(self.apiKey, self.apiSecret)\n\n # Make the request\n try:\n# url = \"http://httpbin.org/post\"\n req = requests.Request(verb, url, data=postdict, auth=auth, params=query)\n prepped = self.session.prepare_request(req)\n response = self.session.send(prepped, timeout=timeout)\n # Make non-200s throw\n response.raise_for_status()\n\n except requests.exceptions.HTTPError as e:\n # 401 - Auth error. Re-auth and re-run this request.\n if response.status_code == 401:\n if self.token is None:\n self.logger.error(\"Login information or API Key incorrect, please check and restart.\")\n self.logger.error(\"Error: \" + response.text)\n if postdict:\n self.logger.error(postdict)\n self.logger.warning(\"Token expired, reauthenticating...\")\n sleep(1)\n self.authenticate()\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n\n # 404, can be thrown if order canceled does not exist.\n elif response.status_code == 404:\n if verb == 'DELETE':\n self.logger.error(\"Order not found: %s\" % postdict['orderID'])\n return\n self.logger.error(\"Unable to contact the BitMEX API (404). \" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n # 429, ratelimit\n elif response.status_code == 429:\n self.logger.error(\"Ratelimited on current request. Sleeping, then trying again. Try fewer \" +\n \"order pairs or contact support@bitmex.com to raise your limits. 
\" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n sleep(1)\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n\n # 503 - BitMEX temporary downtime, likely due to a deploy. Try again\n elif response.status_code == 503:\n self.logger.warning(\"Unable to contact the BitMEX API (503), retrying. \" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n sleep(1)\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n # Unknown Error\n else:\n self.logger.error(\"Unhandled Error: %s: %s %s\" % (e, response.text, json.dumps(response.json(), indent=4)))\n self.logger.error(\"Endpoint was: %s %s\" % (verb, api))\n except requests.exceptions.Timeout as e:\n # Timeout, re-run this request\n self.logger.warning(\"Timed out, retrying...\")\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n\n except requests.exceptions.ConnectionError as e:\n self.logger.warning(\"Unable to contact the BitMEX API (ConnectionError). Please check the URL. Retrying. \" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n sleep(1)\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n\n return response.json()\n"
] | class BitMEX(object):
"""BitMEX API Connector."""
def __init__(self, base_url=None, login=None, password=None, otpToken=None,
apiKey=None, apiSecret=None, orderIDPrefix='mm_bitmex_'):
"""Init connector."""
self.logger = logging.getLogger('root')
self.base_url = base_url
self.token = None
self.login = login
self.password = password
self.otpToken = otpToken
self.apiKey = apiKey
self.apiSecret = apiSecret
if len(orderIDPrefix) > 13:
raise ValueError("settings.ORDERID_PREFIX must be at most 13 characters long!")
self.orderIDPrefix = orderIDPrefix
# Prepare HTTPS session
self.session = requests.Session()
# These headers are always sent
self.session.headers.update({'user-agent': 'bitmex-robot'})
#
# Authentication required methods
#
def authenticate(self):
"""Set BitMEX authentication information."""
if self.apiKey:
return
loginResponse = self._curl_bitmex(
api="user/login",
postdict={'email': self.login, 'password': self.password, 'token': self.otpToken})
self.token = loginResponse['id']
self.session.headers.update({'access-token': self.token})
def authentication_required(function):
"""Annotation for methods that require auth."""
def wrapped(self, *args, **kwargs):
if not (self.token or self.apiKey):
msg = "You must be authenticated to use this method"
raise AuthenticationError(msg)
else:
return function(self, *args, **kwargs)
return wrapped
@authentication_required
def position(self):
return self._curl_bitmex(api="position", verb="GET")
@authentication_required
def place_order(self, quantity, symbol, price):
"""Place an order."""
if price < 0:
raise Exception("Price must be positive.")
endpoint = "order"
# Generate a unique clOrdID with our prefix so we can identify it.
clOrdID = self.orderIDPrefix + base64.b64encode(uuid.uuid4().bytes).decode('ascii').rstrip('=\n')
postdict = {
'symbol': symbol,
'quantity': quantity,
'price': price,
'clOrdID': clOrdID
}
return self._curl_bitmex(api=endpoint, postdict=postdict, verb="POST")
@authentication_required
def open_orders(self, symbol=None):
"""Get open orders via HTTP. Used on close to ensure we catch them all."""
api = "order"
query = {'ordStatus.isTerminated': False }
if symbol != None:
query['symbol'] =symbol
orders = self._curl_bitmex(
api=api,
query={'filter': json.dumps(query)},
verb="GET"
)
return orders
# # Only return orders that start with our clOrdID prefix.
# return [o for o in orders if str(o['clOrdID']).startswith(self.orderIDPrefix)]
@authentication_required
def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None):
"""Send a request to BitMEX Servers."""
# Handle URL
url = self.base_url + api
# Default to POST if data is attached, GET otherwise
if not verb:
verb = 'POST' if postdict else 'GET'
# Auth: Use Access Token by default, API Key/Secret if provided
auth = AccessTokenAuth(self.token)
if self.apiKey:
auth = APIKeyAuthWithExpires(self.apiKey, self.apiSecret)
# Make the request
try:
# url = "http://httpbin.org/post"
req = requests.Request(verb, url, data=postdict, auth=auth, params=query)
prepped = self.session.prepare_request(req)
response = self.session.send(prepped, timeout=timeout)
# Make non-200s throw
response.raise_for_status()
except requests.exceptions.HTTPError as e:
# 401 - Auth error. Re-auth and re-run this request.
if response.status_code == 401:
if self.token is None:
self.logger.error("Login information or API Key incorrect, please check and restart.")
self.logger.error("Error: " + response.text)
if postdict:
self.logger.error(postdict)
self.logger.warning("Token expired, reauthenticating...")
sleep(1)
self.authenticate()
return self._curl_bitmex(api, query, postdict, timeout, verb)
# 404, can be thrown if order canceled does not exist.
elif response.status_code == 404:
if verb == 'DELETE':
self.logger.error("Order not found: %s" % postdict['orderID'])
return
self.logger.error("Unable to contact the BitMEX API (404). " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
# 429, ratelimit
elif response.status_code == 429:
self.logger.error("Ratelimited on current request. Sleeping, then trying again. Try fewer " +
"order pairs or contact support@bitmex.com to raise your limits. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
sleep(1)
return self._curl_bitmex(api, query, postdict, timeout, verb)
# 503 - BitMEX temporary downtime, likely due to a deploy. Try again
elif response.status_code == 503:
self.logger.warning("Unable to contact the BitMEX API (503), retrying. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
sleep(1)
return self._curl_bitmex(api, query, postdict, timeout, verb)
# Unknown Error
else:
self.logger.error("Unhandled Error: %s: %s %s" % (e, response.text, json.dumps(response.json(), indent=4)))
self.logger.error("Endpoint was: %s %s" % (verb, api))
except requests.exceptions.Timeout as e:
# Timeout, re-run this request
self.logger.warning("Timed out, retrying...")
return self._curl_bitmex(api, query, postdict, timeout, verb)
except requests.exceptions.ConnectionError as e:
self.logger.warning("Unable to contact the BitMEX API (ConnectionError). Please check the URL. Retrying. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
sleep(1)
return self._curl_bitmex(api, query, postdict, timeout, verb)
return response.json()
|
joequant/cryptoexchange | cryptoexchange/bitmex.py | BitMEX._curl_bitmex | python | def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None):
# Handle URL
url = self.base_url + api
# Default to POST if data is attached, GET otherwise
if not verb:
verb = 'POST' if postdict else 'GET'
# Auth: Use Access Token by default, API Key/Secret if provided
auth = AccessTokenAuth(self.token)
if self.apiKey:
auth = APIKeyAuthWithExpires(self.apiKey, self.apiSecret)
# Make the request
try:
# url = "http://httpbin.org/post"
req = requests.Request(verb, url, data=postdict, auth=auth, params=query)
prepped = self.session.prepare_request(req)
response = self.session.send(prepped, timeout=timeout)
# Make non-200s throw
response.raise_for_status()
except requests.exceptions.HTTPError as e:
# 401 - Auth error. Re-auth and re-run this request.
if response.status_code == 401:
if self.token is None:
self.logger.error("Login information or API Key incorrect, please check and restart.")
self.logger.error("Error: " + response.text)
if postdict:
self.logger.error(postdict)
self.logger.warning("Token expired, reauthenticating...")
sleep(1)
self.authenticate()
return self._curl_bitmex(api, query, postdict, timeout, verb)
# 404, can be thrown if order canceled does not exist.
elif response.status_code == 404:
if verb == 'DELETE':
self.logger.error("Order not found: %s" % postdict['orderID'])
return
self.logger.error("Unable to contact the BitMEX API (404). " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
# 429, ratelimit
elif response.status_code == 429:
self.logger.error("Ratelimited on current request. Sleeping, then trying again. Try fewer " +
"order pairs or contact support@bitmex.com to raise your limits. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
sleep(1)
return self._curl_bitmex(api, query, postdict, timeout, verb)
# 503 - BitMEX temporary downtime, likely due to a deploy. Try again
elif response.status_code == 503:
self.logger.warning("Unable to contact the BitMEX API (503), retrying. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
sleep(1)
return self._curl_bitmex(api, query, postdict, timeout, verb)
# Unknown Error
else:
self.logger.error("Unhandled Error: %s: %s %s" % (e, response.text, json.dumps(response.json(), indent=4)))
self.logger.error("Endpoint was: %s %s" % (verb, api))
except requests.exceptions.Timeout as e:
# Timeout, re-run this request
self.logger.warning("Timed out, retrying...")
return self._curl_bitmex(api, query, postdict, timeout, verb)
except requests.exceptions.ConnectionError as e:
self.logger.warning("Unable to contact the BitMEX API (ConnectionError). Please check the URL. Retrying. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
sleep(1)
return self._curl_bitmex(api, query, postdict, timeout, verb)
return response.json() | Send a request to BitMEX Servers. | train | https://github.com/joequant/cryptoexchange/blob/6690fbd9a2ba00e40d7484425808c84d44233f0c/cryptoexchange/bitmex.py#L181-L253 | [
" def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None):\n \"\"\"Send a request to BitMEX Servers.\"\"\"\n # Handle URL\n url = self.base_url + api\n\n # Default to POST if data is attached, GET otherwise\n if not verb:\n verb = 'POST' if postdict else 'GET'\n\n # Auth: Use Access Token by default, API Key/Secret if provided\n auth = AccessTokenAuth(self.token)\n if self.apiKey:\n auth = APIKeyAuthWithExpires(self.apiKey, self.apiSecret)\n\n # Make the request\n try:\n# url = \"http://httpbin.org/post\"\n req = requests.Request(verb, url, data=postdict, auth=auth, params=query)\n prepped = self.session.prepare_request(req)\n response = self.session.send(prepped, timeout=timeout)\n # Make non-200s throw\n response.raise_for_status()\n\n except requests.exceptions.HTTPError as e:\n # 401 - Auth error. Re-auth and re-run this request.\n if response.status_code == 401:\n if self.token is None:\n self.logger.error(\"Login information or API Key incorrect, please check and restart.\")\n self.logger.error(\"Error: \" + response.text)\n if postdict:\n self.logger.error(postdict)\n self.logger.warning(\"Token expired, reauthenticating...\")\n sleep(1)\n self.authenticate()\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n\n # 404, can be thrown if order canceled does not exist.\n elif response.status_code == 404:\n if verb == 'DELETE':\n self.logger.error(\"Order not found: %s\" % postdict['orderID'])\n return\n self.logger.error(\"Unable to contact the BitMEX API (404). \" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n # 429, ratelimit\n elif response.status_code == 429:\n self.logger.error(\"Ratelimited on current request. Sleeping, then trying again. Try fewer \" +\n \"order pairs or contact support@bitmex.com to raise your limits. 
\" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n sleep(1)\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n\n # 503 - BitMEX temporary downtime, likely due to a deploy. Try again\n elif response.status_code == 503:\n self.logger.warning(\"Unable to contact the BitMEX API (503), retrying. \" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n sleep(1)\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n # Unknown Error\n else:\n self.logger.error(\"Unhandled Error: %s: %s %s\" % (e, response.text, json.dumps(response.json(), indent=4)))\n self.logger.error(\"Endpoint was: %s %s\" % (verb, api))\n except requests.exceptions.Timeout as e:\n # Timeout, re-run this request\n self.logger.warning(\"Timed out, retrying...\")\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n\n except requests.exceptions.ConnectionError as e:\n self.logger.warning(\"Unable to contact the BitMEX API (ConnectionError). Please check the URL. Retrying. \" +\n \"Request: %s \\n %s\" % (url, json.dumps(postdict)))\n sleep(1)\n return self._curl_bitmex(api, query, postdict, timeout, verb)\n\n return response.json()\n",
"def authenticate(self):\n \"\"\"Set BitMEX authentication information.\"\"\"\n if self.apiKey:\n return\n loginResponse = self._curl_bitmex(\n api=\"user/login\",\n postdict={'email': self.login, 'password': self.password, 'token': self.otpToken})\n self.token = loginResponse['id']\n self.session.headers.update({'access-token': self.token})\n"
] | class BitMEX(object):
"""BitMEX API Connector."""
def __init__(self, base_url=None, login=None, password=None, otpToken=None,
apiKey=None, apiSecret=None, orderIDPrefix='mm_bitmex_'):
"""Init connector."""
self.logger = logging.getLogger('root')
self.base_url = base_url
self.token = None
self.login = login
self.password = password
self.otpToken = otpToken
self.apiKey = apiKey
self.apiSecret = apiSecret
if len(orderIDPrefix) > 13:
raise ValueError("settings.ORDERID_PREFIX must be at most 13 characters long!")
self.orderIDPrefix = orderIDPrefix
# Prepare HTTPS session
self.session = requests.Session()
# These headers are always sent
self.session.headers.update({'user-agent': 'bitmex-robot'})
#
# Authentication required methods
#
def authenticate(self):
"""Set BitMEX authentication information."""
if self.apiKey:
return
loginResponse = self._curl_bitmex(
api="user/login",
postdict={'email': self.login, 'password': self.password, 'token': self.otpToken})
self.token = loginResponse['id']
self.session.headers.update({'access-token': self.token})
def authentication_required(function):
"""Annotation for methods that require auth."""
def wrapped(self, *args, **kwargs):
if not (self.token or self.apiKey):
msg = "You must be authenticated to use this method"
raise AuthenticationError(msg)
else:
return function(self, *args, **kwargs)
return wrapped
@authentication_required
def position(self):
return self._curl_bitmex(api="position", verb="GET")
@authentication_required
def place_order(self, quantity, symbol, price):
"""Place an order."""
if price < 0:
raise Exception("Price must be positive.")
endpoint = "order"
# Generate a unique clOrdID with our prefix so we can identify it.
clOrdID = self.orderIDPrefix + base64.b64encode(uuid.uuid4().bytes).decode('ascii').rstrip('=\n')
postdict = {
'symbol': symbol,
'quantity': quantity,
'price': price,
'clOrdID': clOrdID
}
return self._curl_bitmex(api=endpoint, postdict=postdict, verb="POST")
@authentication_required
def open_orders(self, symbol=None):
"""Get open orders via HTTP. Used on close to ensure we catch them all."""
api = "order"
query = {'ordStatus.isTerminated': False }
if symbol != None:
query['symbol'] =symbol
orders = self._curl_bitmex(
api=api,
query={'filter': json.dumps(query)},
verb="GET"
)
return orders
# # Only return orders that start with our clOrdID prefix.
# return [o for o in orders if str(o['clOrdID']).startswith(self.orderIDPrefix)]
@authentication_required
def cancel(self, orderID):
"""Cancel an existing order."""
api = "order"
postdict = {
'orderID': orderID,
}
return self._curl_bitmex(api=api, postdict=postdict, verb="DELETE")
|
joequant/cryptoexchange | cryptoexchange/api796.py | getUserInfoError | python | def getUserInfoError(sAccessToken):
import urllib.request, urllib.parse, urllib.error
payload = urllib.parse.urlencode({'access_token': sAccessToken})
c = http.client.HTTPSConnection('796.com')
c.request("GET", "/v1/user/get_info?"+payload)
r = c.getresponse()
data = r.read()
jsonDict = json.loads(data.decode('utf-8'));
print(jsonDict) | May be return {u'msg': u'Access_token repealed', u'errno': u'-102', u'data': []} | train | https://github.com/joequant/cryptoexchange/blob/6690fbd9a2ba00e40d7484425808c84d44233f0c/cryptoexchange/api796.py#L69-L80 | null | #!/usr/bin/env python3
#-*- coding:utf-8 -*-
"""
796 API Trading Example/DEMO in Python
After getToken.
"""
import urllib.request, urllib.error, urllib.parse
import time
import base64
import hashlib
import hmac
import http.client
import json
import os
def get_796_token(appid,apikey,secretkey):
timestamp = time.time()#"1414142919" #time.time()
params = {"apikey": apikey, "appid": appid, "secretkey": secretkey, "timestamp": str(timestamp)}
params = sorted(iter(params.items()), key=lambda d: d[0], reverse=False)
message = urllib.parse.urlencode(params)
print("secretkey=",secretkey)
print("message=",message)
s = hmac.new(secretkey.encode('utf-8'),
message.encode('utf-8'),
digestmod=hashlib.sha1).hexdigest()
print("hex=",s)
sig = base64.b64encode(s.encode('utf-8'))
print("sig=",sig)
payload = urllib.parse.urlencode({'appid': appid, 'apikey': apikey, 'timestamp': timestamp, 'sig': sig})
c = http.client.HTTPSConnection('796.com')
c.request("GET", "/oauth/token?"+payload)
r = c.getresponse()
if r.status == 200:
data = r.read()
jsonDict = json.loads(data.decode('utf-8'));
errno = jsonDict['errno']
if errno=="0":
return jsonDict['data']['access_token']
return None
def getUserInfo(sAccessToken):
sUrl = "/v1/user/get_info?access_token=%s" % (sAccessToken)
c = http.client.HTTPSConnection('796.com')
c.request("GET", sUrl)
r = c.getresponse()
print("r.status=",r.status)
print(r.read())
def getUserInfo1(sAccessToken):
sUrl = "https://796.com/v1/user/get_info?access_token=%s" % (sAccessToken)
response = urllib.request.urlopen(sUrl)
print(response.read())
def getUserInfo2(sAccessToken):
import requests
sUrl = "https://796.com/v1/user/get_info?access_token=%s" % (sAccessToken)
response = requests.get(sUrl, timeout=20)
print(response.content)
def testHMacSHA(secretkey,message):
print("secretkey=",secretkey)
print("message=",message)
s = hmac.new(secretkey, message.encode('utf-8'),
digestmod=hashlib.sha1).hexdigest()
print("hex=",s)
if __name__ == "__main__":
app_id = os.environ.get("APP_ID_796", None)
api_key = os.environ.get("API_KEY_796", None)
api_secret = os.environ.get("API_SECRET_796", None)
testHMacSHA(b"HF94bR940e1d9YZwfgickG5HR07SFJQGscgO+E3vFPQGwSzyGtUQLxIh6blv",
"apikey=5999a1ce-4312-8a3c-75a5-327c-f5cf5251&appid=11040&secretkey=HF94bR940e1d9YZwfgickG5HR07SFJQGscgO%2BE3vFPQGwSzyGtUQLxIh6blv×tamp=1414142919")
access_token = get_796_token(appid = app_id,
apikey=api_key,
secretkey=api_secret)
print("access_token=",access_token)
getUserInfo(access_token)
getUserInfo1(access_token)
getUserInfo2(access_token)
getUserInfoError(access_token)
|
joequant/cryptoexchange | cryptoexchange/bitmex_ws.py | generate_signature | python | def generate_signature(secret, verb, url, nonce, data):
# Parse the url so we can remove the base and extract just the path.
parsedURL = urllib.parse.urlparse(url)
path = parsedURL.path
if parsedURL.query:
path = path + '?' + parsedURL.query
# print "Computing HMAC: %s" % verb + path + str(nonce) + data
message = bytes(verb + path + str(nonce) + data, 'utf-8')
signature = hmac.new(secret.encode('utf-8'),
message,
digestmod=hashlib.sha256).hexdigest()
return signature | Generate a request signature compatible with BitMEX. | train | https://github.com/joequant/cryptoexchange/blob/6690fbd9a2ba00e40d7484425808c84d44233f0c/cryptoexchange/bitmex_ws.py#L31-L45 | null | #!/usr/bin/env python3
import sys
import websocket
import threading
import traceback
from time import sleep
import json
import string
import logging
import urllib.parse
import math
import time
import hmac
import hashlib
def generate_nonce():
return int(round(time.time() * 1000))
# Generates an API signature.
# A signature is HMAC_SHA256(secret, verb + path + nonce + data), hex encoded.
# Verb must be uppercased, url is relative, nonce must be an increasing 64-bit integer
# and the data, if present, must be JSON without whitespace between keys.
#
# For example, in psuedocode (and in real code below):
#
# verb=POST
# url=/api/v1/order
# nonce=1416993995705
# data={"symbol":"XBTZ14","quantity":1,"price":395.01}
# signature = HEX(HMAC_SHA256(secret, 'POST/api/v1/order1416993995705{"symbol":"XBTZ14","quantity":1,"price":395.01}'))
# Naive implementation of connecting to BitMEX websocket for streaming realtime data.
# The Marketmaker still interacts with this as if it were a REST Endpoint, but now it can get
# much more realtime data without polling the hell out of the API.
#
# The Websocket offers a bunch of data as raw properties right on the object.
# On connect, it synchronously asks for a push of all this data then returns.
# Right after, the MM can start using its data. It will be updated in realtime, so the MM can
# poll really often if it wants.
class BitMEXWebsocket():
def __init__(self, endpoint="", symbol="XBU24H", API_KEY=None, API_SECRET=None, LOGIN=None, PASSWORD=None):
'''Connect to the websocket and initialize data stores.'''
self.logger = logging.getLogger('root')
self.logger.debug("Initializing WebSocket.")
self.endpoint = endpoint
self.api_key = API_KEY
self.api_secret = API_SECRET
self.login = LOGIN
self.password = PASSWORD
self.data = {}
self.keys = {}
# We can subscribe right in the connection querystring, so let's build that.
# Subscribe to all pertinent endpoints
wsURL = self.__get_url(symbol)
self.logger.info("Connecting to %s" % wsURL)
self.__connect(wsURL, symbol)
self.logger.info('Connected to WS.')
# Connected. Push symbols
self.__push_account()
self.__push_symbol(symbol)
self.logger.info('Got all market data. Starting.')
def exit(self):
self.exited = True
self.ws.close()
def get_instrument(self):
# Turn the 'tickSize' into 'tickLog' for use in rounding
instrument = self.data['instrument'][0]
instrument['tickLog'] = int(math.fabs(math.log10(instrument['tickSize'])))
return instrument
def get_ticker(self):
'''Return a ticker object. Generated from quote and trade.'''
lastQuote = self.data['quote'][-1]
lastTrade = self.data['trade'][-1]
ticker = {
"last": lastTrade['price'],
"buy": lastQuote['bidPrice'],
"sell": lastQuote['askPrice'],
"mid": (float(lastQuote['bidPrice'] or 0) + float(lastQuote['askPrice'] or 0)) / 2
}
# The instrument has a tickSize. Use it to round values.
instrument = self.data['instrument'][0]
return {k: round(float(v or 0), instrument['tickLog']) for k, v in list(ticker.items())}
def funds(self):
return self.data['margin'][0]
def market_depth(self):
return self.data['orderBook25']
def open_orders(self, clOrdIDPrefix):
orders = self.data['order']
# Filter to only open orders (leavesQty > 0) and those that we actually placed
return [o for o in orders if str(o['clOrdID']).startswith(clOrdIDPrefix) and o['leavesQty'] > 0]
def recent_trades(self):
return self.data['trade']
def __connect(self, wsURL, symbol):
'''Connect to the websocket in a thread.'''
self.logger.debug("Starting thread")
self.ws = websocket.WebSocketApp(wsURL,
on_message=self.__on_message,
on_close=self.__on_close,
on_open=self.__on_open,
on_error=self.__on_error,
# We can login using email/pass or API key
header=self.__get_auth())
self.wst = threading.Thread(target=lambda: self.ws.run_forever())
self.wst.daemon = True
self.wst.start()
self.logger.debug("Started thread")
# Wait for connect before continuing
conn_timeout = 5
while not self.ws.sock or not self.ws.sock.connected and conn_timeout:
sleep(1)
conn_timeout -= 1
if not conn_timeout:
self.logger.error("Couldn't connect to WS! Exiting.")
self.exit()
sys.exit(1)
def __get_auth(self):
'''Return auth headers. Will use API Keys if present in settings.'''
if self.api_key == None and self.login == None:
self.logger.error("No authentication provided! Unable to connect.")
sys.exit(1)
if self.api_key == None:
self.logger.info("Authenticating with email/password.")
return [
"email: " + self.login,
"password: " + self.password
]
else:
self.logger.info("Authenticating with API Key.")
# To auth to the WS using an API key, we generate a signature of a nonce and
# the WS API endpoint.
nonce = generate_nonce()
return [
"api-nonce: " + str(nonce),
"api-signature: " + generate_signature(self.api_secret, 'GET', '/realtime', nonce, ''),
"api-key:" + self.api_key
]
def __get_url(self, symbol):
subscriptions = [sub + ':' + symbol for sub in ["order", "execution", "position", "quote", "trade"]]
subscriptions += ["margin"]
urlParts = list(urllib.parse.urlparse(self.endpoint))
urlParts[0] = urlParts[0].replace('http', 'ws')
urlParts[2] = "/realtime?subscribe=" + ",".join(subscriptions)
return urllib.parse.urlunparse(urlParts)
def __push_account(self):
'''Ask the websocket for an account push. Gets margin, positions, and open orders'''
self.__send_command("getAccount")
# Wait for the keys to show up from the ws
while not {'margin', 'position', 'order'} <= set(self.data):
sleep(0.1)
def __push_symbol(self, symbol):
'''Ask the websocket for a symbol push. Gets instrument, orderBook, quote, and trade'''
self.__send_command("getSymbol", symbol)
while not {'instrument', 'trade', 'orderBook25'} <= set(self.data):
sleep(0.1)
def __send_command(self, command, args=[]):
'''Send a raw command.'''
self.ws.send(json.dumps({"op": command, "args": args}))
def __on_message(self, ws, message):
'''Handler for parsing WS messages.'''
message = json.loads(message)
self.logger.debug(json.dumps(message))
table = message['table'] if 'table' in message else None
action = message['action'] if 'action' in message else None
try:
if 'subscribe' in message:
self.logger.debug("Subscribed to %s." % message['subscribe'])
elif action:
if table not in self.data:
self.data[table] = []
# There are four possible actions from the WS:
# 'partial' - full table image
# 'insert' - new row
# 'update' - update row
# 'delete' - delete row
if action == 'partial':
self.logger.debug("%s: partial" % table)
self.data[table] += message['data']
# Keys are communicated on partials to let you know how to uniquely identify
# an item. We use it for updates.
self.keys[table] = message['keys']
elif action == 'insert':
self.logger.debug('%s: inserting %s' % (table, message['data']))
self.data[table] += message['data']
elif action == 'update':
self.logger.debug('%s: updating %s' % (table, message['data']))
# Locate the item in the collection and update it.
for updateData in message['data']:
item = findItemByKeys(self.keys[table], self.data[table], updateData)
if not item:
return # No item found to update. Could happen before push
item.update(updateData)
# Remove cancelled / filled orders
if table == 'order' and item['leavesQty'] <= 0:
self.data[table].remove(item)
elif action == 'delete':
self.logger.debug('%s: deleting %s' % (table, message['data']))
# Locate the item in the collection and remove it.
for deleteData in message['data']:
item = findItemByKeys(self.keys[table], self.data[table], deleteData)
self.data[table].remove(item)
else:
raise Exception("Unknown action: %s" % action)
except:
self.logger.error(traceback.format_exc())
def __on_error(self, ws, error):
if not self.exited:
self.logger.error("Error : %s" % error)
sys.exit(1)
def __on_open(self, ws):
self.logger.debug("Websocket Opened.")
def __on_close(self, ws):
self.logger.info('Websocket Closed')
sys.exit(1)
def findItemByKeys(keys, table, matchData):
for item in table:
matched = True
for key in keys:
if item[key] != matchData[key]:
matched = False
if matched:
return item
if __name__ == "__main__":
# create console handler and set level to debug
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
# create formatter
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
# add formatter to ch
ch.setFormatter(formatter)
logger.addHandler(ch)
ws = BitMEXWebsocket("https://testnet.bitmex.com/api/v1",
API_KEY="FOO",
API_SECRET="BAR")
while(ws.ws.sock.connected):
sleep(1)
|
joequant/cryptoexchange | cryptoexchange/bitmex_ws.py | BitMEXWebsocket.get_ticker | python | def get_ticker(self):
'''Return a ticker object. Generated from quote and trade.'''
lastQuote = self.data['quote'][-1]
lastTrade = self.data['trade'][-1]
ticker = {
"last": lastTrade['price'],
"buy": lastQuote['bidPrice'],
"sell": lastQuote['askPrice'],
"mid": (float(lastQuote['bidPrice'] or 0) + float(lastQuote['askPrice'] or 0)) / 2
}
# The instrument has a tickSize. Use it to round values.
instrument = self.data['instrument'][0]
return {k: round(float(v or 0), instrument['tickLog']) for k, v in list(ticker.items())} | Return a ticker object. Generated from quote and trade. | train | https://github.com/joequant/cryptoexchange/blob/6690fbd9a2ba00e40d7484425808c84d44233f0c/cryptoexchange/bitmex_ws.py#L92-L105 | null | class BitMEXWebsocket():
def __init__(self, endpoint="", symbol="XBU24H", API_KEY=None, API_SECRET=None, LOGIN=None, PASSWORD=None):
'''Connect to the websocket and initialize data stores.'''
self.logger = logging.getLogger('root')
self.logger.debug("Initializing WebSocket.")
self.endpoint = endpoint
self.api_key = API_KEY
self.api_secret = API_SECRET
self.login = LOGIN
self.password = PASSWORD
self.data = {}
self.keys = {}
# We can subscribe right in the connection querystring, so let's build that.
# Subscribe to all pertinent endpoints
wsURL = self.__get_url(symbol)
self.logger.info("Connecting to %s" % wsURL)
self.__connect(wsURL, symbol)
self.logger.info('Connected to WS.')
# Connected. Push symbols
self.__push_account()
self.__push_symbol(symbol)
self.logger.info('Got all market data. Starting.')
def exit(self):
self.exited = True
self.ws.close()
def get_instrument(self):
# Turn the 'tickSize' into 'tickLog' for use in rounding
instrument = self.data['instrument'][0]
instrument['tickLog'] = int(math.fabs(math.log10(instrument['tickSize'])))
return instrument
def funds(self):
return self.data['margin'][0]
def market_depth(self):
return self.data['orderBook25']
def open_orders(self, clOrdIDPrefix):
orders = self.data['order']
# Filter to only open orders (leavesQty > 0) and those that we actually placed
return [o for o in orders if str(o['clOrdID']).startswith(clOrdIDPrefix) and o['leavesQty'] > 0]
def recent_trades(self):
return self.data['trade']
def __connect(self, wsURL, symbol):
'''Connect to the websocket in a thread.'''
self.logger.debug("Starting thread")
self.ws = websocket.WebSocketApp(wsURL,
on_message=self.__on_message,
on_close=self.__on_close,
on_open=self.__on_open,
on_error=self.__on_error,
# We can login using email/pass or API key
header=self.__get_auth())
self.wst = threading.Thread(target=lambda: self.ws.run_forever())
self.wst.daemon = True
self.wst.start()
self.logger.debug("Started thread")
# Wait for connect before continuing
conn_timeout = 5
while not self.ws.sock or not self.ws.sock.connected and conn_timeout:
sleep(1)
conn_timeout -= 1
if not conn_timeout:
self.logger.error("Couldn't connect to WS! Exiting.")
self.exit()
sys.exit(1)
def __get_auth(self):
'''Return auth headers. Will use API Keys if present in settings.'''
if self.api_key == None and self.login == None:
self.logger.error("No authentication provided! Unable to connect.")
sys.exit(1)
if self.api_key == None:
self.logger.info("Authenticating with email/password.")
return [
"email: " + self.login,
"password: " + self.password
]
else:
self.logger.info("Authenticating with API Key.")
# To auth to the WS using an API key, we generate a signature of a nonce and
# the WS API endpoint.
nonce = generate_nonce()
return [
"api-nonce: " + str(nonce),
"api-signature: " + generate_signature(self.api_secret, 'GET', '/realtime', nonce, ''),
"api-key:" + self.api_key
]
def __get_url(self, symbol):
subscriptions = [sub + ':' + symbol for sub in ["order", "execution", "position", "quote", "trade"]]
subscriptions += ["margin"]
urlParts = list(urllib.parse.urlparse(self.endpoint))
urlParts[0] = urlParts[0].replace('http', 'ws')
urlParts[2] = "/realtime?subscribe=" + ",".join(subscriptions)
return urllib.parse.urlunparse(urlParts)
def __push_account(self):
'''Ask the websocket for an account push. Gets margin, positions, and open orders'''
self.__send_command("getAccount")
# Wait for the keys to show up from the ws
while not {'margin', 'position', 'order'} <= set(self.data):
sleep(0.1)
def __push_symbol(self, symbol):
'''Ask the websocket for a symbol push. Gets instrument, orderBook, quote, and trade'''
self.__send_command("getSymbol", symbol)
while not {'instrument', 'trade', 'orderBook25'} <= set(self.data):
sleep(0.1)
def __send_command(self, command, args=[]):
'''Send a raw command.'''
self.ws.send(json.dumps({"op": command, "args": args}))
def __on_message(self, ws, message):
'''Handler for parsing WS messages.'''
message = json.loads(message)
self.logger.debug(json.dumps(message))
table = message['table'] if 'table' in message else None
action = message['action'] if 'action' in message else None
try:
if 'subscribe' in message:
self.logger.debug("Subscribed to %s." % message['subscribe'])
elif action:
if table not in self.data:
self.data[table] = []
# There are four possible actions from the WS:
# 'partial' - full table image
# 'insert' - new row
# 'update' - update row
# 'delete' - delete row
if action == 'partial':
self.logger.debug("%s: partial" % table)
self.data[table] += message['data']
# Keys are communicated on partials to let you know how to uniquely identify
# an item. We use it for updates.
self.keys[table] = message['keys']
elif action == 'insert':
self.logger.debug('%s: inserting %s' % (table, message['data']))
self.data[table] += message['data']
elif action == 'update':
self.logger.debug('%s: updating %s' % (table, message['data']))
# Locate the item in the collection and update it.
for updateData in message['data']:
item = findItemByKeys(self.keys[table], self.data[table], updateData)
if not item:
return # No item found to update. Could happen before push
item.update(updateData)
# Remove cancelled / filled orders
if table == 'order' and item['leavesQty'] <= 0:
self.data[table].remove(item)
elif action == 'delete':
self.logger.debug('%s: deleting %s' % (table, message['data']))
# Locate the item in the collection and remove it.
for deleteData in message['data']:
item = findItemByKeys(self.keys[table], self.data[table], deleteData)
self.data[table].remove(item)
else:
raise Exception("Unknown action: %s" % action)
except:
self.logger.error(traceback.format_exc())
def __on_error(self, ws, error):
if not self.exited:
self.logger.error("Error : %s" % error)
sys.exit(1)
def __on_open(self, ws):
self.logger.debug("Websocket Opened.")
def __on_close(self, ws):
self.logger.info('Websocket Closed')
sys.exit(1)
|
joequant/cryptoexchange | cryptoexchange/bitmex_ws.py | BitMEXWebsocket.__connect | python | def __connect(self, wsURL, symbol):
'''Connect to the websocket in a thread.'''
self.logger.debug("Starting thread")
self.ws = websocket.WebSocketApp(wsURL,
on_message=self.__on_message,
on_close=self.__on_close,
on_open=self.__on_open,
on_error=self.__on_error,
# We can login using email/pass or API key
header=self.__get_auth())
self.wst = threading.Thread(target=lambda: self.ws.run_forever())
self.wst.daemon = True
self.wst.start()
self.logger.debug("Started thread")
# Wait for connect before continuing
conn_timeout = 5
while not self.ws.sock or not self.ws.sock.connected and conn_timeout:
sleep(1)
conn_timeout -= 1
if not conn_timeout:
self.logger.error("Couldn't connect to WS! Exiting.")
self.exit()
sys.exit(1) | Connect to the websocket in a thread. | train | https://github.com/joequant/cryptoexchange/blob/6690fbd9a2ba00e40d7484425808c84d44233f0c/cryptoexchange/bitmex_ws.py#L121-L146 | [
"def exit(self):\n self.exited = True\n self.ws.close()\n",
"def __get_auth(self):\n '''Return auth headers. Will use API Keys if present in settings.'''\n if self.api_key == None and self.login == None:\n self.logger.error(\"No authentication provided! Unable to connect.\")\n sys.exit(1)\n\n if self.api_key == None:\n self.logger.info(\"Authenticating with email/password.\")\n return [\n \"email: \" + self.login,\n \"password: \" + self.password\n ]\n else:\n self.logger.info(\"Authenticating with API Key.\")\n # To auth to the WS using an API key, we generate a signature of a nonce and\n # the WS API endpoint.\n nonce = generate_nonce()\n return [\n \"api-nonce: \" + str(nonce),\n \"api-signature: \" + generate_signature(self.api_secret, 'GET', '/realtime', nonce, ''),\n \"api-key:\" + self.api_key\n ]\n"
] | class BitMEXWebsocket():
def __init__(self, endpoint="", symbol="XBU24H", API_KEY=None, API_SECRET=None, LOGIN=None, PASSWORD=None):
'''Connect to the websocket and initialize data stores.'''
self.logger = logging.getLogger('root')
self.logger.debug("Initializing WebSocket.")
self.endpoint = endpoint
self.api_key = API_KEY
self.api_secret = API_SECRET
self.login = LOGIN
self.password = PASSWORD
self.data = {}
self.keys = {}
# We can subscribe right in the connection querystring, so let's build that.
# Subscribe to all pertinent endpoints
wsURL = self.__get_url(symbol)
self.logger.info("Connecting to %s" % wsURL)
self.__connect(wsURL, symbol)
self.logger.info('Connected to WS.')
# Connected. Push symbols
self.__push_account()
self.__push_symbol(symbol)
self.logger.info('Got all market data. Starting.')
def exit(self):
self.exited = True
self.ws.close()
def get_instrument(self):
# Turn the 'tickSize' into 'tickLog' for use in rounding
instrument = self.data['instrument'][0]
instrument['tickLog'] = int(math.fabs(math.log10(instrument['tickSize'])))
return instrument
def get_ticker(self):
'''Return a ticker object. Generated from quote and trade.'''
lastQuote = self.data['quote'][-1]
lastTrade = self.data['trade'][-1]
ticker = {
"last": lastTrade['price'],
"buy": lastQuote['bidPrice'],
"sell": lastQuote['askPrice'],
"mid": (float(lastQuote['bidPrice'] or 0) + float(lastQuote['askPrice'] or 0)) / 2
}
# The instrument has a tickSize. Use it to round values.
instrument = self.data['instrument'][0]
return {k: round(float(v or 0), instrument['tickLog']) for k, v in list(ticker.items())}
def funds(self):
return self.data['margin'][0]
def market_depth(self):
return self.data['orderBook25']
def open_orders(self, clOrdIDPrefix):
orders = self.data['order']
# Filter to only open orders (leavesQty > 0) and those that we actually placed
return [o for o in orders if str(o['clOrdID']).startswith(clOrdIDPrefix) and o['leavesQty'] > 0]
def recent_trades(self):
return self.data['trade']
def __get_auth(self):
'''Return auth headers. Will use API Keys if present in settings.'''
if self.api_key == None and self.login == None:
self.logger.error("No authentication provided! Unable to connect.")
sys.exit(1)
if self.api_key == None:
self.logger.info("Authenticating with email/password.")
return [
"email: " + self.login,
"password: " + self.password
]
else:
self.logger.info("Authenticating with API Key.")
# To auth to the WS using an API key, we generate a signature of a nonce and
# the WS API endpoint.
nonce = generate_nonce()
return [
"api-nonce: " + str(nonce),
"api-signature: " + generate_signature(self.api_secret, 'GET', '/realtime', nonce, ''),
"api-key:" + self.api_key
]
def __get_url(self, symbol):
subscriptions = [sub + ':' + symbol for sub in ["order", "execution", "position", "quote", "trade"]]
subscriptions += ["margin"]
urlParts = list(urllib.parse.urlparse(self.endpoint))
urlParts[0] = urlParts[0].replace('http', 'ws')
urlParts[2] = "/realtime?subscribe=" + ",".join(subscriptions)
return urllib.parse.urlunparse(urlParts)
def __push_account(self):
'''Ask the websocket for an account push. Gets margin, positions, and open orders'''
self.__send_command("getAccount")
# Wait for the keys to show up from the ws
while not {'margin', 'position', 'order'} <= set(self.data):
sleep(0.1)
def __push_symbol(self, symbol):
'''Ask the websocket for a symbol push. Gets instrument, orderBook, quote, and trade'''
self.__send_command("getSymbol", symbol)
while not {'instrument', 'trade', 'orderBook25'} <= set(self.data):
sleep(0.1)
def __send_command(self, command, args=[]):
'''Send a raw command.'''
self.ws.send(json.dumps({"op": command, "args": args}))
def __on_message(self, ws, message):
'''Handler for parsing WS messages.'''
message = json.loads(message)
self.logger.debug(json.dumps(message))
table = message['table'] if 'table' in message else None
action = message['action'] if 'action' in message else None
try:
if 'subscribe' in message:
self.logger.debug("Subscribed to %s." % message['subscribe'])
elif action:
if table not in self.data:
self.data[table] = []
# There are four possible actions from the WS:
# 'partial' - full table image
# 'insert' - new row
# 'update' - update row
# 'delete' - delete row
if action == 'partial':
self.logger.debug("%s: partial" % table)
self.data[table] += message['data']
# Keys are communicated on partials to let you know how to uniquely identify
# an item. We use it for updates.
self.keys[table] = message['keys']
elif action == 'insert':
self.logger.debug('%s: inserting %s' % (table, message['data']))
self.data[table] += message['data']
elif action == 'update':
self.logger.debug('%s: updating %s' % (table, message['data']))
# Locate the item in the collection and update it.
for updateData in message['data']:
item = findItemByKeys(self.keys[table], self.data[table], updateData)
if not item:
return # No item found to update. Could happen before push
item.update(updateData)
# Remove cancelled / filled orders
if table == 'order' and item['leavesQty'] <= 0:
self.data[table].remove(item)
elif action == 'delete':
self.logger.debug('%s: deleting %s' % (table, message['data']))
# Locate the item in the collection and remove it.
for deleteData in message['data']:
item = findItemByKeys(self.keys[table], self.data[table], deleteData)
self.data[table].remove(item)
else:
raise Exception("Unknown action: %s" % action)
except:
self.logger.error(traceback.format_exc())
def __on_error(self, ws, error):
if not self.exited:
self.logger.error("Error : %s" % error)
sys.exit(1)
def __on_open(self, ws):
self.logger.debug("Websocket Opened.")
def __on_close(self, ws):
self.logger.info('Websocket Closed')
sys.exit(1)
|
joequant/cryptoexchange | cryptoexchange/bitmex_ws.py | BitMEXWebsocket.__get_auth | python | def __get_auth(self):
'''Return auth headers. Will use API Keys if present in settings.'''
if self.api_key == None and self.login == None:
self.logger.error("No authentication provided! Unable to connect.")
sys.exit(1)
if self.api_key == None:
self.logger.info("Authenticating with email/password.")
return [
"email: " + self.login,
"password: " + self.password
]
else:
self.logger.info("Authenticating with API Key.")
# To auth to the WS using an API key, we generate a signature of a nonce and
# the WS API endpoint.
nonce = generate_nonce()
return [
"api-nonce: " + str(nonce),
"api-signature: " + generate_signature(self.api_secret, 'GET', '/realtime', nonce, ''),
"api-key:" + self.api_key
] | Return auth headers. Will use API Keys if present in settings. | train | https://github.com/joequant/cryptoexchange/blob/6690fbd9a2ba00e40d7484425808c84d44233f0c/cryptoexchange/bitmex_ws.py#L148-L169 | null | class BitMEXWebsocket():
def __init__(self, endpoint="", symbol="XBU24H", API_KEY=None, API_SECRET=None, LOGIN=None, PASSWORD=None):
'''Connect to the websocket and initialize data stores.'''
self.logger = logging.getLogger('root')
self.logger.debug("Initializing WebSocket.")
self.endpoint = endpoint
self.api_key = API_KEY
self.api_secret = API_SECRET
self.login = LOGIN
self.password = PASSWORD
self.data = {}
self.keys = {}
# We can subscribe right in the connection querystring, so let's build that.
# Subscribe to all pertinent endpoints
wsURL = self.__get_url(symbol)
self.logger.info("Connecting to %s" % wsURL)
self.__connect(wsURL, symbol)
self.logger.info('Connected to WS.')
# Connected. Push symbols
self.__push_account()
self.__push_symbol(symbol)
self.logger.info('Got all market data. Starting.')
def exit(self):
self.exited = True
self.ws.close()
def get_instrument(self):
# Turn the 'tickSize' into 'tickLog' for use in rounding
instrument = self.data['instrument'][0]
instrument['tickLog'] = int(math.fabs(math.log10(instrument['tickSize'])))
return instrument
def get_ticker(self):
'''Return a ticker object. Generated from quote and trade.'''
lastQuote = self.data['quote'][-1]
lastTrade = self.data['trade'][-1]
ticker = {
"last": lastTrade['price'],
"buy": lastQuote['bidPrice'],
"sell": lastQuote['askPrice'],
"mid": (float(lastQuote['bidPrice'] or 0) + float(lastQuote['askPrice'] or 0)) / 2
}
# The instrument has a tickSize. Use it to round values.
instrument = self.data['instrument'][0]
return {k: round(float(v or 0), instrument['tickLog']) for k, v in list(ticker.items())}
def funds(self):
return self.data['margin'][0]
def market_depth(self):
return self.data['orderBook25']
def open_orders(self, clOrdIDPrefix):
orders = self.data['order']
# Filter to only open orders (leavesQty > 0) and those that we actually placed
return [o for o in orders if str(o['clOrdID']).startswith(clOrdIDPrefix) and o['leavesQty'] > 0]
def recent_trades(self):
return self.data['trade']
def __connect(self, wsURL, symbol):
'''Connect to the websocket in a thread.'''
self.logger.debug("Starting thread")
self.ws = websocket.WebSocketApp(wsURL,
on_message=self.__on_message,
on_close=self.__on_close,
on_open=self.__on_open,
on_error=self.__on_error,
# We can login using email/pass or API key
header=self.__get_auth())
self.wst = threading.Thread(target=lambda: self.ws.run_forever())
self.wst.daemon = True
self.wst.start()
self.logger.debug("Started thread")
# Wait for connect before continuing
conn_timeout = 5
while not self.ws.sock or not self.ws.sock.connected and conn_timeout:
sleep(1)
conn_timeout -= 1
if not conn_timeout:
self.logger.error("Couldn't connect to WS! Exiting.")
self.exit()
sys.exit(1)
def __get_url(self, symbol):
subscriptions = [sub + ':' + symbol for sub in ["order", "execution", "position", "quote", "trade"]]
subscriptions += ["margin"]
urlParts = list(urllib.parse.urlparse(self.endpoint))
urlParts[0] = urlParts[0].replace('http', 'ws')
urlParts[2] = "/realtime?subscribe=" + ",".join(subscriptions)
return urllib.parse.urlunparse(urlParts)
def __push_account(self):
'''Ask the websocket for an account push. Gets margin, positions, and open orders'''
self.__send_command("getAccount")
# Wait for the keys to show up from the ws
while not {'margin', 'position', 'order'} <= set(self.data):
sleep(0.1)
def __push_symbol(self, symbol):
'''Ask the websocket for a symbol push. Gets instrument, orderBook, quote, and trade'''
self.__send_command("getSymbol", symbol)
while not {'instrument', 'trade', 'orderBook25'} <= set(self.data):
sleep(0.1)
def __send_command(self, command, args=[]):
'''Send a raw command.'''
self.ws.send(json.dumps({"op": command, "args": args}))
def __on_message(self, ws, message):
'''Handler for parsing WS messages.'''
message = json.loads(message)
self.logger.debug(json.dumps(message))
table = message['table'] if 'table' in message else None
action = message['action'] if 'action' in message else None
try:
if 'subscribe' in message:
self.logger.debug("Subscribed to %s." % message['subscribe'])
elif action:
if table not in self.data:
self.data[table] = []
# There are four possible actions from the WS:
# 'partial' - full table image
# 'insert' - new row
# 'update' - update row
# 'delete' - delete row
if action == 'partial':
self.logger.debug("%s: partial" % table)
self.data[table] += message['data']
# Keys are communicated on partials to let you know how to uniquely identify
# an item. We use it for updates.
self.keys[table] = message['keys']
elif action == 'insert':
self.logger.debug('%s: inserting %s' % (table, message['data']))
self.data[table] += message['data']
elif action == 'update':
self.logger.debug('%s: updating %s' % (table, message['data']))
# Locate the item in the collection and update it.
for updateData in message['data']:
item = findItemByKeys(self.keys[table], self.data[table], updateData)
if not item:
return # No item found to update. Could happen before push
item.update(updateData)
# Remove cancelled / filled orders
if table == 'order' and item['leavesQty'] <= 0:
self.data[table].remove(item)
elif action == 'delete':
self.logger.debug('%s: deleting %s' % (table, message['data']))
# Locate the item in the collection and remove it.
for deleteData in message['data']:
item = findItemByKeys(self.keys[table], self.data[table], deleteData)
self.data[table].remove(item)
else:
raise Exception("Unknown action: %s" % action)
except:
self.logger.error(traceback.format_exc())
def __on_error(self, ws, error):
if not self.exited:
self.logger.error("Error : %s" % error)
sys.exit(1)
def __on_open(self, ws):
self.logger.debug("Websocket Opened.")
def __on_close(self, ws):
self.logger.info('Websocket Closed')
sys.exit(1)
|
joequant/cryptoexchange | cryptoexchange/bitmex_ws.py | BitMEXWebsocket.__push_symbol | python | def __push_symbol(self, symbol):
'''Ask the websocket for a symbol push. Gets instrument, orderBook, quote, and trade'''
self.__send_command("getSymbol", symbol)
while not {'instrument', 'trade', 'orderBook25'} <= set(self.data):
sleep(0.1) | Ask the websocket for a symbol push. Gets instrument, orderBook, quote, and trade | train | https://github.com/joequant/cryptoexchange/blob/6690fbd9a2ba00e40d7484425808c84d44233f0c/cryptoexchange/bitmex_ws.py#L186-L190 | [
"def __send_command(self, command, args=[]):\n '''Send a raw command.'''\n self.ws.send(json.dumps({\"op\": command, \"args\": args}))\n"
] | class BitMEXWebsocket():
def __init__(self, endpoint="", symbol="XBU24H", API_KEY=None, API_SECRET=None, LOGIN=None, PASSWORD=None):
'''Connect to the websocket and initialize data stores.'''
self.logger = logging.getLogger('root')
self.logger.debug("Initializing WebSocket.")
self.endpoint = endpoint
self.api_key = API_KEY
self.api_secret = API_SECRET
self.login = LOGIN
self.password = PASSWORD
self.data = {}
self.keys = {}
# We can subscribe right in the connection querystring, so let's build that.
# Subscribe to all pertinent endpoints
wsURL = self.__get_url(symbol)
self.logger.info("Connecting to %s" % wsURL)
self.__connect(wsURL, symbol)
self.logger.info('Connected to WS.')
# Connected. Push symbols
self.__push_account()
self.__push_symbol(symbol)
self.logger.info('Got all market data. Starting.')
def exit(self):
self.exited = True
self.ws.close()
def get_instrument(self):
# Turn the 'tickSize' into 'tickLog' for use in rounding
instrument = self.data['instrument'][0]
instrument['tickLog'] = int(math.fabs(math.log10(instrument['tickSize'])))
return instrument
def get_ticker(self):
'''Return a ticker object. Generated from quote and trade.'''
lastQuote = self.data['quote'][-1]
lastTrade = self.data['trade'][-1]
ticker = {
"last": lastTrade['price'],
"buy": lastQuote['bidPrice'],
"sell": lastQuote['askPrice'],
"mid": (float(lastQuote['bidPrice'] or 0) + float(lastQuote['askPrice'] or 0)) / 2
}
# The instrument has a tickSize. Use it to round values.
instrument = self.data['instrument'][0]
return {k: round(float(v or 0), instrument['tickLog']) for k, v in list(ticker.items())}
def funds(self):
return self.data['margin'][0]
def market_depth(self):
return self.data['orderBook25']
def open_orders(self, clOrdIDPrefix):
orders = self.data['order']
# Filter to only open orders (leavesQty > 0) and those that we actually placed
return [o for o in orders if str(o['clOrdID']).startswith(clOrdIDPrefix) and o['leavesQty'] > 0]
def recent_trades(self):
return self.data['trade']
def __connect(self, wsURL, symbol):
'''Connect to the websocket in a thread.'''
self.logger.debug("Starting thread")
self.ws = websocket.WebSocketApp(wsURL,
on_message=self.__on_message,
on_close=self.__on_close,
on_open=self.__on_open,
on_error=self.__on_error,
# We can login using email/pass or API key
header=self.__get_auth())
self.wst = threading.Thread(target=lambda: self.ws.run_forever())
self.wst.daemon = True
self.wst.start()
self.logger.debug("Started thread")
# Wait for connect before continuing
conn_timeout = 5
while not self.ws.sock or not self.ws.sock.connected and conn_timeout:
sleep(1)
conn_timeout -= 1
if not conn_timeout:
self.logger.error("Couldn't connect to WS! Exiting.")
self.exit()
sys.exit(1)
def __get_auth(self):
'''Return auth headers. Will use API Keys if present in settings.'''
if self.api_key == None and self.login == None:
self.logger.error("No authentication provided! Unable to connect.")
sys.exit(1)
if self.api_key == None:
self.logger.info("Authenticating with email/password.")
return [
"email: " + self.login,
"password: " + self.password
]
else:
self.logger.info("Authenticating with API Key.")
# To auth to the WS using an API key, we generate a signature of a nonce and
# the WS API endpoint.
nonce = generate_nonce()
return [
"api-nonce: " + str(nonce),
"api-signature: " + generate_signature(self.api_secret, 'GET', '/realtime', nonce, ''),
"api-key:" + self.api_key
]
def __get_url(self, symbol):
subscriptions = [sub + ':' + symbol for sub in ["order", "execution", "position", "quote", "trade"]]
subscriptions += ["margin"]
urlParts = list(urllib.parse.urlparse(self.endpoint))
urlParts[0] = urlParts[0].replace('http', 'ws')
urlParts[2] = "/realtime?subscribe=" + ",".join(subscriptions)
return urllib.parse.urlunparse(urlParts)
def __push_account(self):
'''Ask the websocket for an account push. Gets margin, positions, and open orders'''
self.__send_command("getAccount")
# Wait for the keys to show up from the ws
while not {'margin', 'position', 'order'} <= set(self.data):
sleep(0.1)
def __send_command(self, command, args=[]):
'''Send a raw command.'''
self.ws.send(json.dumps({"op": command, "args": args}))
def __on_message(self, ws, message):
'''Handler for parsing WS messages.'''
message = json.loads(message)
self.logger.debug(json.dumps(message))
table = message['table'] if 'table' in message else None
action = message['action'] if 'action' in message else None
try:
if 'subscribe' in message:
self.logger.debug("Subscribed to %s." % message['subscribe'])
elif action:
if table not in self.data:
self.data[table] = []
# There are four possible actions from the WS:
# 'partial' - full table image
# 'insert' - new row
# 'update' - update row
# 'delete' - delete row
if action == 'partial':
self.logger.debug("%s: partial" % table)
self.data[table] += message['data']
# Keys are communicated on partials to let you know how to uniquely identify
# an item. We use it for updates.
self.keys[table] = message['keys']
elif action == 'insert':
self.logger.debug('%s: inserting %s' % (table, message['data']))
self.data[table] += message['data']
elif action == 'update':
self.logger.debug('%s: updating %s' % (table, message['data']))
# Locate the item in the collection and update it.
for updateData in message['data']:
item = findItemByKeys(self.keys[table], self.data[table], updateData)
if not item:
return # No item found to update. Could happen before push
item.update(updateData)
# Remove cancelled / filled orders
if table == 'order' and item['leavesQty'] <= 0:
self.data[table].remove(item)
elif action == 'delete':
self.logger.debug('%s: deleting %s' % (table, message['data']))
# Locate the item in the collection and remove it.
for deleteData in message['data']:
item = findItemByKeys(self.keys[table], self.data[table], deleteData)
self.data[table].remove(item)
else:
raise Exception("Unknown action: %s" % action)
except:
self.logger.error(traceback.format_exc())
def __on_error(self, ws, error):
if not self.exited:
self.logger.error("Error : %s" % error)
sys.exit(1)
def __on_open(self, ws):
self.logger.debug("Websocket Opened.")
def __on_close(self, ws):
self.logger.info('Websocket Closed')
sys.exit(1)
|
joequant/cryptoexchange | cryptoexchange/bitmex_ws.py | BitMEXWebsocket.__send_command | python | def __send_command(self, command, args=[]):
'''Send a raw command.'''
self.ws.send(json.dumps({"op": command, "args": args})) | Send a raw command. | train | https://github.com/joequant/cryptoexchange/blob/6690fbd9a2ba00e40d7484425808c84d44233f0c/cryptoexchange/bitmex_ws.py#L192-L194 | null | class BitMEXWebsocket():
def __init__(self, endpoint="", symbol="XBU24H", API_KEY=None, API_SECRET=None, LOGIN=None, PASSWORD=None):
'''Connect to the websocket and initialize data stores.'''
self.logger = logging.getLogger('root')
self.logger.debug("Initializing WebSocket.")
self.endpoint = endpoint
self.api_key = API_KEY
self.api_secret = API_SECRET
self.login = LOGIN
self.password = PASSWORD
self.data = {}
self.keys = {}
# We can subscribe right in the connection querystring, so let's build that.
# Subscribe to all pertinent endpoints
wsURL = self.__get_url(symbol)
self.logger.info("Connecting to %s" % wsURL)
self.__connect(wsURL, symbol)
self.logger.info('Connected to WS.')
# Connected. Push symbols
self.__push_account()
self.__push_symbol(symbol)
self.logger.info('Got all market data. Starting.')
def exit(self):
self.exited = True
self.ws.close()
def get_instrument(self):
# Turn the 'tickSize' into 'tickLog' for use in rounding
instrument = self.data['instrument'][0]
instrument['tickLog'] = int(math.fabs(math.log10(instrument['tickSize'])))
return instrument
def get_ticker(self):
'''Return a ticker object. Generated from quote and trade.'''
lastQuote = self.data['quote'][-1]
lastTrade = self.data['trade'][-1]
ticker = {
"last": lastTrade['price'],
"buy": lastQuote['bidPrice'],
"sell": lastQuote['askPrice'],
"mid": (float(lastQuote['bidPrice'] or 0) + float(lastQuote['askPrice'] or 0)) / 2
}
# The instrument has a tickSize. Use it to round values.
instrument = self.data['instrument'][0]
return {k: round(float(v or 0), instrument['tickLog']) for k, v in list(ticker.items())}
def funds(self):
return self.data['margin'][0]
def market_depth(self):
return self.data['orderBook25']
def open_orders(self, clOrdIDPrefix):
orders = self.data['order']
# Filter to only open orders (leavesQty > 0) and those that we actually placed
return [o for o in orders if str(o['clOrdID']).startswith(clOrdIDPrefix) and o['leavesQty'] > 0]
def recent_trades(self):
return self.data['trade']
def __connect(self, wsURL, symbol):
'''Connect to the websocket in a thread.'''
self.logger.debug("Starting thread")
self.ws = websocket.WebSocketApp(wsURL,
on_message=self.__on_message,
on_close=self.__on_close,
on_open=self.__on_open,
on_error=self.__on_error,
# We can login using email/pass or API key
header=self.__get_auth())
self.wst = threading.Thread(target=lambda: self.ws.run_forever())
self.wst.daemon = True
self.wst.start()
self.logger.debug("Started thread")
# Wait for connect before continuing
conn_timeout = 5
while not self.ws.sock or not self.ws.sock.connected and conn_timeout:
sleep(1)
conn_timeout -= 1
if not conn_timeout:
self.logger.error("Couldn't connect to WS! Exiting.")
self.exit()
sys.exit(1)
def __get_auth(self):
'''Return auth headers. Will use API Keys if present in settings.'''
if self.api_key == None and self.login == None:
self.logger.error("No authentication provided! Unable to connect.")
sys.exit(1)
if self.api_key == None:
self.logger.info("Authenticating with email/password.")
return [
"email: " + self.login,
"password: " + self.password
]
else:
self.logger.info("Authenticating with API Key.")
# To auth to the WS using an API key, we generate a signature of a nonce and
# the WS API endpoint.
nonce = generate_nonce()
return [
"api-nonce: " + str(nonce),
"api-signature: " + generate_signature(self.api_secret, 'GET', '/realtime', nonce, ''),
"api-key:" + self.api_key
]
def __get_url(self, symbol):
subscriptions = [sub + ':' + symbol for sub in ["order", "execution", "position", "quote", "trade"]]
subscriptions += ["margin"]
urlParts = list(urllib.parse.urlparse(self.endpoint))
urlParts[0] = urlParts[0].replace('http', 'ws')
urlParts[2] = "/realtime?subscribe=" + ",".join(subscriptions)
return urllib.parse.urlunparse(urlParts)
def __push_account(self):
'''Ask the websocket for an account push. Gets margin, positions, and open orders'''
self.__send_command("getAccount")
# Wait for the keys to show up from the ws
while not {'margin', 'position', 'order'} <= set(self.data):
sleep(0.1)
def __push_symbol(self, symbol):
'''Ask the websocket for a symbol push. Gets instrument, orderBook, quote, and trade'''
self.__send_command("getSymbol", symbol)
while not {'instrument', 'trade', 'orderBook25'} <= set(self.data):
sleep(0.1)
def __on_message(self, ws, message):
'''Handler for parsing WS messages.'''
message = json.loads(message)
self.logger.debug(json.dumps(message))
table = message['table'] if 'table' in message else None
action = message['action'] if 'action' in message else None
try:
if 'subscribe' in message:
self.logger.debug("Subscribed to %s." % message['subscribe'])
elif action:
if table not in self.data:
self.data[table] = []
# There are four possible actions from the WS:
# 'partial' - full table image
# 'insert' - new row
# 'update' - update row
# 'delete' - delete row
if action == 'partial':
self.logger.debug("%s: partial" % table)
self.data[table] += message['data']
# Keys are communicated on partials to let you know how to uniquely identify
# an item. We use it for updates.
self.keys[table] = message['keys']
elif action == 'insert':
self.logger.debug('%s: inserting %s' % (table, message['data']))
self.data[table] += message['data']
elif action == 'update':
self.logger.debug('%s: updating %s' % (table, message['data']))
# Locate the item in the collection and update it.
for updateData in message['data']:
item = findItemByKeys(self.keys[table], self.data[table], updateData)
if not item:
return # No item found to update. Could happen before push
item.update(updateData)
# Remove cancelled / filled orders
if table == 'order' and item['leavesQty'] <= 0:
self.data[table].remove(item)
elif action == 'delete':
self.logger.debug('%s: deleting %s' % (table, message['data']))
# Locate the item in the collection and remove it.
for deleteData in message['data']:
item = findItemByKeys(self.keys[table], self.data[table], deleteData)
self.data[table].remove(item)
else:
raise Exception("Unknown action: %s" % action)
except:
self.logger.error(traceback.format_exc())
def __on_error(self, ws, error):
if not self.exited:
self.logger.error("Error : %s" % error)
sys.exit(1)
def __on_open(self, ws):
self.logger.debug("Websocket Opened.")
def __on_close(self, ws):
self.logger.info('Websocket Closed')
sys.exit(1)
|
joequant/cryptoexchange | cryptoexchange/bitmex_ws.py | BitMEXWebsocket.__on_message | python | def __on_message(self, ws, message):
'''Handler for parsing WS messages.'''
message = json.loads(message)
self.logger.debug(json.dumps(message))
table = message['table'] if 'table' in message else None
action = message['action'] if 'action' in message else None
try:
if 'subscribe' in message:
self.logger.debug("Subscribed to %s." % message['subscribe'])
elif action:
if table not in self.data:
self.data[table] = []
# There are four possible actions from the WS:
# 'partial' - full table image
# 'insert' - new row
# 'update' - update row
# 'delete' - delete row
if action == 'partial':
self.logger.debug("%s: partial" % table)
self.data[table] += message['data']
# Keys are communicated on partials to let you know how to uniquely identify
# an item. We use it for updates.
self.keys[table] = message['keys']
elif action == 'insert':
self.logger.debug('%s: inserting %s' % (table, message['data']))
self.data[table] += message['data']
elif action == 'update':
self.logger.debug('%s: updating %s' % (table, message['data']))
# Locate the item in the collection and update it.
for updateData in message['data']:
item = findItemByKeys(self.keys[table], self.data[table], updateData)
if not item:
return # No item found to update. Could happen before push
item.update(updateData)
# Remove cancelled / filled orders
if table == 'order' and item['leavesQty'] <= 0:
self.data[table].remove(item)
elif action == 'delete':
self.logger.debug('%s: deleting %s' % (table, message['data']))
# Locate the item in the collection and remove it.
for deleteData in message['data']:
item = findItemByKeys(self.keys[table], self.data[table], deleteData)
self.data[table].remove(item)
else:
raise Exception("Unknown action: %s" % action)
except:
self.logger.error(traceback.format_exc()) | Handler for parsing WS messages. | train | https://github.com/joequant/cryptoexchange/blob/6690fbd9a2ba00e40d7484425808c84d44233f0c/cryptoexchange/bitmex_ws.py#L196-L245 | null | class BitMEXWebsocket():
def __init__(self, endpoint="", symbol="XBU24H", API_KEY=None, API_SECRET=None, LOGIN=None, PASSWORD=None):
'''Connect to the websocket and initialize data stores.'''
self.logger = logging.getLogger('root')
self.logger.debug("Initializing WebSocket.")
self.endpoint = endpoint
self.api_key = API_KEY
self.api_secret = API_SECRET
self.login = LOGIN
self.password = PASSWORD
self.data = {}
self.keys = {}
# We can subscribe right in the connection querystring, so let's build that.
# Subscribe to all pertinent endpoints
wsURL = self.__get_url(symbol)
self.logger.info("Connecting to %s" % wsURL)
self.__connect(wsURL, symbol)
self.logger.info('Connected to WS.')
# Connected. Push symbols
self.__push_account()
self.__push_symbol(symbol)
self.logger.info('Got all market data. Starting.')
def exit(self):
self.exited = True
self.ws.close()
def get_instrument(self):
# Turn the 'tickSize' into 'tickLog' for use in rounding
instrument = self.data['instrument'][0]
instrument['tickLog'] = int(math.fabs(math.log10(instrument['tickSize'])))
return instrument
def get_ticker(self):
'''Return a ticker object. Generated from quote and trade.'''
lastQuote = self.data['quote'][-1]
lastTrade = self.data['trade'][-1]
ticker = {
"last": lastTrade['price'],
"buy": lastQuote['bidPrice'],
"sell": lastQuote['askPrice'],
"mid": (float(lastQuote['bidPrice'] or 0) + float(lastQuote['askPrice'] or 0)) / 2
}
# The instrument has a tickSize. Use it to round values.
instrument = self.data['instrument'][0]
return {k: round(float(v or 0), instrument['tickLog']) for k, v in list(ticker.items())}
def funds(self):
return self.data['margin'][0]
def market_depth(self):
return self.data['orderBook25']
def open_orders(self, clOrdIDPrefix):
orders = self.data['order']
# Filter to only open orders (leavesQty > 0) and those that we actually placed
return [o for o in orders if str(o['clOrdID']).startswith(clOrdIDPrefix) and o['leavesQty'] > 0]
def recent_trades(self):
return self.data['trade']
def __connect(self, wsURL, symbol):
'''Connect to the websocket in a thread.'''
self.logger.debug("Starting thread")
self.ws = websocket.WebSocketApp(wsURL,
on_message=self.__on_message,
on_close=self.__on_close,
on_open=self.__on_open,
on_error=self.__on_error,
# We can login using email/pass or API key
header=self.__get_auth())
self.wst = threading.Thread(target=lambda: self.ws.run_forever())
self.wst.daemon = True
self.wst.start()
self.logger.debug("Started thread")
# Wait for connect before continuing
conn_timeout = 5
while not self.ws.sock or not self.ws.sock.connected and conn_timeout:
sleep(1)
conn_timeout -= 1
if not conn_timeout:
self.logger.error("Couldn't connect to WS! Exiting.")
self.exit()
sys.exit(1)
def __get_auth(self):
'''Return auth headers. Will use API Keys if present in settings.'''
if self.api_key == None and self.login == None:
self.logger.error("No authentication provided! Unable to connect.")
sys.exit(1)
if self.api_key == None:
self.logger.info("Authenticating with email/password.")
return [
"email: " + self.login,
"password: " + self.password
]
else:
self.logger.info("Authenticating with API Key.")
# To auth to the WS using an API key, we generate a signature of a nonce and
# the WS API endpoint.
nonce = generate_nonce()
return [
"api-nonce: " + str(nonce),
"api-signature: " + generate_signature(self.api_secret, 'GET', '/realtime', nonce, ''),
"api-key:" + self.api_key
]
def __get_url(self, symbol):
subscriptions = [sub + ':' + symbol for sub in ["order", "execution", "position", "quote", "trade"]]
subscriptions += ["margin"]
urlParts = list(urllib.parse.urlparse(self.endpoint))
urlParts[0] = urlParts[0].replace('http', 'ws')
urlParts[2] = "/realtime?subscribe=" + ",".join(subscriptions)
return urllib.parse.urlunparse(urlParts)
def __push_account(self):
'''Ask the websocket for an account push. Gets margin, positions, and open orders'''
self.__send_command("getAccount")
# Wait for the keys to show up from the ws
while not {'margin', 'position', 'order'} <= set(self.data):
sleep(0.1)
def __push_symbol(self, symbol):
'''Ask the websocket for a symbol push. Gets instrument, orderBook, quote, and trade'''
self.__send_command("getSymbol", symbol)
while not {'instrument', 'trade', 'orderBook25'} <= set(self.data):
sleep(0.1)
def __send_command(self, command, args=[]):
'''Send a raw command.'''
self.ws.send(json.dumps({"op": command, "args": args}))
def __on_error(self, ws, error):
if not self.exited:
self.logger.error("Error : %s" % error)
sys.exit(1)
def __on_open(self, ws):
self.logger.debug("Websocket Opened.")
def __on_close(self, ws):
self.logger.info('Websocket Closed')
sys.exit(1)
|
ilblackdragon/django-misc | misc/managers.py | SoftDeleteManager.filter | python | def filter(self, *args, **kwargs):
if 'pk' in kwargs or 'id' in kwargs:
return self.all_with_deleted().filter(*args, **kwargs)
return self.get_query_set().filter(*args, **kwargs) | If id or pk was specified as a kwargs, return even if it's deleteted. | train | https://github.com/ilblackdragon/django-misc/blob/0accd2dc97de656a1c9e275be81e817f78a2eb9d/misc/managers.py#L43-L47 | [
"def get_query_set(self):\n\treturn SoftDeleteQuerySet(self.model, using=self._db).alive()\n",
"def all_with_deleted(self):\n\treturn SoftDeleteQuerySet(self.model, using=self._db)\n"
] | class SoftDeleteManager(models.Manager):
"""Manager provides soft deletion of records.
Your model must have BooleanField or LiveField "alive".
"""
def get_query_set(self):
return SoftDeleteQuerySet(self.model, using=self._db).alive()
def all_with_deleted(self):
return SoftDeleteQuerySet(self.model, using=self._db)
def deleted(self):
return SoftDeleteQuerySet(self.model, using=self._db).deleted()
def hard_delete(self):
return self.get_query_set().hard_delete()
def get(self, *args, **kwargs):
"""If specific record was requested, return if even if it's deleteted."""
return self.all_with_deleted.get(*args, **kwargs)
|
ilblackdragon/django-misc | misc/parser.py | XlsReader.formatrow | python | def formatrow(self, types, values, wanttupledate):
## Data Type Codes:
## EMPTY 0
## TEXT 1 a Unicode string
## NUMBER 2 float
## DATE 3 float
## BOOLEAN 4 int; 1 means TRUE, 0 means FALSE
## ERROR 5
returnrow = []
for i in range(len(types)):
type,value = types[i],values[i]
if type == 2:
if value == int(value):
value = int(value)
elif type == 3:
datetuple = xlrd.xldate_as_tuple(value, self.__book__.datemode)
if wanttupledate:
value = datetuple
else:
# time only no date component
if datetuple[0] == 0 and datetuple[1] == 0 and \
datetuple[2] == 0:
value = "%02d:%02d:%02d" % datetuple[3:]
# date only, no time
elif datetuple[3] == 0 and datetuple[4] == 0 and \
datetuple[5] == 0:
value = "%04d-%02d-%02d" % datetuple[:3]
else: # full date
value = "%04d-%02d-%02d %02d:%02d:%02d" % datetuple
elif type == 5:
value = xlrd.error_text_from_code[value]
returnrow.append(value)
return returnrow | Internal function used to clean up the incoming excel data | train | https://github.com/ilblackdragon/django-misc/blob/0accd2dc97de656a1c9e275be81e817f78a2eb9d/misc/parser.py#L21-L54 | null | class XlsReader(Reader):
def __init__(self, fileName):
super(XlsReader, self).__init__(self)
self.__book__ = xlrd.open_workbook(fileName)
self.__sheet__ = self.__book__._sheet_list[0]
self.__row__ = 0
def readline(self):
try:
types,values = self.__sheet__.row_types(self.__row__),self.__sheet__.row_values(self.__row__)
except IndexError:
return ['']
self.__row__ += 1
return self.formatrow(types,values,False)
|
ilblackdragon/django-misc | misc/utils.py | str_to_class | python | def str_to_class(class_name):
mod_str, cls_str = class_name.rsplit('.', 1)
mod = __import__(mod_str, globals(), locals(), [''])
cls = getattr(mod, cls_str)
return cls | Returns a class based on class name | train | https://github.com/ilblackdragon/django-misc/blob/0accd2dc97de656a1c9e275be81e817f78a2eb9d/misc/utils.py#L44-L51 | null | # -*- coding: utf-8 -*-
import os
import string
import re
from django.conf import settings
from django.contrib.auth import SESSION_KEY, BACKEND_SESSION_KEY, load_backend
from django.contrib.auth.models import AnonymousUser
from django.http import HttpResponse
from django.utils.encoding import smart_str, force_unicode, iri_to_uri
AUTH_USER_LANGUAGE_FIELD = getattr(settings, 'AUTH_USER_LANGUAGE_FIELD', 'language')
class HttpResponseReload(HttpResponse):
"""
Reload page and stay on the same page from where request was made.
example:
def simple_view(request, form_class=CommentForm, template_name='some_template.html'):
form = CommentForm(request.POST or None)
if form.valid():
form.save()
return HttpResponseReload(request)
return render(template_name, {'form': form})
"""
status_code = 302
def __init__(self, request):
HttpResponse.__init__(self)
referer = request.META.get('HTTP_REFERER')
self['Location'] = iri_to_uri(referer or "/")
def custom_spaceless(value):
"""
Remove spaces between tags and leading spaces in lines.
WARNING: It works buggly for <pre> tag.
"""
return re.sub('(\n|\r|(>))[ \t]+((?(2)<))', '\\1\\3', force_unicode(value))
# .replace('\n', '').replace('\r', '')
def get_alphabets():
alphabet_en = unicode(string.ascii_uppercase)
alphabet_ru = []
first = ord(u'а')
last = ord(u'я')+1
for ch in range(first, last):
alphabet_ru.append(unichr(ch).upper())
return (alphabet_en, alphabet_ru)
alphabet_en, alphabet_ru = get_alphabets()
def user_from_session_key(session_key):
session_engine = __import__(settings.SESSION_ENGINE, {}, {}, [''])
session_wrapper = session_engine.SessionStore(session_key)
user_id = session_wrapper.get(SESSION_KEY)
auth_backend = load_backend(session_wrapper.get(BACKEND_SESSION_KEY))
if user_id and auth_backend:
return auth_backend.get_user(user_id)
else:
return AnonymousUser()
def get_hierarchy_uploader(root):
"""
Returns uploader, that uses get_hierarch_path to store files
"""
# Workaround to avoid Django 1.7 makemigrations wierd behaviour:
# More details: https://code.djangoproject.com/ticket/22436
import sys
if len(sys.argv) > 1 and sys.argv[1] in ('makemigrations', 'migrate'):
# Hide ourselves from Django migrations
return None
from pymisc.utils.files import get_hierarchy_path
def upload_to(instance, filename):
file_name, file_ext = os.path.splitext(filename)
return get_hierarchy_path(str(instance.id), file_ext, root, prefix_path_length=settings.PREFIX_PATH_LENGTH)
return upload_to
|
ilblackdragon/django-misc | misc/utils.py | get_hierarchy_uploader | python | def get_hierarchy_uploader(root):
# Workaround to avoid Django 1.7 makemigrations wierd behaviour:
# More details: https://code.djangoproject.com/ticket/22436
import sys
if len(sys.argv) > 1 and sys.argv[1] in ('makemigrations', 'migrate'):
# Hide ourselves from Django migrations
return None
from pymisc.utils.files import get_hierarchy_path
def upload_to(instance, filename):
file_name, file_ext = os.path.splitext(filename)
return get_hierarchy_path(str(instance.id), file_ext, root, prefix_path_length=settings.PREFIX_PATH_LENGTH)
return upload_to | Returns uploader, that uses get_hierarch_path to store files | train | https://github.com/ilblackdragon/django-misc/blob/0accd2dc97de656a1c9e275be81e817f78a2eb9d/misc/utils.py#L75-L90 | null | # -*- coding: utf-8 -*-
import os
import string
import re
from django.conf import settings
from django.contrib.auth import SESSION_KEY, BACKEND_SESSION_KEY, load_backend
from django.contrib.auth.models import AnonymousUser
from django.http import HttpResponse
from django.utils.encoding import smart_str, force_unicode, iri_to_uri
AUTH_USER_LANGUAGE_FIELD = getattr(settings, 'AUTH_USER_LANGUAGE_FIELD', 'language')
class HttpResponseReload(HttpResponse):
"""
Reload page and stay on the same page from where request was made.
example:
def simple_view(request, form_class=CommentForm, template_name='some_template.html'):
form = CommentForm(request.POST or None)
if form.valid():
form.save()
return HttpResponseReload(request)
return render(template_name, {'form': form})
"""
status_code = 302
def __init__(self, request):
HttpResponse.__init__(self)
referer = request.META.get('HTTP_REFERER')
self['Location'] = iri_to_uri(referer or "/")
def custom_spaceless(value):
"""
Remove spaces between tags and leading spaces in lines.
WARNING: It works buggly for <pre> tag.
"""
return re.sub('(\n|\r|(>))[ \t]+((?(2)<))', '\\1\\3', force_unicode(value))
# .replace('\n', '').replace('\r', '')
def str_to_class(class_name):
"""
Returns a class based on class name
"""
mod_str, cls_str = class_name.rsplit('.', 1)
mod = __import__(mod_str, globals(), locals(), [''])
cls = getattr(mod, cls_str)
return cls
def get_alphabets():
alphabet_en = unicode(string.ascii_uppercase)
alphabet_ru = []
first = ord(u'а')
last = ord(u'я')+1
for ch in range(first, last):
alphabet_ru.append(unichr(ch).upper())
return (alphabet_en, alphabet_ru)
alphabet_en, alphabet_ru = get_alphabets()
def user_from_session_key(session_key):
session_engine = __import__(settings.SESSION_ENGINE, {}, {}, [''])
session_wrapper = session_engine.SessionStore(session_key)
user_id = session_wrapper.get(SESSION_KEY)
auth_backend = load_backend(session_wrapper.get(BACKEND_SESSION_KEY))
if user_id and auth_backend:
return auth_backend.get_user(user_id)
else:
return AnonymousUser()
|
ilblackdragon/django-misc | misc/views.py | handler500 | python | def handler500(request, template_name='500.html'):
t = loader.get_template(template_name) # You need to create a 500.html template.
return http.HttpResponseServerError(t.render(Context({
'MEDIA_URL': settings.MEDIA_URL,
'STATIC_URL': settings.STATIC_URL
}))) | 500 error handler.
Templates: `500.html`
Context:
MEDIA_URL
Path of static media (e.g. "media.example.org")
STATIC_URL | train | https://github.com/ilblackdragon/django-misc/blob/0accd2dc97de656a1c9e275be81e817f78a2eb9d/misc/views.py#L22-L36 | null | # -*- coding: utf-8 -*-
from django import http
from django.conf import settings
from django.dispatch import Signal
from django.template import Context, loader
from django.shortcuts import redirect
# Django 1.7 Removed custom profiles
try:
from django.contrib.auth.models import SiteProfileNotAvailable
except ImportError:
SiteProfileNotAvailable = None
if 'coffin' in settings.INSTALLED_APPS:
is_coffin = True
from coffin.template.response import TemplateResponse
from .signals import language_changed
from utils import AUTH_USER_LANGUAGE_FIELD
def handler404(request, template_name='404.html'):
"""
404 error handler.
Templates: `404.html`
Context:
MEDIA_URL
Path of static media (e.g. "media.example.org")
STATIC_URL
"""
t = loader.get_template(template_name) # You need to create a 404.html template.
return http.HttpResponseNotFound(t.render(Context({
'MEDIA_URL': settings.MEDIA_URL,
'STATIC_URL': settings.STATIC_URL
})))
def redirect_by_name(request, name, **kwargs):
keys = kwargs.keys()
for k in keys:
if k in kwargs and callable(kwargs[k]):
kwargs[k] = kwargs[k](kwargs)
return redirect(name, **kwargs)
def language_change(request, lang):
next = request.REQUEST.get('next', None)
if not next:
next = request.META.get('HTTP_REFERER', None) or '/'
response = redirect(next)
if lang and lang in map(lambda x: x[0], settings.LANGUAGES):
language_saved = False
if request.user.is_authenticated():
user = request.user
if hasattr(user, AUTH_USER_LANGUAGE_FIELD):
setattr(user, AUTH_USER_LANGUAGE_FIELD, lang)
user.save()
language_saved = True
else:
if SiteProfileNotAvailable is None:
profile = user
else:
try:
profile = user.get_profile()
except SiteProfileNotAvailable:
profile = None
if profile is not None and hasattr(profile, AUTH_USER_LANGUAGE_FIELD):
setattr(profile, AUTH_USER_LANGUAGE_FIELD, lang)
profile.save()
language_saved = True
if not language_saved:
response.set_cookie(settings.LANGUAGE_COOKIE_NAME, lang,
max_age=settings.SESSION_COOKIE_AGE)
language_changed.send(None, request=request, lang=lang)
return response
def coffin_template_response(request, view, **kwargs):
response = view(request, **kwargs)
if is_coffin and hasattr(response, 'template_name'):
return TemplateResponse(request, response.template_name, response.context_data)
return response
|
ilblackdragon/django-misc | misc/views.py | handler404 | python | def handler404(request, template_name='404.html'):
t = loader.get_template(template_name) # You need to create a 404.html template.
return http.HttpResponseNotFound(t.render(Context({
'MEDIA_URL': settings.MEDIA_URL,
'STATIC_URL': settings.STATIC_URL
}))) | 404 error handler.
Templates: `404.html`
Context:
MEDIA_URL
Path of static media (e.g. "media.example.org")
STATIC_URL | train | https://github.com/ilblackdragon/django-misc/blob/0accd2dc97de656a1c9e275be81e817f78a2eb9d/misc/views.py#L38-L52 | null | # -*- coding: utf-8 -*-
from django import http
from django.conf import settings
from django.dispatch import Signal
from django.template import Context, loader
from django.shortcuts import redirect
# Django 1.7 Removed custom profiles
try:
from django.contrib.auth.models import SiteProfileNotAvailable
except ImportError:
SiteProfileNotAvailable = None
if 'coffin' in settings.INSTALLED_APPS:
is_coffin = True
from coffin.template.response import TemplateResponse
from .signals import language_changed
from utils import AUTH_USER_LANGUAGE_FIELD
def handler500(request, template_name='500.html'):
"""
500 error handler.
Templates: `500.html`
Context:
MEDIA_URL
Path of static media (e.g. "media.example.org")
STATIC_URL
"""
t = loader.get_template(template_name) # You need to create a 500.html template.
return http.HttpResponseServerError(t.render(Context({
'MEDIA_URL': settings.MEDIA_URL,
'STATIC_URL': settings.STATIC_URL
})))
def redirect_by_name(request, name, **kwargs):
keys = kwargs.keys()
for k in keys:
if k in kwargs and callable(kwargs[k]):
kwargs[k] = kwargs[k](kwargs)
return redirect(name, **kwargs)
def language_change(request, lang):
next = request.REQUEST.get('next', None)
if not next:
next = request.META.get('HTTP_REFERER', None) or '/'
response = redirect(next)
if lang and lang in map(lambda x: x[0], settings.LANGUAGES):
language_saved = False
if request.user.is_authenticated():
user = request.user
if hasattr(user, AUTH_USER_LANGUAGE_FIELD):
setattr(user, AUTH_USER_LANGUAGE_FIELD, lang)
user.save()
language_saved = True
else:
if SiteProfileNotAvailable is None:
profile = user
else:
try:
profile = user.get_profile()
except SiteProfileNotAvailable:
profile = None
if profile is not None and hasattr(profile, AUTH_USER_LANGUAGE_FIELD):
setattr(profile, AUTH_USER_LANGUAGE_FIELD, lang)
profile.save()
language_saved = True
if not language_saved:
response.set_cookie(settings.LANGUAGE_COOKIE_NAME, lang,
max_age=settings.SESSION_COOKIE_AGE)
language_changed.send(None, request=request, lang=lang)
return response
def coffin_template_response(request, view, **kwargs):
response = view(request, **kwargs)
if is_coffin and hasattr(response, 'template_name'):
return TemplateResponse(request, response.template_name, response.context_data)
return response
|
ilblackdragon/django-misc | misc/decorators.py | to_template | python | def to_template(template_name=None):
def decorator(view_func):
def _wrapped_view(request, *args, **kwargs):
result = view_func(request, *args, **kwargs)
if isinstance(result, dict):
return TemplateResponse(request, result.pop('TEMPLATE', template_name), result)
return result
return wraps(view_func, assigned=available_attrs(view_func))(_wrapped_view)
return decorator | Decorator for simple call TemplateResponse
Examples:
@to_template("test.html")
def test(request):
return {'test': 100}
@to_template
def test2(request):
return {'test': 100, 'TEMPLATE': 'test.html'}
@to_template
def test2(request, template_name='test.html'):
return {'test': 100, 'TEMPLATE': template_name} | train | https://github.com/ilblackdragon/django-misc/blob/0accd2dc97de656a1c9e275be81e817f78a2eb9d/misc/decorators.py#L12-L36 | null | from functools import update_wrapper, wraps
from django.conf import settings
from django.core.cache import cache
from django.utils.decorators import available_attrs
if 'coffin' in settings.INSTALLED_APPS:
from coffin.template.response import TemplateResponse
else:
from django.template.response import TemplateResponse
render_to = to_template
def receiver(signal, **kwargs):
"""
Introduced in Django 1.3 (django.dispatch.receiver)
A decorator for connecting receivers to signals. Used by passing in the
signal and keyword arguments to connect::
@receiver(post_save, sender=MyModel)
def signal_receiver(sender, **kwargs):
do_stuff()
"""
def _decorator(func):
signal.connect(func, **kwargs)
return func
return _decorator
def cached(cache_key=None, invalidate_signals=None, timeout=None):
def decorator(function):
def invalidate(sender, *args, **kwargs):
"""
Simple cache invalidate fallback function.
"""
cache.delete(cache_key)
def wrapped(*args, **kwargs):
if cache_key is None:
if invalidate_signals is not None:
raise AttributeError("You cannot use function-level caching (cache_key=None) "
"with invalidate signals.")
# Store cached data into function itself
if not hasattr(function, '_cached'):
function._cached = function(*args, **kwargs)
return function._cached
else:
# Cache to external cache backend
if callable(cache_key):
_cache_key = cache_key(*args, **kwargs)
else:
_cache_key = cache_key
result = cache.get(_cache_key)
if result is None:
result = function(*args, **kwargs)
if _cache_key is not None:
cache.set(_cache_key, result, timeout)
return result
if invalidate_signals:
wrapped.invalidate = invalidate
for signal, sender, _invalidate in invalidate_signals:
# weak - Django stores signal handlers as weak references by default. Thus, if your
# receiver is a local function, it may be garbage collected. To prevent this, pass
# weak=False when you call the signal`s connect() method.
if _invalidate is None:
if callable(cache_key):
continue
_invalidate = wrapped.invalidate
signal.connect(_invalidate, sender=sender, weak=False)
return wrapped
return decorator
|
ilblackdragon/django-misc | misc/decorators.py | receiver | python | def receiver(signal, **kwargs):
def _decorator(func):
signal.connect(func, **kwargs)
return func
return _decorator | Introduced in Django 1.3 (django.dispatch.receiver)
A decorator for connecting receivers to signals. Used by passing in the
signal and keyword arguments to connect::
@receiver(post_save, sender=MyModel)
def signal_receiver(sender, **kwargs):
do_stuff() | train | https://github.com/ilblackdragon/django-misc/blob/0accd2dc97de656a1c9e275be81e817f78a2eb9d/misc/decorators.py#L40-L54 | null | from functools import update_wrapper, wraps
from django.conf import settings
from django.core.cache import cache
from django.utils.decorators import available_attrs
if 'coffin' in settings.INSTALLED_APPS:
from coffin.template.response import TemplateResponse
else:
from django.template.response import TemplateResponse
def to_template(template_name=None):
"""
Decorator for simple call TemplateResponse
Examples:
@to_template("test.html")
def test(request):
return {'test': 100}
@to_template
def test2(request):
return {'test': 100, 'TEMPLATE': 'test.html'}
@to_template
def test2(request, template_name='test.html'):
return {'test': 100, 'TEMPLATE': template_name}
"""
def decorator(view_func):
def _wrapped_view(request, *args, **kwargs):
result = view_func(request, *args, **kwargs)
if isinstance(result, dict):
return TemplateResponse(request, result.pop('TEMPLATE', template_name), result)
return result
return wraps(view_func, assigned=available_attrs(view_func))(_wrapped_view)
return decorator
render_to = to_template
def receiver(signal, **kwargs):
"""
Introduced in Django 1.3 (django.dispatch.receiver)
A decorator for connecting receivers to signals. Used by passing in the
signal and keyword arguments to connect::
@receiver(post_save, sender=MyModel)
def signal_receiver(sender, **kwargs):
do_stuff()
"""
def _decorator(func):
signal.connect(func, **kwargs)
return func
return _decorator
def cached(cache_key=None, invalidate_signals=None, timeout=None):
def decorator(function):
def invalidate(sender, *args, **kwargs):
"""
Simple cache invalidate fallback function.
"""
cache.delete(cache_key)
def wrapped(*args, **kwargs):
if cache_key is None:
if invalidate_signals is not None:
raise AttributeError("You cannot use function-level caching (cache_key=None) "
"with invalidate signals.")
# Store cached data into function itself
if not hasattr(function, '_cached'):
function._cached = function(*args, **kwargs)
return function._cached
else:
# Cache to external cache backend
if callable(cache_key):
_cache_key = cache_key(*args, **kwargs)
else:
_cache_key = cache_key
result = cache.get(_cache_key)
if result is None:
result = function(*args, **kwargs)
if _cache_key is not None:
cache.set(_cache_key, result, timeout)
return result
if invalidate_signals:
wrapped.invalidate = invalidate
for signal, sender, _invalidate in invalidate_signals:
# weak - Django stores signal handlers as weak references by default. Thus, if your
# receiver is a local function, it may be garbage collected. To prevent this, pass
# weak=False when you call the signal`s connect() method.
if _invalidate is None:
if callable(cache_key):
continue
_invalidate = wrapped.invalidate
signal.connect(_invalidate, sender=sender, weak=False)
return wrapped
return decorator
|
ilblackdragon/django-misc | misc/templatetags/misc_tags.py | find_element | python | def find_element(list, index, index2=1):
for x in list:
if x[0] == index:
return x[index2]
return None | When you have list like: a = [(0, 10), (1, 20), (2, 30)] and you need to get value from tuple with first value == index
Usage:
{% find_element 1 %} will return 20 | train | https://github.com/ilblackdragon/django-misc/blob/0accd2dc97de656a1c9e275be81e817f78a2eb9d/misc/templatetags/misc_tags.py#L27-L36 | null | # -*- coding: utf-8 -*-
from django.template.base import Library, Token, TOKEN_BLOCK, Node, Variable, VariableDoesNotExist
from django.conf import settings
from django.template.defaultfilters import stringfilter
register = Library()
@register.filter
@stringfilter
def cutafter(text, length):
if len(text) > int(length)+3:
return text[:int(length)] + "..."
else:
return text
@register.filter
def get_range(length):
return range(length)
@register.simple_tag
def get_element(list, key, key2=None):
if not key2:
return list[key1]
return list[key1][key2]
@register.simple_tag
@register.tag
def get_dict(parser, token):
"""
Call {% get_dict dict key default_key %} or {% get_dict dict key %}
Return value from dict of key element. If there are no key in get_dict it returns default_key (or '')
Return value will be in parameter 'value'
"""
bits = token.contents.split(' ')
return GetDict(bits[1], bits[2], ((len(bits) > 3) and bits[3]) or '', ((len(bits) > 4) and bits[4]) or '', ((len(bits) > 5) and bits[5]) or '')
class GetDict(Node):
def __init__(self, dict, key, *args):
self.dict = dict
self.key = key
self.default = ''
self.context_key = 'value'
if args[1] == '':
self.default = args[0]
elif (args[0] == 'as'):
self.context_key = args[1]
elif (args[1] == 'as') and (args[2] != ''):
self.default = args[0]
self.context_key = args[2]
else:
# raise BadFormat
pass
def render(self, context):
dict = Variable(self.dict).resolve(context)
key = context.get(self.key, self.key)
default = context.get(self.default, self.default)
if dict:
context[self.context_key] = dict.get(key, default)
else:
context[self.context_key] = default
return ''
class CallableVariable(Variable):
def _resolve_lookup(self, context):
"""
Performs resolution of a real variable (i.e. not a literal) against the
given context.
As indicated by the method's name, this method is an implementation
detail and shouldn't be called by external code. Use Variable.resolve()
instead.
"""
current = context
try: # catch-all for silent variable failures
for bit in self.lookups:
if callable(current):
if getattr(current, 'alters_data', False):
current = settings.TEMPLATE_STRING_IF_INVALID
else:
try: # method call (assuming no args required)
current = current()
except TypeError: # arguments *were* required
# GOTCHA: This will also catch any TypeError
# raised in the function itself.
current = settings.TEMPLATE_STRING_IF_INVALID # invalid method call
try: # dictionary lookup
current = current[bit]
except (TypeError, AttributeError, KeyError):
try: # attribute lookup
current = getattr(current, bit)
except (TypeError, AttributeError):
try: # list-index lookup
current = current[int(bit)]
except (IndexError, # list index out of range
ValueError, # invalid literal for int()
KeyError, # current is a dict without `int(bit)` key
TypeError, # unsubscriptable object
):
raise VariableDoesNotExist("Failed lookup for key [%s] in %r", (bit, current)) # missing attribute
except Exception, e:
if getattr(e, 'silent_variable_failure', False):
current = settings.TEMPLATE_STRING_IF_INVALID
else:
raise
return current
@register.tag
def set(parser, token):
"""
Usage:
{% set templ_tag var1 var2 ... key %}
{% set variable key %}
This tag save result of {% templ_tag var1 var2 ... %} to variable with name key,
Or will save value of variable to new variable with name key.
"""
bits = token.contents.split(' ')[1:]
new_token = Token(TOKEN_BLOCK, ' '.join(bits[:-1]))
if bits[0] in parser.tags:
func = parser.tags[bits[0]](parser, new_token)
args = []
else:
func = CallableVariable(bits[0])
args = [Variable(bit) for bit in bits[1:-1]]
return SetNode(func, args, bits[-1])
class SetNode(Node):
def __init__(self, func, args, key):
self.func = func
self.args = args
self.key = key
def render(self, context):
if isinstance(self.func, Node):
context[self.key] = self.func.render(context)
else:
f = self.func.resolve(context)
if callable(f):
args = [arg.resolve(context) for arg in self.args]
context[self.key] = f(*args)
else:
context[self.key] = f
return ''
@register.filter_function
def order_by(queryset, args):
args = [x.strip() for x in args.split(',')]
return queryset.order_by(*args)
@register.tag
def filter(parser, token):
"""
Filter tag for Query sets. Use with set tag =)
{% set filter posts status 0 drafts %}
"""
bits = token.contents.split(' ')
return FilterTag(bits[1], bits[2:])
class FilterTag(Node):
def __init__(self, query_list_name, args):
self.query_list_name = query_list_name
self.kwargs = {}
for i, x in enumerate(args):
if i % 2 == 0:
self.kwargs[str(x)] = str(args[i + 1])
def render(self, context):
kwargs = {}
for key in self.kwargs:
kwargs[key] = Variable(self.kwargs[key]).resolve(context)
query_list = Variable(self.query_list_name).resolve(context)
return query_list.filter(**kwargs)
@register.simple_tag
def get_settings(key, default=None):
return getattr(settings, key, default)
|
ilblackdragon/django-misc | misc/templatetags/misc_tags.py | get_dict | python | def get_dict(parser, token):
bits = token.contents.split(' ')
return GetDict(bits[1], bits[2], ((len(bits) > 3) and bits[3]) or '', ((len(bits) > 4) and bits[4]) or '', ((len(bits) > 5) and bits[5]) or '') | Call {% get_dict dict key default_key %} or {% get_dict dict key %}
Return value from dict of key element. If there are no key in get_dict it returns default_key (or '')
Return value will be in parameter 'value' | train | https://github.com/ilblackdragon/django-misc/blob/0accd2dc97de656a1c9e275be81e817f78a2eb9d/misc/templatetags/misc_tags.py#L39-L46 | null | # -*- coding: utf-8 -*-
from django.template.base import Library, Token, TOKEN_BLOCK, Node, Variable, VariableDoesNotExist
from django.conf import settings
from django.template.defaultfilters import stringfilter
register = Library()
@register.filter
@stringfilter
def cutafter(text, length):
if len(text) > int(length)+3:
return text[:int(length)] + "..."
else:
return text
@register.filter
def get_range(length):
return range(length)
@register.simple_tag
def get_element(list, key, key2=None):
if not key2:
return list[key1]
return list[key1][key2]
@register.simple_tag
def find_element(list, index, index2=1):
"""
When you have list like: a = [(0, 10), (1, 20), (2, 30)] and you need to get value from tuple with first value == index
Usage:
{% find_element 1 %} will return 20
"""
for x in list:
if x[0] == index:
return x[index2]
return None
@register.tag
class GetDict(Node):
def __init__(self, dict, key, *args):
self.dict = dict
self.key = key
self.default = ''
self.context_key = 'value'
if args[1] == '':
self.default = args[0]
elif (args[0] == 'as'):
self.context_key = args[1]
elif (args[1] == 'as') and (args[2] != ''):
self.default = args[0]
self.context_key = args[2]
else:
# raise BadFormat
pass
def render(self, context):
dict = Variable(self.dict).resolve(context)
key = context.get(self.key, self.key)
default = context.get(self.default, self.default)
if dict:
context[self.context_key] = dict.get(key, default)
else:
context[self.context_key] = default
return ''
class CallableVariable(Variable):
def _resolve_lookup(self, context):
"""
Performs resolution of a real variable (i.e. not a literal) against the
given context.
As indicated by the method's name, this method is an implementation
detail and shouldn't be called by external code. Use Variable.resolve()
instead.
"""
current = context
try: # catch-all for silent variable failures
for bit in self.lookups:
if callable(current):
if getattr(current, 'alters_data', False):
current = settings.TEMPLATE_STRING_IF_INVALID
else:
try: # method call (assuming no args required)
current = current()
except TypeError: # arguments *were* required
# GOTCHA: This will also catch any TypeError
# raised in the function itself.
current = settings.TEMPLATE_STRING_IF_INVALID # invalid method call
try: # dictionary lookup
current = current[bit]
except (TypeError, AttributeError, KeyError):
try: # attribute lookup
current = getattr(current, bit)
except (TypeError, AttributeError):
try: # list-index lookup
current = current[int(bit)]
except (IndexError, # list index out of range
ValueError, # invalid literal for int()
KeyError, # current is a dict without `int(bit)` key
TypeError, # unsubscriptable object
):
raise VariableDoesNotExist("Failed lookup for key [%s] in %r", (bit, current)) # missing attribute
except Exception, e:
if getattr(e, 'silent_variable_failure', False):
current = settings.TEMPLATE_STRING_IF_INVALID
else:
raise
return current
@register.tag
def set(parser, token):
"""
Usage:
{% set templ_tag var1 var2 ... key %}
{% set variable key %}
This tag save result of {% templ_tag var1 var2 ... %} to variable with name key,
Or will save value of variable to new variable with name key.
"""
bits = token.contents.split(' ')[1:]
new_token = Token(TOKEN_BLOCK, ' '.join(bits[:-1]))
if bits[0] in parser.tags:
func = parser.tags[bits[0]](parser, new_token)
args = []
else:
func = CallableVariable(bits[0])
args = [Variable(bit) for bit in bits[1:-1]]
return SetNode(func, args, bits[-1])
class SetNode(Node):
def __init__(self, func, args, key):
self.func = func
self.args = args
self.key = key
def render(self, context):
if isinstance(self.func, Node):
context[self.key] = self.func.render(context)
else:
f = self.func.resolve(context)
if callable(f):
args = [arg.resolve(context) for arg in self.args]
context[self.key] = f(*args)
else:
context[self.key] = f
return ''
@register.filter_function
def order_by(queryset, args):
args = [x.strip() for x in args.split(',')]
return queryset.order_by(*args)
@register.tag
def filter(parser, token):
"""
Filter tag for Query sets. Use with set tag =)
{% set filter posts status 0 drafts %}
"""
bits = token.contents.split(' ')
return FilterTag(bits[1], bits[2:])
class FilterTag(Node):
def __init__(self, query_list_name, args):
self.query_list_name = query_list_name
self.kwargs = {}
for i, x in enumerate(args):
if i % 2 == 0:
self.kwargs[str(x)] = str(args[i + 1])
def render(self, context):
kwargs = {}
for key in self.kwargs:
kwargs[key] = Variable(self.kwargs[key]).resolve(context)
query_list = Variable(self.query_list_name).resolve(context)
return query_list.filter(**kwargs)
@register.simple_tag
def get_settings(key, default=None):
return getattr(settings, key, default)
|
ilblackdragon/django-misc | misc/templatetags/misc_tags.py | filter | python | def filter(parser, token):
bits = token.contents.split(' ')
return FilterTag(bits[1], bits[2:]) | Filter tag for Query sets. Use with set tag =)
{% set filter posts status 0 drafts %} | train | https://github.com/ilblackdragon/django-misc/blob/0accd2dc97de656a1c9e275be81e817f78a2eb9d/misc/templatetags/misc_tags.py#L166-L172 | null | # -*- coding: utf-8 -*-
from django.template.base import Library, Token, TOKEN_BLOCK, Node, Variable, VariableDoesNotExist
from django.conf import settings
from django.template.defaultfilters import stringfilter
register = Library()
@register.filter
@stringfilter
def cutafter(text, length):
if len(text) > int(length)+3:
return text[:int(length)] + "..."
else:
return text
@register.filter
def get_range(length):
return range(length)
@register.simple_tag
def get_element(list, key, key2=None):
if not key2:
return list[key1]
return list[key1][key2]
@register.simple_tag
def find_element(list, index, index2=1):
"""
When you have list like: a = [(0, 10), (1, 20), (2, 30)] and you need to get value from tuple with first value == index
Usage:
{% find_element 1 %} will return 20
"""
for x in list:
if x[0] == index:
return x[index2]
return None
@register.tag
def get_dict(parser, token):
"""
Call {% get_dict dict key default_key %} or {% get_dict dict key %}
Return value from dict of key element. If there are no key in get_dict it returns default_key (or '')
Return value will be in parameter 'value'
"""
bits = token.contents.split(' ')
return GetDict(bits[1], bits[2], ((len(bits) > 3) and bits[3]) or '', ((len(bits) > 4) and bits[4]) or '', ((len(bits) > 5) and bits[5]) or '')
class GetDict(Node):
def __init__(self, dict, key, *args):
self.dict = dict
self.key = key
self.default = ''
self.context_key = 'value'
if args[1] == '':
self.default = args[0]
elif (args[0] == 'as'):
self.context_key = args[1]
elif (args[1] == 'as') and (args[2] != ''):
self.default = args[0]
self.context_key = args[2]
else:
# raise BadFormat
pass
def render(self, context):
dict = Variable(self.dict).resolve(context)
key = context.get(self.key, self.key)
default = context.get(self.default, self.default)
if dict:
context[self.context_key] = dict.get(key, default)
else:
context[self.context_key] = default
return ''
class CallableVariable(Variable):
def _resolve_lookup(self, context):
"""
Performs resolution of a real variable (i.e. not a literal) against the
given context.
As indicated by the method's name, this method is an implementation
detail and shouldn't be called by external code. Use Variable.resolve()
instead.
"""
current = context
try: # catch-all for silent variable failures
for bit in self.lookups:
if callable(current):
if getattr(current, 'alters_data', False):
current = settings.TEMPLATE_STRING_IF_INVALID
else:
try: # method call (assuming no args required)
current = current()
except TypeError: # arguments *were* required
# GOTCHA: This will also catch any TypeError
# raised in the function itself.
current = settings.TEMPLATE_STRING_IF_INVALID # invalid method call
try: # dictionary lookup
current = current[bit]
except (TypeError, AttributeError, KeyError):
try: # attribute lookup
current = getattr(current, bit)
except (TypeError, AttributeError):
try: # list-index lookup
current = current[int(bit)]
except (IndexError, # list index out of range
ValueError, # invalid literal for int()
KeyError, # current is a dict without `int(bit)` key
TypeError, # unsubscriptable object
):
raise VariableDoesNotExist("Failed lookup for key [%s] in %r", (bit, current)) # missing attribute
except Exception, e:
if getattr(e, 'silent_variable_failure', False):
current = settings.TEMPLATE_STRING_IF_INVALID
else:
raise
return current
@register.tag
def set(parser, token):
"""
Usage:
{% set templ_tag var1 var2 ... key %}
{% set variable key %}
This tag save result of {% templ_tag var1 var2 ... %} to variable with name key,
Or will save value of variable to new variable with name key.
"""
bits = token.contents.split(' ')[1:]
new_token = Token(TOKEN_BLOCK, ' '.join(bits[:-1]))
if bits[0] in parser.tags:
func = parser.tags[bits[0]](parser, new_token)
args = []
else:
func = CallableVariable(bits[0])
args = [Variable(bit) for bit in bits[1:-1]]
return SetNode(func, args, bits[-1])
class SetNode(Node):
def __init__(self, func, args, key):
self.func = func
self.args = args
self.key = key
def render(self, context):
if isinstance(self.func, Node):
context[self.key] = self.func.render(context)
else:
f = self.func.resolve(context)
if callable(f):
args = [arg.resolve(context) for arg in self.args]
context[self.key] = f(*args)
else:
context[self.key] = f
return ''
@register.filter_function
def order_by(queryset, args):
args = [x.strip() for x in args.split(',')]
return queryset.order_by(*args)
@register.tag
class FilterTag(Node):
def __init__(self, query_list_name, args):
self.query_list_name = query_list_name
self.kwargs = {}
for i, x in enumerate(args):
if i % 2 == 0:
self.kwargs[str(x)] = str(args[i + 1])
def render(self, context):
kwargs = {}
for key in self.kwargs:
kwargs[key] = Variable(self.kwargs[key]).resolve(context)
query_list = Variable(self.query_list_name).resolve(context)
return query_list.filter(**kwargs)
@register.simple_tag
def get_settings(key, default=None):
return getattr(settings, key, default)
|
ilblackdragon/django-misc | misc/templatetags/misc_tags.py | CallableVariable._resolve_lookup | python | def _resolve_lookup(self, context):
current = context
try: # catch-all for silent variable failures
for bit in self.lookups:
if callable(current):
if getattr(current, 'alters_data', False):
current = settings.TEMPLATE_STRING_IF_INVALID
else:
try: # method call (assuming no args required)
current = current()
except TypeError: # arguments *were* required
# GOTCHA: This will also catch any TypeError
# raised in the function itself.
current = settings.TEMPLATE_STRING_IF_INVALID # invalid method call
try: # dictionary lookup
current = current[bit]
except (TypeError, AttributeError, KeyError):
try: # attribute lookup
current = getattr(current, bit)
except (TypeError, AttributeError):
try: # list-index lookup
current = current[int(bit)]
except (IndexError, # list index out of range
ValueError, # invalid literal for int()
KeyError, # current is a dict without `int(bit)` key
TypeError, # unsubscriptable object
):
raise VariableDoesNotExist("Failed lookup for key [%s] in %r", (bit, current)) # missing attribute
except Exception, e:
if getattr(e, 'silent_variable_failure', False):
current = settings.TEMPLATE_STRING_IF_INVALID
else:
raise
return current | Performs resolution of a real variable (i.e. not a literal) against the
given context.
As indicated by the method's name, this method is an implementation
detail and shouldn't be called by external code. Use Variable.resolve()
instead. | train | https://github.com/ilblackdragon/django-misc/blob/0accd2dc97de656a1c9e275be81e817f78a2eb9d/misc/templatetags/misc_tags.py#L77-L119 | null | class CallableVariable(Variable):
|
ilblackdragon/django-misc | misc/admin.py | admin_tagify | python | def admin_tagify(short_description=None, allow_tags=True):
def tagify(func):
func.allow_tags = bool(allow_tags)
if short_description:
func.short_description = short_description
return func
return tagify | Decorator that add short_description and allow_tags to ModelAdmin list_display function.
Example:
class AlbumAdmin(admin.ModelAdmin):
list_display = ['title', 'year', 'artist', 'total_tacks', 'view_track']
@admin_tagify("View track")
def view_songs(self, obj):
return "<a href=\"%s?album=%d\">%d tracks</a>" % (reverse("admin:music_track_changelist"), obj.id, obj.tracks.count()) | train | https://github.com/ilblackdragon/django-misc/blob/0accd2dc97de656a1c9e275be81e817f78a2eb9d/misc/admin.py#L4-L23 | null | from django.contrib import admin
from django.contrib.admin.views.main import ChangeList
def admin_tagify(short_description=None, allow_tags=True):
"""
Decorator that add short_description and allow_tags to ModelAdmin list_display function.
Example:
class AlbumAdmin(admin.ModelAdmin):
list_display = ['title', 'year', 'artist', 'total_tacks', 'view_track']
@admin_tagify("View track")
def view_songs(self, obj):
return "<a href=\"%s?album=%d\">%d tracks</a>" % (reverse("admin:music_track_changelist"), obj.id, obj.tracks.count())
"""
def tagify(func):
func.allow_tags = bool(allow_tags)
if short_description:
func.short_description = short_description
return func
return tagify
def foreign_field_func(field_name, short_description=None, admin_order_field=None):
"""
Allow to use ForeignKey field attributes at list_display in a simple way.
Example:
from misc.admin import foreign_field_func as ff
class SongAdmin(admin.ModelAdmin):
list_display = ['name', 'time', 'artist', 'album', ff('track__num', "Track number"), ff('album__total_tracks')]
"""
def accessor(obj):
val = obj
for part in field_name.split('__'):
val = getattr(val, part)
return val
if short_description:
accessor.short_description = short_description
else:
accessor.__name__ = field_name
if admin_order_field:
accessor.admin_order_field = admin_order_field
else:
accessor.admin_order_field = (field_name,)
return accessor
class SpecialOrderingChangeList(ChangeList):
"""
Override change list for improve multiordering in admin change list.
`Django will only honor the first element in the list/tuple ordering attribute; any others will be ignored.`
Example:
class SongAdmin(admin.ModelAdmin):
list_display = ['name', 'time', 'artist', 'album', 'track', 'total_tracks']
special_ordering = {'artist': ('artist', 'album', 'track'), 'album': ('album', 'track')}
default_special_ordering = 'artist'
def get_changelist(self, request, **kwargs):
return SpecialOrderingChangeList
"""
def apply_special_ordering(self, queryset):
order_type, order_by = [self.params.get(param, None) for param in ('ot', 'o')]
special_ordering = self.model_admin.special_ordering
if special_ordering:
try:
if order_type and order_by:
order_field = self.list_display[int(order_by)]
ordering = special_ordering[order_field]
if order_type == 'desc':
ordering = ['-' + field for field in ordering]
else:
ordering = special_ordering[self.model_admin.default_special_ordering]
queryset = queryset.order_by(*ordering)
except IndexError:
return queryset
except KeyError:
return queryset
return queryset
def get_query_set(self):
queryset = super(SpecialOrderingChangeList, self).get_query_set()
queryset = self.apply_special_ordering(queryset)
return queryset
class SoftDeleteAdmin(admin.ModelAdmin):
"""Custom admin page for models with SoftDeleteManager."""
list_display = ('id', '__unicode__', 'alive', )
list_filter = ('alive', )
def queryset(self, request):
"""Returns a Queryset of all model instances that can be edited by the
admin site. This is used by changelist_view."""
query_set = self.model._default_manager.all_with_deleted()
ordering = self.ordering or ()
if ordering:
query_set = query_set.order_by(*ordering)
return query_set
|
ilblackdragon/django-misc | misc/admin.py | foreign_field_func | python | def foreign_field_func(field_name, short_description=None, admin_order_field=None):
"""
Allow to use ForeignKey field attributes at list_display in a simple way.
Example:
from misc.admin import foreign_field_func as ff
class SongAdmin(admin.ModelAdmin):
list_display = ['name', 'time', 'artist', 'album', ff('track__num', "Track number"), ff('album__total_tracks')]
"""
def accessor(obj):
val = obj
for part in field_name.split('__'):
val = getattr(val, part)
return val
if short_description:
accessor.short_description = short_description
else:
accessor.__name__ = field_name
if admin_order_field:
accessor.admin_order_field = admin_order_field
else:
accessor.admin_order_field = (field_name,)
return accessor | Allow to use ForeignKey field attributes at list_display in a simple way.
Example:
from misc.admin import foreign_field_func as ff
class SongAdmin(admin.ModelAdmin):
list_display = ['name', 'time', 'artist', 'album', ff('track__num', "Track number"), ff('album__total_tracks')] | train | https://github.com/ilblackdragon/django-misc/blob/0accd2dc97de656a1c9e275be81e817f78a2eb9d/misc/admin.py#L26-L52 | null | from django.contrib import admin
from django.contrib.admin.views.main import ChangeList
def admin_tagify(short_description=None, allow_tags=True):
"""
Decorator that add short_description and allow_tags to ModelAdmin list_display function.
Example:
class AlbumAdmin(admin.ModelAdmin):
list_display = ['title', 'year', 'artist', 'total_tacks', 'view_track']
@admin_tagify("View track")
def view_songs(self, obj):
return "<a href=\"%s?album=%d\">%d tracks</a>" % (reverse("admin:music_track_changelist"), obj.id, obj.tracks.count())
"""
def tagify(func):
func.allow_tags = bool(allow_tags)
if short_description:
func.short_description = short_description
return func
return tagify
def foreign_field_func(field_name, short_description=None, admin_order_field=None):
"""
Allow to use ForeignKey field attributes at list_display in a simple way.
Example:
from misc.admin import foreign_field_func as ff
class SongAdmin(admin.ModelAdmin):
list_display = ['name', 'time', 'artist', 'album', ff('track__num', "Track number"), ff('album__total_tracks')]
"""
def accessor(obj):
val = obj
for part in field_name.split('__'):
val = getattr(val, part)
return val
if short_description:
accessor.short_description = short_description
else:
accessor.__name__ = field_name
if admin_order_field:
accessor.admin_order_field = admin_order_field
else:
accessor.admin_order_field = (field_name,)
return accessor
class SpecialOrderingChangeList(ChangeList):
"""
Override change list for improve multiordering in admin change list.
`Django will only honor the first element in the list/tuple ordering attribute; any others will be ignored.`
Example:
class SongAdmin(admin.ModelAdmin):
list_display = ['name', 'time', 'artist', 'album', 'track', 'total_tracks']
special_ordering = {'artist': ('artist', 'album', 'track'), 'album': ('album', 'track')}
default_special_ordering = 'artist'
def get_changelist(self, request, **kwargs):
return SpecialOrderingChangeList
"""
def apply_special_ordering(self, queryset):
order_type, order_by = [self.params.get(param, None) for param in ('ot', 'o')]
special_ordering = self.model_admin.special_ordering
if special_ordering:
try:
if order_type and order_by:
order_field = self.list_display[int(order_by)]
ordering = special_ordering[order_field]
if order_type == 'desc':
ordering = ['-' + field for field in ordering]
else:
ordering = special_ordering[self.model_admin.default_special_ordering]
queryset = queryset.order_by(*ordering)
except IndexError:
return queryset
except KeyError:
return queryset
return queryset
def get_query_set(self):
queryset = super(SpecialOrderingChangeList, self).get_query_set()
queryset = self.apply_special_ordering(queryset)
return queryset
class SoftDeleteAdmin(admin.ModelAdmin):
"""Custom admin page for models with SoftDeleteManager."""
list_display = ('id', '__unicode__', 'alive', )
list_filter = ('alive', )
def queryset(self, request):
"""Returns a Queryset of all model instances that can be edited by the
admin site. This is used by changelist_view."""
query_set = self.model._default_manager.all_with_deleted()
ordering = self.ordering or ()
if ordering:
query_set = query_set.order_by(*ordering)
return query_set
|
ilblackdragon/django-misc | misc/admin.py | SoftDeleteAdmin.queryset | python | def queryset(self, request):
query_set = self.model._default_manager.all_with_deleted()
ordering = self.ordering or ()
if ordering:
query_set = query_set.order_by(*ordering)
return query_set | Returns a Queryset of all model instances that can be edited by the
admin site. This is used by changelist_view. | train | https://github.com/ilblackdragon/django-misc/blob/0accd2dc97de656a1c9e275be81e817f78a2eb9d/misc/admin.py#L99-L106 | null | class SoftDeleteAdmin(admin.ModelAdmin):
"""Custom admin page for models with SoftDeleteManager."""
list_display = ('id', '__unicode__', 'alive', )
list_filter = ('alive', )
|
ilblackdragon/django-misc | misc/templatetags/share_buttons.py | current_site_url | python | def current_site_url():
protocol = getattr(settings, 'MY_SITE_PROTOCOL', 'https')
port = getattr(settings, 'MY_SITE_PORT', '')
url = '%s://%s' % (protocol, settings.SITE_DOMAIN)
if port:
url += ':%s' % port
return url | Returns fully qualified URL (no trailing slash) for the current site. | train | https://github.com/ilblackdragon/django-misc/blob/0accd2dc97de656a1c9e275be81e817f78a2eb9d/misc/templatetags/share_buttons.py#L23-L30 | null | # -*- coding: utf-8 -*-
import urllib
import hashlib
from django.conf import settings
from django.contrib.sites.models import Site
from django.utils.translation import get_language_from_request, ugettext
if 'coffin' in settings.INSTALLED_APPS:
from coffin.template import Library
from jinja2 import Markup as mark_safe
else:
from django.template import Library
from django.utils.safestring import mark_safe
register = Library()
if 'coffin' in settings.INSTALLED_APPS:
register.simple_tag = register.object
@register.simple_tag
def tweet_it(request, url, title):
return mark_safe("""
<div class="twitter">
<a href="http://twitter.com/home/?%s" title="%s" target="_blank"></a>
</div>
""" % (urllib.urlencode({'status': title + (u" " + url + u" #escalibro").encode('utf-8')}), ugettext("Tweet it")))
@register.simple_tag
def tweet_like(request, url, title):
return mark_safe("""
<iframe allowtransparency="true" frameborder="0" scrolling="no" tabindex="0" class="twitter-share-button twitter-count-horizontal"
src="https://platform.twitter.com/widgets/tweet_button.html?_=1302382076454&count=horizontal&lang=en&via=escalibro&%s"
style="width: 110px; height: 20px; " title="Twitter For Websites: Tweet Button"></iframe>
<script type="text/javascript" src="https://platform.twitter.com/widgets.js"></script>
""" % ('text=' + title + ' %23escalibro&' + urllib.urlencode({'url': url})))
@register.simple_tag
def facebook_it(request, url, title):
return mark_safe("""
<div class="facebook">
<a onclick="window.open(this.href, '%s', 'width=800,height=300'); return false" href="https://www.facebook.com/sharer.php?%s" title="%s" target="_blank"></a>
</div>
""" % (ugettext("Share link on FaceBook"), urllib.urlencode({'u': url, 't': title}), ugettext("To FaceBook")))
@register.simple_tag
def facebook_like(request, url, title):
return mark_safe("""
<iframe src="https://www.facebook.com/plugins/like.php?href%s&layout=button_count&show_faces=true&width=85&action=like&colorscheme=light&height=21" scrolling="no" frameborder="0" style="border:none; overflow:hidden; width:85px; height:21px;" allowtransparency="true"></iframe>
""" % (urllib.urlencode({'': url})))
@register.simple_tag
def vk_it(request, url, title):
return mark_safe("""
<div class="vk">
<a onclick="window.open(this.href, '%s', 'width=800,height=300'); return false" href="https://vkontakte.ru/share.php?%s" title="%s"></a>
</div>
""" % (ugettext("Share link on VKontakte"), urllib.urlencode({'url': url, 'title': title}), ugettext("To VKontakte")))
@register.simple_tag
def vk_like(request, url, title):
block_id = (hashlib.md5(url + title)).hexdigest()
if not hasattr(request, '_vk_js'):
request._vk_js = ''
request._vk_js += 'VK.Widgets.Like("vk_like_%s", {type: "button", pageUrl: "%s", pageTitle: "%s", height: "28px"});' \
% (block_id, url, settings.SITE_NAME + " - " + title)
return mark_safe('<div id="vk_like_%s"></div>' % block_id)
@register.simple_tag
def vk_js(request):
return mark_safe("""
<script type="text/javascript">
VK.init({apiId: "%s", onlyWidgets: true});
%s
</script>""" % (settings.VKONTAKTE_APPLICATION_ID, request._vk_js if hasattr(request, '_vk_js') else ''))
@register.simple_tag
def gplus_it(request, url, title):
return mark_safe("""
<div class="gplus">
<g:plusone size="small" annotation="none"></g:plusone>
</div>""")
@register.simple_tag
def gplus_js(request):
    """Emit the asynchronous loader snippet for Google's plusone.js API."""
    return mark_safe("""
<script type="text/javascript">
    window.___gcfg = {lang: 'ru'};
    (function() {
        var po = document.createElement('script'); po.type = 'text/javascript'; po.async = true;
        po.src = 'https://apis.google.com/js/plusone.js';
        var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(po, s);
    })();
</script>""")
@register.simple_tag
def gplus_like(request, url, title):
    """Render a Google+ "+1" button with the default count annotation.

    ``request``, ``url`` and ``title`` are unused -- the widget resolves
    the current page URL client-side; the arguments keep the signature
    uniform with the other ``*_like`` tags.
    """
    return mark_safe("""
<div class="gplus_like">
    <g:plusone size="small"></g:plusone>
</div>""")
# Button-renderer registries; list order is the left-to-right render order.
share_functions = [tweet_it, vk_it, facebook_it, gplus_it]  # Ordering
like_functions = [tweet_like, vk_like, facebook_like, gplus_like]
# JS bootstrap snippets needed by the corresponding buttons above.
share_js_functions = [gplus_js]
like_js_functions = [vk_js, gplus_js]
def group_buttons(request, url, title, funcs, block_class):
    """Render every button in *funcs* inside one <div class="block_class">."""
    # Make the URL absolute, then hand byte strings to the renderers
    # (several of them md5/urlencode the values).
    absolute_url = (current_site_url() + url).encode('utf-8')
    encoded_title = title.encode('utf-8')
    parts = [mark_safe("<div class=\"%s\">" % block_class)]
    for renderer in funcs:
        parts.append(renderer(request, absolute_url, encoded_title))
    parts.append(mark_safe("</div>"))
    return mark_safe(''.join(parts))
@register.simple_tag
def share_it(request, url, title):
    """Render the full row of "share" buttons for *url* in div.share_buttons."""
    return group_buttons(request, url, title, share_functions, "share_buttons")
@register.simple_tag
def like_it(request, url, title):
    """Render the full row of "like" buttons for *url* in div.like_buttons."""
    return group_buttons(request, url, title, like_functions, "like_buttons")
@register.simple_tag
def share_js(request):
    """Concatenate the JS bootstrap snippets required by the share buttons."""
    # NOTE(review): joining on mark_safe(' ') looks intended to keep the
    # result marked safe for templates -- confirm with the Django version.
    return mark_safe(' ').join([f(request) for f in share_js_functions])
@register.simple_tag
def like_js(request):
    """Concatenate the JS bootstrap snippets required by the like buttons."""
    # vk_js must run after vk_like tags so request._vk_js is populated.
    return mark_safe(' ').join([f(request) for f in like_js_functions])
|
ilblackdragon/django-misc | misc/json_encode.py | json_encode | python | def json_encode(data):
def _any(data):
ret = None
# Opps, we used to check if it is of type list, but that fails
# i.e. in the case of django.newforms.utils.ErrorList, which extends
# the type "list". Oh man, that was a dumb mistake!
if isinstance(data, list):
ret = _list(data)
# Same as for lists above.
elif isinstance(data, dict):
ret = _dict(data)
elif isinstance(data, Decimal):
# json.dumps() cant handle Decimal
ret = str(data)
elif isinstance(data, models.query.QuerySet):
# Actually its the same as a list ...
ret = _list(data)
elif isinstance(data, models.Model):
ret = _model(data)
# here we need to encode the string as unicode (otherwise we get utf-16 in the json-response)
elif isinstance(data, basestring):
ret = unicode(data)
# see http://code.djangoproject.com/ticket/5868
elif isinstance(data, Promise):
ret = force_unicode(data)
else:
ret = data
return ret
def _model(data):
ret = {}
# If we only have a model, we only want to encode the fields.
for f in data._meta.fields:
ret[f.attname] = _any(getattr(data, f.attname))
# And additionally encode arbitrary properties that had been added.
fields = dir(data.__class__) + ret.keys()
add_ons = [k for k in dir(data) if k not in fields if k != '_state']
for k in add_ons:
ret[k] = _any(getattr(data, k))
return ret
def _list(data):
ret = []
for v in data:
ret.append(_any(v))
return ret
def _dict(data):
ret = {}
for k,v in data.items():
ret[k] = _any(v)
return ret
ret = _any(data)
return json.dumps(ret, cls=DateTimeAwareJSONEncoder) | The main issues with django's default json serializer is that properties that
had been added to an object dynamically are being ignored (and it also has
problems with some models). | train | https://github.com/ilblackdragon/django-misc/blob/0accd2dc97de656a1c9e275be81e817f78a2eb9d/misc/json_encode.py#L24-L84 | [
"def _any(data):\n ret = None\n # Opps, we used to check if it is of type list, but that fails \n # i.e. in the case of django.newforms.utils.ErrorList, which extends\n # the type \"list\". Oh man, that was a dumb mistake!\n if isinstance(data, list):\n ret = _list(data)\n # Same as for lists above.\n elif isinstance(data, dict):\n ret = _dict(data)\n elif isinstance(data, Decimal):\n # json.dumps() cant handle Decimal\n ret = str(data)\n elif isinstance(data, models.query.QuerySet):\n # Actually its the same as a list ...\n ret = _list(data)\n elif isinstance(data, models.Model):\n ret = _model(data)\n # here we need to encode the string as unicode (otherwise we get utf-16 in the json-response)\n elif isinstance(data, basestring):\n ret = unicode(data)\n # see http://code.djangoproject.com/ticket/5868\n elif isinstance(data, Promise):\n ret = force_unicode(data)\n else:\n ret = data\n return ret\n"
] | from django.conf import settings
from django.core.serializers.json import DateTimeAwareJSONEncoder
from django.core.serializers import serialize
from django.db import models
from django.http import HttpResponse
from django.utils.encoding import force_unicode
from django.utils.functional import Promise
#Fix for Django 1.5 and simplejson compatibility
try:
import json
except ImportError:
from django.utils import simplejson as json
if 'coffin' in settings.INSTALLED_APPS:
from coffin.template.loader import render_to_string
from coffin.template.response import TemplateResponse
else:
from django.template.loader import render_to_string
from django.template.response import TemplateResponse
from decimal import Decimal
def json_encode(data):
    """
    The main issues with django's default json serializer is that properties that
    had been added to an object dynamically are being ignored (and it also has
    problems with some models).
    """
    def _any(data):
        # Recursive dispatch on runtime type. Branch order matters:
        # QuerySet is tested before Model, and basestring before the
        # generic fall-through.
        ret = None
        # Opps, we used to check if it is of type list, but that fails
        # i.e. in the case of django.newforms.utils.ErrorList, which extends
        # the type "list". Oh man, that was a dumb mistake!
        if isinstance(data, list):
            ret = _list(data)
        # Same as for lists above.
        elif isinstance(data, dict):
            ret = _dict(data)
        elif isinstance(data, Decimal):
            # json.dumps() cant handle Decimal
            ret = str(data)
        elif isinstance(data, models.query.QuerySet):
            # Actually its the same as a list ...
            ret = _list(data)
        elif isinstance(data, models.Model):
            ret = _model(data)
        # here we need to encode the string as unicode (otherwise we get utf-16 in the json-response)
        elif isinstance(data, basestring):
            ret = unicode(data)
        # see http://code.djangoproject.com/ticket/5868
        elif isinstance(data, Promise):
            ret = force_unicode(data)
        else:
            ret = data
        return ret
    def _model(data):
        # Serialize a single model instance into a plain dict.
        ret = {}
        # If we only have a model, we only want to encode the fields.
        for f in data._meta.fields:
            ret[f.attname] = _any(getattr(data, f.attname))
        # And additionally encode arbitrary properties that had been added.
        # (Python 2: dict.keys() returns a list, so ``+`` concatenation works.)
        fields = dir(data.__class__) + ret.keys()
        add_ons = [k for k in dir(data) if k not in fields if k != '_state']
        for k in add_ons:
            ret[k] = _any(getattr(data, k))
        return ret
    def _list(data):
        # Serialize an iterable into a JSON array, element by element.
        ret = []
        for v in data:
            ret.append(_any(v))
        return ret
    def _dict(data):
        # Serialize a mapping, converting each value recursively.
        ret = {}
        for k,v in data.items():
            ret[k] = _any(v)
        return ret
    ret = _any(data)
    # DateTimeAwareJSONEncoder takes care of datetime/date values.
    return json.dumps(ret, cls=DateTimeAwareJSONEncoder)
def json_response(data):
    """Serialize *data* with json_encode and wrap it in an application/json response."""
    payload = json_encode(data)
    return HttpResponse(payload, content_type='application/json')
def json_template(data, template_name, template_context):
    """Old style, use JSONTemplateResponse instead of this.

    Renders *template_name* with *template_context*, stores the HTML under
    the 'html' key of *data* (mutating the caller's dict when one is given)
    and returns the whole thing as a JSON response.
    """
    rendered = render_to_string(template_name, template_context)
    payload = data or {}
    payload['html'] = rendered
    return HttpResponse(json_encode(payload), content_type='application/json')
class JSONTemplateResponse(TemplateResponse):
    """TemplateResponse that wraps the rendered HTML inside a JSON payload."""

    def __init__(self, *args, **kwargs):
        """There are extra arguments in kwargs:
        `data` dict for extra JSON data
        `html_varname` string for specify where rendered template will be
            stored, by default "html"

        Example:
        Py-code:
            return JSONTemplateResponse(request, template_name, template_context,
                data={'status': 'ok', 'user': request.user})
        This line will create response:
            {
                "status": "ok",
                "user": {
                    "username": "frol",
                    "first_name": "",
                    "last_name": "",
                    "is_active": true,
                    "email": "qq@qq.qq",
                    "is_superuser": true,
                    "is_staff": true,
                    "last_login": "2012-01-24 18:59:55",
                    "password": "sha1$fffff$1b4d68b3731ec29a797d61658c716e2400000000",
                    "id": 1,
                    "date_joined": "2011-07-09 05:57:21"
                },
                "html": "<rendered HTML>"
            }
        WARNING: Be carefull with serialization of model objects. As you can
        see in example, password hash has been serialized.
        """
        # Default to a JSON content type unless the caller overrides it.
        if 'content_type' not in kwargs:
            kwargs['content_type'] = 'application/json'
        # Pop our extra kwargs before TemplateResponse sees them.
        self.data = kwargs.pop('data', dict())
        self.html_varname = kwargs.pop('html_varname', 'html')
        super(JSONTemplateResponse, self).__init__(*args, **kwargs)

    @property
    def rendered_content(self):
        # Render the template normally, then fold the HTML into the JSON
        # payload under the configured key.
        html = super(JSONTemplateResponse, self).rendered_content
        self.data[self.html_varname] = html
        return json_encode(self.data)
|
ilblackdragon/django-misc | misc/json_encode.py | json_template | python | def json_template(data, template_name, template_context):
html = render_to_string(template_name, template_context)
data = data or {}
data['html'] = html
return HttpResponse(json_encode(data), content_type='application/json') | Old style, use JSONTemplateResponse instead of this. | train | https://github.com/ilblackdragon/django-misc/blob/0accd2dc97de656a1c9e275be81e817f78a2eb9d/misc/json_encode.py#L89-L95 | [
"def json_encode(data):\n \"\"\"\n The main issues with django's default json serializer is that properties that\n had been added to an object dynamically are being ignored (and it also has \n problems with some models).\n \"\"\"\n\n def _any(data):\n ret = None\n # Opps, we used to check if it is of type list, but that fails \n # i.e. in the case of django.newforms.utils.ErrorList, which extends\n # the type \"list\". Oh man, that was a dumb mistake!\n if isinstance(data, list):\n ret = _list(data)\n # Same as for lists above.\n elif isinstance(data, dict):\n ret = _dict(data)\n elif isinstance(data, Decimal):\n # json.dumps() cant handle Decimal\n ret = str(data)\n elif isinstance(data, models.query.QuerySet):\n # Actually its the same as a list ...\n ret = _list(data)\n elif isinstance(data, models.Model):\n ret = _model(data)\n # here we need to encode the string as unicode (otherwise we get utf-16 in the json-response)\n elif isinstance(data, basestring):\n ret = unicode(data)\n # see http://code.djangoproject.com/ticket/5868\n elif isinstance(data, Promise):\n ret = force_unicode(data)\n else:\n ret = data\n return ret\n\n def _model(data):\n ret = {}\n # If we only have a model, we only want to encode the fields.\n for f in data._meta.fields:\n ret[f.attname] = _any(getattr(data, f.attname))\n # And additionally encode arbitrary properties that had been added.\n fields = dir(data.__class__) + ret.keys()\n add_ons = [k for k in dir(data) if k not in fields if k != '_state']\n for k in add_ons:\n ret[k] = _any(getattr(data, k))\n return ret\n\n def _list(data):\n ret = []\n for v in data:\n ret.append(_any(v))\n return ret\n\n def _dict(data):\n ret = {}\n for k,v in data.items():\n ret[k] = _any(v)\n return ret\n\n ret = _any(data)\n return json.dumps(ret, cls=DateTimeAwareJSONEncoder)\n"
] | from django.conf import settings
from django.core.serializers.json import DateTimeAwareJSONEncoder
from django.core.serializers import serialize
from django.db import models
from django.http import HttpResponse
from django.utils.encoding import force_unicode
from django.utils.functional import Promise
#Fix for Django 1.5 and simplejson compatibility
try:
import json
except ImportError:
from django.utils import simplejson as json
if 'coffin' in settings.INSTALLED_APPS:
from coffin.template.loader import render_to_string
from coffin.template.response import TemplateResponse
else:
from django.template.loader import render_to_string
from django.template.response import TemplateResponse
from decimal import Decimal
def json_encode(data):
"""
The main issues with django's default json serializer is that properties that
had been added to an object dynamically are being ignored (and it also has
problems with some models).
"""
def _any(data):
ret = None
# Opps, we used to check if it is of type list, but that fails
# i.e. in the case of django.newforms.utils.ErrorList, which extends
# the type "list". Oh man, that was a dumb mistake!
if isinstance(data, list):
ret = _list(data)
# Same as for lists above.
elif isinstance(data, dict):
ret = _dict(data)
elif isinstance(data, Decimal):
# json.dumps() cant handle Decimal
ret = str(data)
elif isinstance(data, models.query.QuerySet):
# Actually its the same as a list ...
ret = _list(data)
elif isinstance(data, models.Model):
ret = _model(data)
# here we need to encode the string as unicode (otherwise we get utf-16 in the json-response)
elif isinstance(data, basestring):
ret = unicode(data)
# see http://code.djangoproject.com/ticket/5868
elif isinstance(data, Promise):
ret = force_unicode(data)
else:
ret = data
return ret
def _model(data):
ret = {}
# If we only have a model, we only want to encode the fields.
for f in data._meta.fields:
ret[f.attname] = _any(getattr(data, f.attname))
# And additionally encode arbitrary properties that had been added.
fields = dir(data.__class__) + ret.keys()
add_ons = [k for k in dir(data) if k not in fields if k != '_state']
for k in add_ons:
ret[k] = _any(getattr(data, k))
return ret
def _list(data):
ret = []
for v in data:
ret.append(_any(v))
return ret
def _dict(data):
ret = {}
for k,v in data.items():
ret[k] = _any(v)
return ret
ret = _any(data)
return json.dumps(ret, cls=DateTimeAwareJSONEncoder)
def json_response(data):
return HttpResponse(json_encode(data), content_type='application/json')
class JSONTemplateResponse(TemplateResponse):
def __init__(self, *args, **kwargs):
"""There are extra arguments in kwargs:
`data` dict for extra JSON data
`html_varname` string for specify where rendered template will be
stored, by default "html"
Example:
Py-code:
return JSONTemplateResponse(request, template_name, template_context,
data={'status': 'ok', 'user': request.user})
This line will create response:
{
"status": "ok",
"user": {
"username": "frol",
"first_name": "",
"last_name": "",
"is_active": true,
"email": "qq@qq.qq",
"is_superuser": true,
"is_staff": true,
"last_login": "2012-01-24 18:59:55",
"password": "sha1$fffff$1b4d68b3731ec29a797d61658c716e2400000000",
"id": 1,
"date_joined": "2011-07-09 05:57:21"
},
"html": "<rendered HTML>"
}
WARNING: Be carefull with serialization of model objects. As you can
see in example, password hash has been serialized.
"""
if 'content_type' not in kwargs:
kwargs['content_type'] = 'application/json'
self.data = kwargs.pop('data', dict())
self.html_varname = kwargs.pop('html_varname', 'html')
super(JSONTemplateResponse, self).__init__(*args, **kwargs)
@property
def rendered_content(self):
html = super(JSONTemplateResponse, self).rendered_content
self.data[self.html_varname] = html
return json_encode(self.data)
|
ilblackdragon/django-misc | misc/management/utils.py | handle_lock | python | def handle_lock(handle):
def wrapper(self, *args, **options):
def on_interrupt(signum, frame):
# It's necessary to release lockfile
sys.exit()
signal.signal(signal.SIGTERM, on_interrupt)
start_time = time.time()
try:
verbosity = int(options.get('verbosity', 0))
except ValueError:
verbosity = 0
logger = logging.getLogger(self.__module__)
if verbosity == 0:
logger.level = logging.WARNING
elif verbosity == 1:
logger.level = logging.INFO
else:
logger.level = logging.DEBUG
logger.debug("-" * 72)
lock_name = self.__module__.split('.').pop()
lock = FileLock(os.path.join(LOCK_ROOT, lock_name))
logger.debug("%s - acquiring lock..." % lock_name)
try:
lock.acquire(LOCK_WAIT_TIMEOUT)
except AlreadyLocked:
logger.debug("lock already in place. quitting.")
return
except LockTimeout:
logger.debug("waiting for the lock timed out. quitting.")
return
logger.debug("acquired.")
try:
handle(self, logger, *args, **options)
except (KeyboardInterrupt, SystemExit):
pass
except:
import traceback
logging.warn("Command Failed")
logging.warn('=' * 72)
logging.warn(traceback.format_exc())
logging.warn('=' * 72)
logger.debug("releasing lock...")
lock.release()
logger.debug("released.")
logger.info("done in %.2f seconds" % (time.time() - start_time))
return
return wrapper | Decorate the handle method with a file lock to ensure there is only ever
one process running at any one time. | train | https://github.com/ilblackdragon/django-misc/blob/0accd2dc97de656a1c9e275be81e817f78a2eb9d/misc/management/utils.py#L24-L83 | null | """
A decorator for management commands (or any class method) to ensure that there is
only ever one process running the method at any one time.
Requires lockfile - (pip install lockfile)
Author: Ross Lawley
"""
import logging
import os
import signal
import sys
import time
from lockfile import FileLock, AlreadyLocked, LockTimeout
from django.conf import settings
# Lock timeout value - how long to wait for the lock to become available.
# Default behavior is to never wait for the lock to be available (fail fast)
LOCK_WAIT_TIMEOUT = getattr(settings, 'DEFAULT_LOCK_WAIT_TIMEOUT', -1)
LOCK_ROOT = getattr(settings, 'LOCK_ROOT', '')
def handle_lock(handle):
    """
    Decorate the handle method with a file lock to ensure there is only ever
    one process running at any one time.

    The wrapped ``handle`` is called as ``handle(self, logger, *args,
    **options)``; the extra ``logger`` is a per-command logger whose level
    follows the ``verbosity`` option.
    """
    def wrapper(self, *args, **options):
        def on_interrupt(signum, frame):
            # It's necessary to release lockfile: SystemExit is caught
            # below so the lock release still runs.
            sys.exit()
        signal.signal(signal.SIGTERM, on_interrupt)
        start_time = time.time()
        try:
            verbosity = int(options.get('verbosity', 0))
        except ValueError:
            verbosity = 0
        logger = logging.getLogger(self.__module__)
        if verbosity == 0:
            logger.level = logging.WARNING
        elif verbosity == 1:
            logger.level = logging.INFO
        else:
            logger.level = logging.DEBUG
        logger.debug("-" * 72)
        # Lock file is named after the command module, e.g. "send_mail".
        lock_name = self.__module__.split('.').pop()
        lock = FileLock(os.path.join(LOCK_ROOT, lock_name))
        logger.debug("%s - acquiring lock..." % lock_name)
        try:
            lock.acquire(LOCK_WAIT_TIMEOUT)
        except AlreadyLocked:
            logger.debug("lock already in place. quitting.")
            return
        except LockTimeout:
            logger.debug("waiting for the lock timed out. quitting.")
            return
        logger.debug("acquired.")
        try:
            handle(self, logger, *args, **options)
        except (KeyboardInterrupt, SystemExit):
            pass
        except Exception:
            import traceback
            # FIX: previously logged through the root logger via the
            # deprecated logging.warn(); use this command's logger and
            # the non-deprecated warning() instead.
            logger.warning("Command Failed")
            logger.warning('=' * 72)
            logger.warning(traceback.format_exc())
            logger.warning('=' * 72)
        finally:
            # FIX: release in a finally block so the lock can never be
            # left behind, whatever the handler raises.
            logger.debug("releasing lock...")
            lock.release()
            logger.debug("released.")
        logger.info("done in %.2f seconds" % (time.time() - start_time))
        return
    return wrapper
|
jtmoulia/switchboard-python | examples/lamsonworker.py | main | python | def main(url, lamson_host, lamson_port, lamson_debug):
try:
worker = LamsonWorker(url=url,
lamson_host=lamson_host,
lamson_port=lamson_port,
lamson_debug=lamson_debug)
worker.connect()
worker.run_forever()
except KeyboardInterrupt:
worker.close() | Create, connect, and block on the Lamson worker. | train | https://github.com/jtmoulia/switchboard-python/blob/c9b0cb74cb12a64385465091be633e78d39f08b1/examples/lamsonworker.py#L47-L59 | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
This Switchboard worker delivers emails to Lamson.
"""
__author__ = u"Thomas Moulia <jtmoulia@pocketknife.io>"
__copyright__ = u"Copyright © 2014, ThusFresh Inc All rights reserved."
import switchboard
from lamson import server
import logging
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
class LamsonWorker(switchboard.Fetcher):
def __init__(self, lamson_host='127.0.0.1', lamson_port=8823, lamson_debug=0,
*args, **kwargs):
super(LamsonWorker, self).__init__(*args, **kwargs)
self._relay = server.Relay(lamson_host, port=lamson_port, debug=lamson_debug)
def opened(self):
"""Connect to the websocket, and ensure the account is connected and
the INBOX is being watched, and then start watchingAll.
"""
def post_setup((cmds, resps)):
"""Post setup callback."""
logger.info("Setup complete, listening...")
self.send_cmds(('watchAll', {})).then(post_setup)
def received_new(self, msg):
"""
As new messages arrive, deliver them to the lamson relay.
"""
logger.info("Receiving msg, delivering to Lamson...")
logger.debug("Relaying msg to lamson: From: %s, To: %s",
msg['From'], msg['To'])
self._relay.deliver(msg)
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description="Switchboard -> Lamson Worker")
parser.add_argument('--url', default= "ws://192.168.50.2:8080/workers",
help="switchboard's websocket url")
parser.add_argument('--host', default='127.0.0.1',
help="lamson's host")
parser.add_argument('--port', default=8823,
help="lamson's port")
parser.add_argument('--debug', default=0, help="lamson's debug level")
args = parser.parse_args()
main(args.url, args.host, args.port, args.debug)
|
jtmoulia/switchboard-python | examples/lamsonworker.py | LamsonWorker.received_new | python | def received_new(self, msg):
logger.info("Receiving msg, delivering to Lamson...")
logger.debug("Relaying msg to lamson: From: %s, To: %s",
msg['From'], msg['To'])
self._relay.deliver(msg) | As new messages arrive, deliver them to the lamson relay. | train | https://github.com/jtmoulia/switchboard-python/blob/c9b0cb74cb12a64385465091be633e78d39f08b1/examples/lamsonworker.py#L37-L44 | null | class LamsonWorker(switchboard.Fetcher):
def __init__(self, lamson_host='127.0.0.1', lamson_port=8823, lamson_debug=0,
*args, **kwargs):
super(LamsonWorker, self).__init__(*args, **kwargs)
self._relay = server.Relay(lamson_host, port=lamson_port, debug=lamson_debug)
def opened(self):
"""Connect to the websocket, and ensure the account is connected and
the INBOX is being watched, and then start watchingAll.
"""
def post_setup((cmds, resps)):
"""Post setup callback."""
logger.info("Setup complete, listening...")
self.send_cmds(('watchAll', {})).then(post_setup)
|
jtmoulia/switchboard-python | aplus/__init__.py | _isPromise | python | def _isPromise(obj):
return hasattr(obj, "fulfill") and \
_isFunction(getattr(obj, "fulfill")) and \
hasattr(obj, "reject") and \
_isFunction(getattr(obj, "reject")) and \
hasattr(obj, "then") and \
_isFunction(getattr(obj, "then")) | A utility function to determine if the specified
object is a promise using "duck typing". | train | https://github.com/jtmoulia/switchboard-python/blob/c9b0cb74cb12a64385465091be633e78d39f08b1/aplus/__init__.py#L261-L271 | [
"def _isFunction(v):\n \"\"\"\n A utility function to determine if the specified\n value is a function.\n \"\"\"\n if v==None:\n return False\n if hasattr(v, \"__call__\"):\n return True\n return False\n"
] | from threading import Thread
class Promise:
    """
    This is a class that attempts to comply with the
    Promises/A+ specification and test suite:
    http://promises-aplus.github.io/promises-spec/
    """
    # These are the potential states of a promise
    PENDING = -1
    REJECTED = 0
    FULFILLED = 1
    def __init__(self):
        """
        Initialize the Promise into a pending state.
        """
        self._state = self.PENDING;
        self.value = None;
        self.reason = None;
        self._callbacks = [];
        self._errbacks = [];
    def fulfill(self, value):
        """
        Fulfill the promise with a given value.

        May only be called once, while still pending.
        """
        assert self._state==self.PENDING
        self._state=self.FULFILLED;
        self.value = value
        for callback in self._callbacks:
            try:
                callback(value)
            except Exception:
                # Ignore errors in callbacks
                pass
        # We will never call these callbacks again, so allow
        # them to be garbage collected.  This is important since
        # they probably include closures which are binding variables
        # that might otherwise be garbage collected.
        self._callbacks = []
    def reject(self, reason):
        """
        Reject this promise for a given reason.

        May only be called once, while still pending.
        """
        assert self._state==self.PENDING
        self._state=self.REJECTED;
        self.reason = reason
        for errback in self._errbacks:
            try:
                errback(reason)
            except Exception:
                # Ignore errors in callbacks
                pass
        # We will never call these errbacks again, so allow
        # them to be garbage collected.  This is important since
        # they probably include closures which are binding variables
        # that might otherwise be garbage collected.
        self._errbacks = []
    def isPending(self):
        """Indicate whether the Promise is still pending."""
        return self._state==self.PENDING
    def isFulfilled(self):
        """Indicate whether the Promise has been fulfilled."""
        return self._state==self.FULFILLED
    def isRejected(self):
        """Indicate whether the Promise has been rejected."""
        return self._state==self.REJECTED
    def get(self, timeout=None):
        """Get the value of the promise, waiting if necessary."""
        self.wait(timeout)
        if self._state==self.FULFILLED:
            return self.value
        else:
            raise ValueError("Calculation didn't yield a value")
    def wait(self, timeout=None):
        """
        An implementation of the wait method which doesn't involve
        polling but instead utilizes a "real" synchronization
        scheme.
        """
        import threading
        if self._state!=self.PENDING:
            return
        # Block on an Event that is set as soon as the promise settles
        # either way.
        e = threading.Event()
        self.addCallback(lambda v: e.set())
        self.addErrback(lambda r: e.set())
        e.wait(timeout)
    def addCallback(self, f):
        """
        Add a callback for when this promise is fulfilled. Note that
        if you intend to use the value of the promise somehow in
        the callback, it is more convenient to use the 'then' method.
        """
        self._callbacks.append(f)
    def addErrback(self, f):
        """
        Add a callback for when this promise is rejected. Note that
        if you intend to use the rejection reason of the promise
        somehow in the callback, it is more convenient to use
        the 'then' method.
        """
        self._errbacks.append(f)
    def then(self, success=None, failure=None):
        """
        This method takes two optional arguments.  The first argument
        is used if the "self promise" is fulfilled and the other is
        used if the "self promise" is rejected.  In either case, this
        method returns another promise that effectively represents
        the result of either the first of the second argument (in the
        case that the "self promise" is fulfilled or rejected,
        respectively).

        Each argument can be either:
          * None - Meaning no action is taken
          * A function - which will be called with either the value
            of the "self promise" or the reason for rejection of
            the "self promise". The function may return:
            * A value - which will be used to fulfill the promise
              returned by this method.
            * A promise - which, when fulfilled or rejected, will
              cascade its value or reason to the promise returned
              by this method.
          * A value - which will be assigned as either the value
            or the reason for the promise returned by this method
            when the "self promise" is either fulfilled or rejected,
            respectively.
        """
        ret = Promise()
        def callAndFulfill(v):
            """
            A callback to be invoked if the "self promise"
            is fulfilled.
            """
            try:
                # From 3.2.1, don't call non-functions values
                if _isFunction(success):
                    newvalue = success(v)
                    if _isPromise(newvalue):
                        newvalue.then(lambda v: ret.fulfill(v),
                                      lambda r: ret.reject(r))
                    else:
                        ret.fulfill(newvalue)
                elif success!=None:
                    # From 3.2.6.4
                    ret.fulfill(v)
                else:
                    pass
            except Exception as e:
                ret.reject(e)
        def callAndReject(r):
            """
            A callback to be invoked if the "self promise"
            is rejected.
            """
            try:
                if _isFunction(failure):
                    newvalue = failure(r)
                    if _isPromise(newvalue):
                        newvalue.then(lambda v: ret.fulfill(v),
                                      lambda r: ret.reject(r))
                    else:
                        ret.fulfill(newvalue)
                elif failure!=None:
                    # From 3.2.6.5
                    ret.reject(r)
                else:
                    pass
            except Exception as e:
                ret.reject(e)
        if self._state==self.PENDING:
            """
            If this is still pending, then add callbacks to the
            existing promise that call either the success or
            rejected functions supplied and then fulfill the
            promise being returned by this method
            """
            if success!=None:
                self._callbacks.append(callAndFulfill)
            if failure!=None:
                self._errbacks.append(callAndReject)
        elif self._state==self.FULFILLED:
            """
            If this promise was already fulfilled, then
            we need to use the first argument to this method
            to determine the value to use in fulfilling the
            promise that we return from this method.
            """
            try:
                if _isFunction(success):
                    newvalue = success(self.value)
                    if _isPromise(newvalue):
                        newvalue.then(lambda v: ret.fulfill(v),
                                      lambda r: ret.reject(r))
                    else:
                        ret.fulfill(newvalue)
                elif success!=None:
                    # From 3.2.6.4
                    ret.fulfill(self.value)
                else:
                    pass
            except Exception as e:
                ret.reject(e)
        elif self._state==self.REJECTED:
            """
            If this promise was already rejected, then
            we need to use the second argument to this method
            to determine the value to use in fulfilling the
            promise that we return from this method.
            """
            try:
                if _isFunction(failure):
                    newvalue = failure(self.reason)
                    if _isPromise(newvalue):
                        newvalue.then(lambda v: ret.fulfill(v),
                                      lambda r: ret.reject(r))
                    else:
                        ret.fulfill(newvalue)
                elif failure!=None:
                    # From 3.2.6.5
                    ret.reject(self.reason)
                else:
                    pass
            except Exception as e:
                ret.reject(e)
        return ret
def _isFunction(v):
"""
A utility function to determine if the specified
value is a function.
"""
if v==None:
return False
if hasattr(v, "__call__"):
return True
return False
def listPromise(*args):
    """
    A special function that takes a bunch of promises
    and turns them into a promise for a vector of values.
    In other words, this turns a list of promises for values
    into a promise for a list of values.
    """
    ret = Promise()
    def handleSuccess(v, ret):
        # Only fulfill once every input promise has a value.
        for arg in args:
            if not arg.isFulfilled():
                return
        # FIX: build a real list. Under Python 3, map() returns a lazy
        # iterator, which would hand consumers a one-shot iterable
        # instead of the promised list of values.
        value = [p.value for p in args]
        ret.fulfill(value)
    for arg in args:
        arg.addCallback(lambda v: handleSuccess(v, ret))
        arg.addErrback(lambda r: ret.reject(r))
    # Check to see if all the promises are already fulfilled
    handleSuccess(None, ret)
    return ret
def dictPromise(m):
    """
    A special function that takes a dictionary of promises
    and turns them into a promise for a dictionary of values.
    In other words, this turns a dictionary of promises for values
    into a promise for a dictionary of values.
    """
    result = Promise()
    def maybe_fulfill(_, target):
        # Wait until every promise in the mapping has settled successfully.
        for pending in m.values():
            if not pending.isFulfilled():
                return
        collected = {}
        for key in m:
            collected[key] = m[key].value
        target.fulfill(collected)
    for pending in m.values():
        pending.addCallback(lambda v: maybe_fulfill(v, result))
        pending.addErrback(lambda r: result.reject(r))
    # Handle the case where everything was already fulfilled up front.
    maybe_fulfill(None, result)
    return result
class BackgroundThread(Thread):
    # Worker thread that runs ``func`` and settles ``promise`` with the
    # result (fulfill) or with any raised exception (reject).
    def __init__(self, promise, func):
        self.promise = promise;
        self.func = func;
        Thread.__init__(self)
    def run(self):
        try:
            val = self.func()
            self.promise.fulfill(val)
        except Exception as e:
            # Rejection carries the exception object itself as the reason.
            self.promise.reject(e)
def background(f):
    """Execute *f* on a new thread, returning a Promise for its result."""
    pending = Promise()
    worker = BackgroundThread(pending, f)
    worker.start()
    return pending
def spawn(f):
    """Run *f* in a gevent greenlet, returning a Promise for its result.

    Requires gevent to be importable; the import is deferred so the rest
    of the module works without it.
    """
    # NOTE: the local ``spawn`` import deliberately shadows this
    # function's own name inside the body.
    from gevent import spawn
    def process(p, f):
        try:
            val = f()
            p.fulfill(val)
        except Exception as e:
            p.reject(e)
    p = Promise()
    g = spawn(lambda: process(p, f))
    return p
|
jtmoulia/switchboard-python | aplus/__init__.py | listPromise | python | def listPromise(*args):
ret = Promise()
def handleSuccess(v, ret):
for arg in args:
if not arg.isFulfilled():
return
value = map(lambda p: p.value, args)
ret.fulfill(value)
for arg in args:
arg.addCallback(lambda v: handleSuccess(v, ret))
arg.addErrback(lambda r: ret.reject(r))
# Check to see if all the promises are already fulfilled
handleSuccess(None, ret)
return ret | A special function that takes a bunch of promises
and turns them into a promise for a vector of values.
In other words, this turns an list of promises for values
into a promise for a list of values. | train | https://github.com/jtmoulia/switchboard-python/blob/c9b0cb74cb12a64385465091be633e78d39f08b1/aplus/__init__.py#L273-L297 | [
"def handleSuccess(v, ret):\n for arg in args:\n if not arg.isFulfilled():\n return\n\n value = map(lambda p: p.value, args)\n ret.fulfill(value)\n"
] | from threading import Thread
class Promise:
    """
    This is a class that attempts to comply with the
    Promises/A+ specification and test suite:
    http://promises-aplus.github.io/promises-spec/

    A promise starts PENDING and settles exactly once, either to
    FULFILLED (carrying ``value``) or REJECTED (carrying ``reason``).
    NOTE(review): state transitions are guarded only by ``assert``;
    concurrent fulfill/reject calls from multiple threads can race --
    confirm callers serialize settlement.
    """
    # These are the potential states of a promise
    PENDING = -1
    REJECTED = 0
    FULFILLED = 1
    def __init__(self):
        """
        Initialize the Promise into a pending state.
        """
        self._state = self.PENDING;
        self.value = None;      # set by fulfill()
        self.reason = None;     # set by reject()
        self._callbacks = [];   # invoked (once) on fulfillment
        self._errbacks = [];    # invoked (once) on rejection
    def fulfill(self, value):
        """
        Fulfill the promise with a given value.

        May only be called once, while still pending (enforced by the
        assert).  Every queued callback is invoked with *value*;
        exceptions raised by callbacks are swallowed.
        """
        assert self._state==self.PENDING
        self._state=self.FULFILLED;
        self.value = value
        for callback in self._callbacks:
            try:
                callback(value)
            except Exception:
                # Ignore errors in callbacks
                pass
        # We will never call these callbacks again, so allow
        # them to be garbage collected. This is important since
        # they probably include closures which are binding variables
        # that might otherwise be garbage collected.
        self._callbacks = []
    def reject(self, reason):
        """
        Reject this promise for a given reason.

        May only be called once, while still pending (enforced by the
        assert).  Every queued errback is invoked with *reason*;
        exceptions raised by errbacks are swallowed.
        """
        assert self._state==self.PENDING
        self._state=self.REJECTED;
        self.reason = reason
        for errback in self._errbacks:
            try:
                errback(reason)
            except Exception:
                # Ignore errors in callbacks
                pass
        # We will never call these errbacks again, so allow
        # them to be garbage collected. This is important since
        # they probably include closures which are binding variables
        # that might otherwise be garbage collected.
        self._errbacks = []
    def isPending(self):
        """Indicate whether the Promise is still pending."""
        return self._state==self.PENDING
    def isFulfilled(self):
        """Indicate whether the Promise has been fulfilled."""
        return self._state==self.FULFILLED
    def isRejected(self):
        """Indicate whether the Promise has been rejected."""
        return self._state==self.REJECTED
    def get(self, timeout=None):
        """Get the value of the promise, waiting if necessary.

        Raises ValueError if the promise is not fulfilled once the
        wait finishes.  NOTE(review): a rejection is NOT re-raised as
        the original reason, and a timeout surfaces as the same
        ValueError -- callers cannot distinguish the two.
        """
        self.wait(timeout)
        if self._state==self.FULFILLED:
            return self.value
        else:
            raise ValueError("Calculation didn't yield a value")
    def wait(self, timeout=None):
        """
        An implementation of the wait method which doesn't involve
        polling but instead utilizes a "real" synchronization
        scheme: an Event set by either settlement path.
        """
        import threading
        if self._state!=self.PENDING:
            return
        e = threading.Event()
        self.addCallback(lambda v: e.set())
        self.addErrback(lambda r: e.set())
        e.wait(timeout)
    def addCallback(self, f):
        """
        Add a callback for when this promise is fulfilled. Note that
        if you intend to use the value of the promise somehow in
        the callback, it is more convenient to use the 'then' method.
        """
        self._callbacks.append(f)
    def addErrback(self, f):
        """
        Add a callback for when this promise is rejected. Note that
        if you intend to use the rejection reason of the promise
        somehow in the callback, it is more convenient to use
        the 'then' method.
        """
        self._errbacks.append(f)
    def then(self, success=None, failure=None):
        """
        This method takes two optional arguments. The first argument
        is used if the "self promise" is fulfilled and the other is
        used if the "self promise" is rejected. In either case, this
        method returns another promise that effectively represents
        the result of either the first or the second argument (in the
        case that the "self promise" is fulfilled or rejected,
        respectively).
        Each argument can be either:
          * None - Meaning no action is taken
          * A function - which will be called with either the value
            of the "self promise" or the reason for rejection of
            the "self promise". The function may return:
              * A value - which will be used to fulfill the promise
                returned by this method.
              * A promise - which, when fulfilled or rejected, will
                cascade its value or reason to the promise returned
                by this method.
          * A value - which will be assigned as either the value
            or the reason for the promise returned by this method
            when the "self promise" is either fulfilled or rejected,
            respectively.
        """
        ret = Promise()
        def callAndFulfill(v):
            """
            A callback to be invoked if the "self promise"
            is fulfilled.
            """
            try:
                # From 3.2.1, don't call non-functions values
                if _isFunction(success):
                    newvalue = success(v)
                    if _isPromise(newvalue):
                        newvalue.then(lambda v: ret.fulfill(v),
                                      lambda r: ret.reject(r))
                    else:
                        ret.fulfill(newvalue)
                elif success!=None:
                    # From 3.2.6.4
                    ret.fulfill(v)
                else:
                    pass
            except Exception as e:
                ret.reject(e)
        def callAndReject(r):
            """
            A callback to be invoked if the "self promise"
            is rejected.
            """
            try:
                if _isFunction(failure):
                    newvalue = failure(r)
                    if _isPromise(newvalue):
                        newvalue.then(lambda v: ret.fulfill(v),
                                      lambda r: ret.reject(r))
                    else:
                        ret.fulfill(newvalue)
                elif failure!=None:
                    # From 3.2.6.5
                    ret.reject(r)
                else:
                    pass
            except Exception as e:
                ret.reject(e)
        if self._state==self.PENDING:
            """
            If this is still pending, then add callbacks to the
            existing promise that call either the success or
            rejected functions supplied and then fulfill the
            promise being returned by this method
            """
            # NOTE(review): when only one handler is supplied, the
            # other outcome never settles ``ret`` (it stays pending
            # forever); this mirrors the settled branches below.
            if success!=None:
                self._callbacks.append(callAndFulfill)
            if failure!=None:
                self._errbacks.append(callAndReject)
        elif self._state==self.FULFILLED:
            """
            If this promise was already fulfilled, then
            we need to use the first argument to this method
            to determine the value to use in fulfilling the
            promise that we return from this method.
            """
            try:
                if _isFunction(success):
                    newvalue = success(self.value)
                    if _isPromise(newvalue):
                        newvalue.then(lambda v: ret.fulfill(v),
                                      lambda r: ret.reject(r))
                    else:
                        ret.fulfill(newvalue)
                elif success!=None:
                    # From 3.2.6.4
                    ret.fulfill(self.value)
                else:
                    pass
            except Exception as e:
                ret.reject(e)
        elif self._state==self.REJECTED:
            """
            If this promise was already rejected, then
            we need to use the second argument to this method
            to determine the value to use in fulfilling the
            promise that we return from this method.
            """
            try:
                if _isFunction(failure):
                    newvalue = failure(self.reason)
                    if _isPromise(newvalue):
                        newvalue.then(lambda v: ret.fulfill(v),
                                      lambda r: ret.reject(r))
                    else:
                        ret.fulfill(newvalue)
                elif failure!=None:
                    # From 3.2.6.5
                    ret.reject(self.reason)
                else:
                    pass
            except Exception as e:
                ret.reject(e)
        return ret
def _isFunction(v):
"""
A utility function to determine if the specified
value is a function.
"""
if v==None:
return False
if hasattr(v, "__call__"):
return True
return False
def _isPromise(obj):
"""
A utility function to determine if the specified
object is a promise using "duck typing".
"""
return hasattr(obj, "fulfill") and \
_isFunction(getattr(obj, "fulfill")) and \
hasattr(obj, "reject") and \
_isFunction(getattr(obj, "reject")) and \
hasattr(obj, "then") and \
_isFunction(getattr(obj, "then"))
def dictPromise(m):
    """
    Turn a dictionary of promises into a promise for a dictionary of
    values: the returned promise fulfills with ``{key: value}`` once
    every promise in *m* has fulfilled, and rejects with the first
    rejection reason otherwise.
    """
    ret = Promise()
    def handleSuccess(v, ret):
        # Settle at most once.  Without this guard, a promise that
        # fulfills between callback registration below and the final
        # synchronous handleSuccess(None, ret) check would trigger a
        # second ret.fulfill() and trip its PENDING assertion.
        if not ret.isPending():
            return
        for p in m.values():
            if not p.isFulfilled():
                return
        value = {}
        for k in m:
            value[k] = m[k].value
        ret.fulfill(value)
    def handleFailure(r, ret):
        # Only the first rejection wins; later rejections (or a
        # rejection after fulfillment) are ignored instead of
        # double-settling ret.
        if ret.isPending():
            ret.reject(r)
    for p in m.values():
        p.addCallback(lambda v: handleSuccess(v, ret))
        p.addErrback(lambda r: handleFailure(r, ret))
    # Cover the case where every promise was already fulfilled before
    # registration: addCallback on a settled Promise never fires.
    handleSuccess(None, ret)
    return ret
class BackgroundThread(Thread):
    """A thread that runs a callable and settles a promise with its result."""
    def __init__(self, promise, func):
        """Remember the target *promise* and the zero-argument *func*."""
        Thread.__init__(self)
        self.promise = promise
        self.func = func
    def run(self):
        """Invoke the callable: fulfill on success, reject on any exception."""
        try:
            self.promise.fulfill(self.func())
        except Exception as e:
            self.promise.reject(e)
def background(f):
    """Run *f* on a new BackgroundThread and return a promise for its result."""
    promise = Promise()
    worker = BackgroundThread(promise, f)
    worker.start()
    return promise
def spawn(f):
    """Run *f* on a gevent greenlet and return a promise for its result.

    Requires the third-party ``gevent`` package; the import is deferred
    into the function body so the rest of the module works without it.
    """
    # NOTE: the local name ``spawn`` (gevent's) shadows this function
    # inside this scope only.
    from gevent import spawn
    def process(p, f):
        # Runs on the greenlet: settle the promise with f()'s outcome.
        try:
            val = f()
            p.fulfill(val)
        except Exception as e:
            p.reject(e)
    p = Promise()
    g = spawn(lambda: process(p, f))
    return p
|
jtmoulia/switchboard-python | aplus/__init__.py | dictPromise | python | def dictPromise(m):
ret = Promise()
def handleSuccess(v, ret):
for p in m.values():
if not p.isFulfilled():
return
value = {}
for k in m:
value[k] = m[k].value
ret.fulfill(value)
for p in m.values():
p.addCallback(lambda v: handleSuccess(v, ret))
p.addErrback(lambda r: ret.reject(r))
# Check to see if all the promises are already fulfilled
handleSuccess(None, ret)
return ret | A special function that takes a dictionary of promises
and turns them into a promise for a dictionary of values.
In other words, this turns an dictionary of promises for values
into a promise for a dictionary of values. | train | https://github.com/jtmoulia/switchboard-python/blob/c9b0cb74cb12a64385465091be633e78d39f08b1/aplus/__init__.py#L299-L325 | [
"def handleSuccess(v, ret):\n for p in m.values():\n if not p.isFulfilled():\n return\n\n value = {}\n for k in m:\n value[k] = m[k].value\n ret.fulfill(value)\n"
] | from threading import Thread
class Promise:
"""
This is a class that attempts to comply with the
Promises/A+ specification and test suite:
http://promises-aplus.github.io/promises-spec/
"""
# These are the potential states of a promise
PENDING = -1
REJECTED = 0
FULFILLED = 1
def __init__(self):
"""
Initialize the Promise into a pending state.
"""
self._state = self.PENDING;
self.value = None;
self.reason = None;
self._callbacks = [];
self._errbacks = [];
def fulfill(self, value):
"""
Fulfill the promise with a given value.
"""
assert self._state==self.PENDING
self._state=self.FULFILLED;
self.value = value
for callback in self._callbacks:
try:
callback(value)
except Exception:
# Ignore errors in callbacks
pass
# We will never call these callbacks again, so allow
# them to be garbage collected. This is important since
# they probably include closures which are binding variables
# that might otherwise be garbage collected.
self._callbacks = []
def reject(self, reason):
"""
Reject this promise for a given reason.
"""
assert self._state==self.PENDING
self._state=self.REJECTED;
self.reason = reason
for errback in self._errbacks:
try:
errback(reason)
except Exception:
# Ignore errors in callbacks
pass
# We will never call these errbacks again, so allow
# them to be garbage collected. This is important since
# they probably include closures which are binding variables
# that might otherwise be garbage collected.
self._errbacks = []
def isPending(self):
"""Indicate whether the Promise is still pending."""
return self._state==self.PENDING
def isFulfilled(self):
"""Indicate whether the Promise has been fulfilled."""
return self._state==self.FULFILLED
def isRejected(self):
"""Indicate whether the Promise has been rejected."""
return self._state==self.REJECTED
def get(self, timeout=None):
"""Get the value of the promise, waiting if necessary."""
self.wait(timeout)
if self._state==self.FULFILLED:
return self.value
else:
raise ValueError("Calculation didn't yield a value")
def wait(self, timeout=None):
"""
An implementation of the wait method which doesn't involve
polling but instead utilizes a "real" synchronization
scheme.
"""
import threading
if self._state!=self.PENDING:
return
e = threading.Event()
self.addCallback(lambda v: e.set())
self.addErrback(lambda r: e.set())
e.wait(timeout)
def addCallback(self, f):
"""
Add a callback for when this promis is fulfilled. Note that
if you intend to use the value of the promise somehow in
the callback, it is more convenient to use the 'then' method.
"""
self._callbacks.append(f)
def addErrback(self, f):
"""
Add a callback for when this promis is rejected. Note that
if you intend to use the rejection reason of the promise
somehow in the callback, it is more convenient to use
the 'then' method.
"""
self._errbacks.append(f)
def then(self, success=None, failure=None):
"""
This method takes two optional arguments. The first argument
is used if the "self promise" is fulfilled and the other is
used if the "self promise" is rejected. In either case, this
method returns another promise that effectively represents
the result of either the first of the second argument (in the
case that the "self promise" is fulfilled or rejected,
respectively).
Each argument can be either:
* None - Meaning no action is taken
* A function - which will be called with either the value
of the "self promise" or the reason for rejection of
the "self promise". The function may return:
* A value - which will be used to fulfill the promise
returned by this method.
* A promise - which, when fulfilled or rejected, will
cascade its value or reason to the promise returned
by this method.
* A value - which will be assigned as either the value
or the reason for the promise returned by this method
when the "self promise" is either fulfilled or rejected,
respectively.
"""
ret = Promise()
def callAndFulfill(v):
"""
A callback to be invoked if the "self promise"
is fulfilled.
"""
try:
# From 3.2.1, don't call non-functions values
if _isFunction(success):
newvalue = success(v)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif success!=None:
# From 3.2.6.4
ret.fulfill(v)
else:
pass
except Exception as e:
ret.reject(e)
def callAndReject(r):
"""
A callback to be invoked if the "self promise"
is rejected.
"""
try:
if _isFunction(failure):
newvalue = failure(r)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif failure!=None:
# From 3.2.6.5
ret.reject(r)
else:
pass
except Exception as e:
ret.reject(e)
if self._state==self.PENDING:
"""
If this is still pending, then add callbacks to the
existing promise that call either the success or
rejected functions supplied and then fulfill the
promise being returned by this method
"""
if success!=None:
self._callbacks.append(callAndFulfill)
if failure!=None:
self._errbacks.append(callAndReject)
elif self._state==self.FULFILLED:
"""
If this promise was already fulfilled, then
we need to use the first argument to this method
to determine the value to use in fulfilling the
promise that we return from this method.
"""
try:
if _isFunction(success):
newvalue = success(self.value)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif success!=None:
# From 3.2.6.4
ret.fulfill(self.value)
else:
pass
except Exception as e:
ret.reject(e)
elif self._state==self.REJECTED:
"""
If this promise was already rejected, then
we need to use the second argument to this method
to determine the value to use in fulfilling the
promise that we return from this method.
"""
try:
if _isFunction(failure):
newvalue = failure(self.reason)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif failure!=None:
# From 3.2.6.5
ret.reject(self.reason)
else:
pass
except Exception as e:
ret.reject(e)
return ret
def _isFunction(v):
"""
A utility function to determine if the specified
value is a function.
"""
if v==None:
return False
if hasattr(v, "__call__"):
return True
return False
def _isPromise(obj):
"""
A utility function to determine if the specified
object is a promise using "duck typing".
"""
return hasattr(obj, "fulfill") and \
_isFunction(getattr(obj, "fulfill")) and \
hasattr(obj, "reject") and \
_isFunction(getattr(obj, "reject")) and \
hasattr(obj, "then") and \
_isFunction(getattr(obj, "then"))
def listPromise(*args):
    """
    Turn a sequence of promises into a promise for a list of values:
    the returned promise fulfills with ``[p.value for p in args]`` once
    every argument promise has fulfilled, and rejects with the first
    rejection reason otherwise.
    """
    ret = Promise()
    def handleSuccess(v, ret):
        # Settle at most once.  Without this guard, a promise that
        # fulfills between callback registration below and the final
        # synchronous handleSuccess(None, ret) check would trigger a
        # second ret.fulfill() and trip its PENDING assertion.
        if not ret.isPending():
            return
        for arg in args:
            if not arg.isFulfilled():
                return
        # Build a real list: on Python 3, ``map`` returns a lazy
        # one-shot iterator, which would hand callers an exhaustible
        # view instead of the intended list of values.
        value = [p.value for p in args]
        ret.fulfill(value)
    def handleFailure(r, ret):
        # Only the first rejection wins; later rejections (or a
        # rejection after fulfillment) are ignored instead of
        # double-settling ret.
        if ret.isPending():
            ret.reject(r)
    for arg in args:
        arg.addCallback(lambda v: handleSuccess(v, ret))
        arg.addErrback(lambda r: handleFailure(r, ret))
    # Cover the case where every promise was already fulfilled before
    # registration: addCallback on a settled Promise never fires.
    handleSuccess(None, ret)
    return ret
class BackgroundThread(Thread):
def __init__(self, promise, func):
self.promise = promise;
self.func = func;
Thread.__init__(self)
def run(self):
try:
val = self.func()
self.promise.fulfill(val)
except Exception as e:
self.promise.reject(e)
def background(f):
p = Promise()
t = BackgroundThread(p, f)
t.start()
return p
def spawn(f):
from gevent import spawn
def process(p, f):
try:
val = f()
p.fulfill(val)
except Exception as e:
p.reject(e)
p = Promise()
g = spawn(lambda: process(p, f))
return p
|
jtmoulia/switchboard-python | aplus/__init__.py | Promise.fulfill | python | def fulfill(self, value):
assert self._state==self.PENDING
self._state=self.FULFILLED;
self.value = value
for callback in self._callbacks:
try:
callback(value)
except Exception:
# Ignore errors in callbacks
pass
# We will never call these callbacks again, so allow
# them to be garbage collected. This is important since
# they probably include closures which are binding variables
# that might otherwise be garbage collected.
self._callbacks = [] | Fulfill the promise with a given value. | train | https://github.com/jtmoulia/switchboard-python/blob/c9b0cb74cb12a64385465091be633e78d39f08b1/aplus/__init__.py#L26-L45 | null | class Promise:
"""
This is a class that attempts to comply with the
Promises/A+ specification and test suite:
http://promises-aplus.github.io/promises-spec/
"""
# These are the potential states of a promise
PENDING = -1
REJECTED = 0
FULFILLED = 1
def __init__(self):
"""
Initialize the Promise into a pending state.
"""
self._state = self.PENDING;
self.value = None;
self.reason = None;
self._callbacks = [];
self._errbacks = [];
def reject(self, reason):
"""
Reject this promise for a given reason.
"""
assert self._state==self.PENDING
self._state=self.REJECTED;
self.reason = reason
for errback in self._errbacks:
try:
errback(reason)
except Exception:
# Ignore errors in callbacks
pass
# We will never call these errbacks again, so allow
# them to be garbage collected. This is important since
# they probably include closures which are binding variables
# that might otherwise be garbage collected.
self._errbacks = []
def isPending(self):
"""Indicate whether the Promise is still pending."""
return self._state==self.PENDING
def isFulfilled(self):
"""Indicate whether the Promise has been fulfilled."""
return self._state==self.FULFILLED
def isRejected(self):
"""Indicate whether the Promise has been rejected."""
return self._state==self.REJECTED
def get(self, timeout=None):
"""Get the value of the promise, waiting if necessary."""
self.wait(timeout)
if self._state==self.FULFILLED:
return self.value
else:
raise ValueError("Calculation didn't yield a value")
def wait(self, timeout=None):
"""
An implementation of the wait method which doesn't involve
polling but instead utilizes a "real" synchronization
scheme.
"""
import threading
if self._state!=self.PENDING:
return
e = threading.Event()
self.addCallback(lambda v: e.set())
self.addErrback(lambda r: e.set())
e.wait(timeout)
def addCallback(self, f):
"""
Add a callback for when this promis is fulfilled. Note that
if you intend to use the value of the promise somehow in
the callback, it is more convenient to use the 'then' method.
"""
self._callbacks.append(f)
def addErrback(self, f):
"""
Add a callback for when this promis is rejected. Note that
if you intend to use the rejection reason of the promise
somehow in the callback, it is more convenient to use
the 'then' method.
"""
self._errbacks.append(f)
def then(self, success=None, failure=None):
"""
This method takes two optional arguments. The first argument
is used if the "self promise" is fulfilled and the other is
used if the "self promise" is rejected. In either case, this
method returns another promise that effectively represents
the result of either the first of the second argument (in the
case that the "self promise" is fulfilled or rejected,
respectively).
Each argument can be either:
* None - Meaning no action is taken
* A function - which will be called with either the value
of the "self promise" or the reason for rejection of
the "self promise". The function may return:
* A value - which will be used to fulfill the promise
returned by this method.
* A promise - which, when fulfilled or rejected, will
cascade its value or reason to the promise returned
by this method.
* A value - which will be assigned as either the value
or the reason for the promise returned by this method
when the "self promise" is either fulfilled or rejected,
respectively.
"""
ret = Promise()
def callAndFulfill(v):
"""
A callback to be invoked if the "self promise"
is fulfilled.
"""
try:
# From 3.2.1, don't call non-functions values
if _isFunction(success):
newvalue = success(v)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif success!=None:
# From 3.2.6.4
ret.fulfill(v)
else:
pass
except Exception as e:
ret.reject(e)
def callAndReject(r):
"""
A callback to be invoked if the "self promise"
is rejected.
"""
try:
if _isFunction(failure):
newvalue = failure(r)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif failure!=None:
# From 3.2.6.5
ret.reject(r)
else:
pass
except Exception as e:
ret.reject(e)
if self._state==self.PENDING:
"""
If this is still pending, then add callbacks to the
existing promise that call either the success or
rejected functions supplied and then fulfill the
promise being returned by this method
"""
if success!=None:
self._callbacks.append(callAndFulfill)
if failure!=None:
self._errbacks.append(callAndReject)
elif self._state==self.FULFILLED:
"""
If this promise was already fulfilled, then
we need to use the first argument to this method
to determine the value to use in fulfilling the
promise that we return from this method.
"""
try:
if _isFunction(success):
newvalue = success(self.value)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif success!=None:
# From 3.2.6.4
ret.fulfill(self.value)
else:
pass
except Exception as e:
ret.reject(e)
elif self._state==self.REJECTED:
"""
If this promise was already rejected, then
we need to use the second argument to this method
to determine the value to use in fulfilling the
promise that we return from this method.
"""
try:
if _isFunction(failure):
newvalue = failure(self.reason)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif failure!=None:
# From 3.2.6.5
ret.reject(self.reason)
else:
pass
except Exception as e:
ret.reject(e)
return ret
|
jtmoulia/switchboard-python | aplus/__init__.py | Promise.reject | python | def reject(self, reason):
assert self._state==self.PENDING
self._state=self.REJECTED;
self.reason = reason
for errback in self._errbacks:
try:
errback(reason)
except Exception:
# Ignore errors in callbacks
pass
# We will never call these errbacks again, so allow
# them to be garbage collected. This is important since
# they probably include closures which are binding variables
# that might otherwise be garbage collected.
self._errbacks = [] | Reject this promise for a given reason. | train | https://github.com/jtmoulia/switchboard-python/blob/c9b0cb74cb12a64385465091be633e78d39f08b1/aplus/__init__.py#L47-L66 | null | class Promise:
"""
This is a class that attempts to comply with the
Promises/A+ specification and test suite:
http://promises-aplus.github.io/promises-spec/
"""
# These are the potential states of a promise
PENDING = -1
REJECTED = 0
FULFILLED = 1
def __init__(self):
"""
Initialize the Promise into a pending state.
"""
self._state = self.PENDING;
self.value = None;
self.reason = None;
self._callbacks = [];
self._errbacks = [];
def fulfill(self, value):
"""
Fulfill the promise with a given value.
"""
assert self._state==self.PENDING
self._state=self.FULFILLED;
self.value = value
for callback in self._callbacks:
try:
callback(value)
except Exception:
# Ignore errors in callbacks
pass
# We will never call these callbacks again, so allow
# them to be garbage collected. This is important since
# they probably include closures which are binding variables
# that might otherwise be garbage collected.
self._callbacks = []
def isPending(self):
"""Indicate whether the Promise is still pending."""
return self._state==self.PENDING
def isFulfilled(self):
"""Indicate whether the Promise has been fulfilled."""
return self._state==self.FULFILLED
def isRejected(self):
"""Indicate whether the Promise has been rejected."""
return self._state==self.REJECTED
def get(self, timeout=None):
"""Get the value of the promise, waiting if necessary."""
self.wait(timeout)
if self._state==self.FULFILLED:
return self.value
else:
raise ValueError("Calculation didn't yield a value")
def wait(self, timeout=None):
"""
An implementation of the wait method which doesn't involve
polling but instead utilizes a "real" synchronization
scheme.
"""
import threading
if self._state!=self.PENDING:
return
e = threading.Event()
self.addCallback(lambda v: e.set())
self.addErrback(lambda r: e.set())
e.wait(timeout)
def addCallback(self, f):
"""
Add a callback for when this promis is fulfilled. Note that
if you intend to use the value of the promise somehow in
the callback, it is more convenient to use the 'then' method.
"""
self._callbacks.append(f)
def addErrback(self, f):
"""
Add a callback for when this promis is rejected. Note that
if you intend to use the rejection reason of the promise
somehow in the callback, it is more convenient to use
the 'then' method.
"""
self._errbacks.append(f)
def then(self, success=None, failure=None):
"""
This method takes two optional arguments. The first argument
is used if the "self promise" is fulfilled and the other is
used if the "self promise" is rejected. In either case, this
method returns another promise that effectively represents
the result of either the first of the second argument (in the
case that the "self promise" is fulfilled or rejected,
respectively).
Each argument can be either:
* None - Meaning no action is taken
* A function - which will be called with either the value
of the "self promise" or the reason for rejection of
the "self promise". The function may return:
* A value - which will be used to fulfill the promise
returned by this method.
* A promise - which, when fulfilled or rejected, will
cascade its value or reason to the promise returned
by this method.
* A value - which will be assigned as either the value
or the reason for the promise returned by this method
when the "self promise" is either fulfilled or rejected,
respectively.
"""
ret = Promise()
def callAndFulfill(v):
"""
A callback to be invoked if the "self promise"
is fulfilled.
"""
try:
# From 3.2.1, don't call non-functions values
if _isFunction(success):
newvalue = success(v)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif success!=None:
# From 3.2.6.4
ret.fulfill(v)
else:
pass
except Exception as e:
ret.reject(e)
def callAndReject(r):
"""
A callback to be invoked if the "self promise"
is rejected.
"""
try:
if _isFunction(failure):
newvalue = failure(r)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif failure!=None:
# From 3.2.6.5
ret.reject(r)
else:
pass
except Exception as e:
ret.reject(e)
if self._state==self.PENDING:
"""
If this is still pending, then add callbacks to the
existing promise that call either the success or
rejected functions supplied and then fulfill the
promise being returned by this method
"""
if success!=None:
self._callbacks.append(callAndFulfill)
if failure!=None:
self._errbacks.append(callAndReject)
elif self._state==self.FULFILLED:
"""
If this promise was already fulfilled, then
we need to use the first argument to this method
to determine the value to use in fulfilling the
promise that we return from this method.
"""
try:
if _isFunction(success):
newvalue = success(self.value)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif success!=None:
# From 3.2.6.4
ret.fulfill(self.value)
else:
pass
except Exception as e:
ret.reject(e)
elif self._state==self.REJECTED:
"""
If this promise was already rejected, then
we need to use the second argument to this method
to determine the value to use in fulfilling the
promise that we return from this method.
"""
try:
if _isFunction(failure):
newvalue = failure(self.reason)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif failure!=None:
# From 3.2.6.5
ret.reject(self.reason)
else:
pass
except Exception as e:
ret.reject(e)
return ret
|
jtmoulia/switchboard-python | aplus/__init__.py | Promise.get | python | def get(self, timeout=None):
self.wait(timeout)
if self._state==self.FULFILLED:
return self.value
else:
raise ValueError("Calculation didn't yield a value") | Get the value of the promise, waiting if necessary. | train | https://github.com/jtmoulia/switchboard-python/blob/c9b0cb74cb12a64385465091be633e78d39f08b1/aplus/__init__.py#L80-L86 | [
"def wait(self, timeout=None):\n \"\"\"\n An implementation of the wait method which doesn't involve\n polling but instead utilizes a \"real\" synchronization\n scheme.\n \"\"\"\n import threading\n\n if self._state!=self.PENDING:\n return\n\n e = threading.Event()\n self.addCallback(lambda v: e.set())\n self.addErrback(lambda r: e.set())\n e.wait(timeout)\n"
] | class Promise:
"""
This is a class that attempts to comply with the
Promises/A+ specification and test suite:
http://promises-aplus.github.io/promises-spec/
"""
# These are the potential states of a promise
PENDING = -1
REJECTED = 0
FULFILLED = 1
def __init__(self):
"""
Initialize the Promise into a pending state.
"""
self._state = self.PENDING;
self.value = None;
self.reason = None;
self._callbacks = [];
self._errbacks = [];
def fulfill(self, value):
"""
Fulfill the promise with a given value.
"""
assert self._state==self.PENDING
self._state=self.FULFILLED;
self.value = value
for callback in self._callbacks:
try:
callback(value)
except Exception:
# Ignore errors in callbacks
pass
# We will never call these callbacks again, so allow
# them to be garbage collected. This is important since
# they probably include closures which are binding variables
# that might otherwise be garbage collected.
self._callbacks = []
def reject(self, reason):
"""
Reject this promise for a given reason.
"""
assert self._state==self.PENDING
self._state=self.REJECTED;
self.reason = reason
for errback in self._errbacks:
try:
errback(reason)
except Exception:
# Ignore errors in callbacks
pass
# We will never call these errbacks again, so allow
# them to be garbage collected. This is important since
# they probably include closures which are binding variables
# that might otherwise be garbage collected.
self._errbacks = []
def isPending(self):
"""Indicate whether the Promise is still pending."""
return self._state==self.PENDING
def isFulfilled(self):
"""Indicate whether the Promise has been fulfilled."""
return self._state==self.FULFILLED
def isRejected(self):
"""Indicate whether the Promise has been rejected."""
return self._state==self.REJECTED
def wait(self, timeout=None):
"""
An implementation of the wait method which doesn't involve
polling but instead utilizes a "real" synchronization
scheme.
"""
import threading
if self._state!=self.PENDING:
return
e = threading.Event()
self.addCallback(lambda v: e.set())
self.addErrback(lambda r: e.set())
e.wait(timeout)
def addCallback(self, f):
"""
Add a callback for when this promis is fulfilled. Note that
if you intend to use the value of the promise somehow in
the callback, it is more convenient to use the 'then' method.
"""
self._callbacks.append(f)
def addErrback(self, f):
"""
Add a callback for when this promis is rejected. Note that
if you intend to use the rejection reason of the promise
somehow in the callback, it is more convenient to use
the 'then' method.
"""
self._errbacks.append(f)
def then(self, success=None, failure=None):
    """Chain handlers onto this promise (Promises/A+ `then`).

    `success` / `failure` may each be None, a callable, or a plain
    value:

    * None — no action is taken for that outcome.
    * A callable — invoked with the value (or rejection reason); its
      result, a value or another promise, determines how the promise
      returned by this method settles.
    * Any other value — passed straight through to the returned
      promise (fulfilling it on success, rejecting it on failure).

    Returns:
        A new Promise representing the outcome of the handler.
    """
    out = Promise()

    def cascade(result):
        # Settle `out` from a handler's return value, flattening a
        # returned promise into `out` per the A+ resolution rules.
        if _isPromise(result):
            result.then(lambda v: out.fulfill(v),
                        lambda r: out.reject(r))
        else:
            out.fulfill(result)

    def on_fulfilled(value):
        # Runs when `self` is (or becomes) fulfilled.
        try:
            # Per 3.2.1, non-callable handler values are not invoked.
            if _isFunction(success):
                cascade(success(value))
            elif success is not None:
                # 3.2.6.4: pass the value through unchanged.
                out.fulfill(value)
        except Exception as exc:
            out.reject(exc)

    def on_rejected(reason):
        # Runs when `self` is (or becomes) rejected.
        try:
            if _isFunction(failure):
                cascade(failure(reason))
            elif failure is not None:
                # 3.2.6.5: pass the reason through unchanged.
                out.reject(reason)
        except Exception as exc:
            out.reject(exc)

    if self._state == self.PENDING:
        # Not settled yet: queue the handlers for later delivery.
        if success is not None:
            self._callbacks.append(on_fulfilled)
        if failure is not None:
            self._errbacks.append(on_rejected)
    elif self._state == self.FULFILLED:
        # Already fulfilled: deliver the stored value immediately.
        on_fulfilled(self.value)
    elif self._state == self.REJECTED:
        # Already rejected: deliver the stored reason immediately.
        on_rejected(self.reason)
    return out
|
jtmoulia/switchboard-python | aplus/__init__.py | Promise.wait | python | def wait(self, timeout=None):
import threading
if self._state!=self.PENDING:
return
e = threading.Event()
self.addCallback(lambda v: e.set())
self.addErrback(lambda r: e.set())
e.wait(timeout) | An implementation of the wait method which doesn't involve
polling but instead utilizes a "real" synchronization
scheme. | train | https://github.com/jtmoulia/switchboard-python/blob/c9b0cb74cb12a64385465091be633e78d39f08b1/aplus/__init__.py#L88-L102 | [
"def addCallback(self, f):\n \"\"\"\n Add a callback for when this promis is fulfilled. Note that\n if you intend to use the value of the promise somehow in\n the callback, it is more convenient to use the 'then' method.\n \"\"\"\n self._callbacks.append(f)\n",
"def addErrback(self, f):\n \"\"\"\n Add a callback for when this promis is rejected. Note that\n if you intend to use the rejection reason of the promise\n somehow in the callback, it is more convenient to use\n the 'then' method.\n \"\"\"\n self._errbacks.append(f)\n"
] | class Promise:
"""
This is a class that attempts to comply with the
Promises/A+ specification and test suite:
http://promises-aplus.github.io/promises-spec/
"""
# These are the potential states of a promise
PENDING = -1
REJECTED = 0
FULFILLED = 1
def __init__(self):
    """Create a new promise in the pending state with no value,
    no rejection reason, and no registered handlers.
    """
    self._state = self.PENDING
    self.value = None
    self.reason = None
    self._callbacks = []
    self._errbacks = []
def fulfill(self, value):
    """Transition this pending promise to FULFILLED with `value` and
    notify every registered callback.

    Callback errors are swallowed so one misbehaving handler cannot
    prevent the others from running.
    """
    assert self._state == self.PENDING

    self._state = self.FULFILLED
    self.value = value
    for cb in self._callbacks:
        try:
            cb(value)
        except Exception:
            pass  # a misbehaving callback must not affect the others
    # These callbacks can never fire again; drop them so their
    # closures (and whatever they capture) can be garbage collected.
    self._callbacks = []
def reject(self, reason):
    """Transition this pending promise to REJECTED with `reason` and
    notify every registered errback.

    Errback errors are swallowed so one misbehaving handler cannot
    prevent the others from running.
    """
    assert self._state == self.PENDING

    self._state = self.REJECTED
    self.reason = reason
    for eb in self._errbacks:
        try:
            eb(reason)
        except Exception:
            pass  # a misbehaving errback must not affect the others
    # These errbacks can never fire again; drop them so their
    # closures (and whatever they capture) can be garbage collected.
    self._errbacks = []
def isPending(self):
    """Return True while the promise is neither fulfilled nor rejected."""
    return self.PENDING == self._state
def isFulfilled(self):
    """Return True once the promise has been fulfilled."""
    return self.FULFILLED == self._state
def isRejected(self):
    """Return True once the promise has been rejected."""
    return self.REJECTED == self._state
def get(self, timeout=None):
    """Block until the promise settles (up to `timeout` seconds) and
    return its fulfillment value.

    Raises:
        ValueError: if the promise was rejected, or is still pending
            when the timeout expires.
    """
    self.wait(timeout)
    if self._state != self.FULFILLED:
        raise ValueError("Calculation didn't yield a value")
    return self.value
def addCallback(self, f):
"""
Add a callback for when this promis is fulfilled. Note that
if you intend to use the value of the promise somehow in
the callback, it is more convenient to use the 'then' method.
"""
self._callbacks.append(f)
def addErrback(self, f):
"""
Add a callback for when this promis is rejected. Note that
if you intend to use the rejection reason of the promise
somehow in the callback, it is more convenient to use
the 'then' method.
"""
self._errbacks.append(f)
def then(self, success=None, failure=None):
"""
This method takes two optional arguments. The first argument
is used if the "self promise" is fulfilled and the other is
used if the "self promise" is rejected. In either case, this
method returns another promise that effectively represents
the result of either the first of the second argument (in the
case that the "self promise" is fulfilled or rejected,
respectively).
Each argument can be either:
* None - Meaning no action is taken
* A function - which will be called with either the value
of the "self promise" or the reason for rejection of
the "self promise". The function may return:
* A value - which will be used to fulfill the promise
returned by this method.
* A promise - which, when fulfilled or rejected, will
cascade its value or reason to the promise returned
by this method.
* A value - which will be assigned as either the value
or the reason for the promise returned by this method
when the "self promise" is either fulfilled or rejected,
respectively.
"""
ret = Promise()
def callAndFulfill(v):
"""
A callback to be invoked if the "self promise"
is fulfilled.
"""
try:
# From 3.2.1, don't call non-functions values
if _isFunction(success):
newvalue = success(v)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif success!=None:
# From 3.2.6.4
ret.fulfill(v)
else:
pass
except Exception as e:
ret.reject(e)
def callAndReject(r):
"""
A callback to be invoked if the "self promise"
is rejected.
"""
try:
if _isFunction(failure):
newvalue = failure(r)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif failure!=None:
# From 3.2.6.5
ret.reject(r)
else:
pass
except Exception as e:
ret.reject(e)
if self._state==self.PENDING:
"""
If this is still pending, then add callbacks to the
existing promise that call either the success or
rejected functions supplied and then fulfill the
promise being returned by this method
"""
if success!=None:
self._callbacks.append(callAndFulfill)
if failure!=None:
self._errbacks.append(callAndReject)
elif self._state==self.FULFILLED:
"""
If this promise was already fulfilled, then
we need to use the first argument to this method
to determine the value to use in fulfilling the
promise that we return from this method.
"""
try:
if _isFunction(success):
newvalue = success(self.value)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif success!=None:
# From 3.2.6.4
ret.fulfill(self.value)
else:
pass
except Exception as e:
ret.reject(e)
elif self._state==self.REJECTED:
"""
If this promise was already rejected, then
we need to use the second argument to this method
to determine the value to use in fulfilling the
promise that we return from this method.
"""
try:
if _isFunction(failure):
newvalue = failure(self.reason)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif failure!=None:
# From 3.2.6.5
ret.reject(self.reason)
else:
pass
except Exception as e:
ret.reject(e)
return ret
|
jtmoulia/switchboard-python | aplus/__init__.py | Promise.then | python | def then(self, success=None, failure=None):
ret = Promise()
def callAndFulfill(v):
"""
A callback to be invoked if the "self promise"
is fulfilled.
"""
try:
# From 3.2.1, don't call non-functions values
if _isFunction(success):
newvalue = success(v)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif success!=None:
# From 3.2.6.4
ret.fulfill(v)
else:
pass
except Exception as e:
ret.reject(e)
def callAndReject(r):
"""
A callback to be invoked if the "self promise"
is rejected.
"""
try:
if _isFunction(failure):
newvalue = failure(r)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif failure!=None:
# From 3.2.6.5
ret.reject(r)
else:
pass
except Exception as e:
ret.reject(e)
if self._state==self.PENDING:
"""
If this is still pending, then add callbacks to the
existing promise that call either the success or
rejected functions supplied and then fulfill the
promise being returned by this method
"""
if success!=None:
self._callbacks.append(callAndFulfill)
if failure!=None:
self._errbacks.append(callAndReject)
elif self._state==self.FULFILLED:
"""
If this promise was already fulfilled, then
we need to use the first argument to this method
to determine the value to use in fulfilling the
promise that we return from this method.
"""
try:
if _isFunction(success):
newvalue = success(self.value)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif success!=None:
# From 3.2.6.4
ret.fulfill(self.value)
else:
pass
except Exception as e:
ret.reject(e)
elif self._state==self.REJECTED:
"""
If this promise was already rejected, then
we need to use the second argument to this method
to determine the value to use in fulfilling the
promise that we return from this method.
"""
try:
if _isFunction(failure):
newvalue = failure(self.reason)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif failure!=None:
# From 3.2.6.5
ret.reject(self.reason)
else:
pass
except Exception as e:
ret.reject(e)
return ret | This method takes two optional arguments. The first argument
is used if the "self promise" is fulfilled and the other is
used if the "self promise" is rejected. In either case, this
method returns another promise that effectively represents
the result of either the first or the second argument (in the
case that the "self promise" is fulfilled or rejected,
respectively).
Each argument can be either:
* None - Meaning no action is taken
* A function - which will be called with either the value
of the "self promise" or the reason for rejection of
the "self promise". The function may return:
* A value - which will be used to fulfill the promise
returned by this method.
* A promise - which, when fulfilled or rejected, will
cascade its value or reason to the promise returned
by this method.
* A value - which will be assigned as either the value
or the reason for the promise returned by this method
when the "self promise" is either fulfilled or rejected,
respectively. | train | https://github.com/jtmoulia/switchboard-python/blob/c9b0cb74cb12a64385465091be633e78d39f08b1/aplus/__init__.py#L121-L248 | [
"def _isFunction(v):\n \"\"\"\n A utility function to determine if the specified\n value is a function.\n \"\"\"\n if v==None:\n return False\n if hasattr(v, \"__call__\"):\n return True\n return False\n",
"def _isPromise(obj):\n \"\"\"\n A utility function to determine if the specified\n object is a promise using \"duck typing\".\n \"\"\"\n return hasattr(obj, \"fulfill\") and \\\n _isFunction(getattr(obj, \"fulfill\")) and \\\n hasattr(obj, \"reject\") and \\\n _isFunction(getattr(obj, \"reject\")) and \\\n hasattr(obj, \"then\") and \\\n _isFunction(getattr(obj, \"then\"))\n",
"def reject(self, reason):\n \"\"\"\n Reject this promise for a given reason.\n \"\"\"\n assert self._state==self.PENDING\n\n self._state=self.REJECTED;\n self.reason = reason\n for errback in self._errbacks:\n try:\n errback(reason)\n except Exception:\n # Ignore errors in callbacks\n pass\n\n # We will never call these errbacks again, so allow\n # them to be garbage collected. This is important since\n # they probably include closures which are binding variables\n # that might otherwise be garbage collected.\n self._errbacks = []\n"
] | class Promise:
"""
This is a class that attempts to comply with the
Promises/A+ specification and test suite:
http://promises-aplus.github.io/promises-spec/
"""
# These are the potential states of a promise
PENDING = -1
REJECTED = 0
FULFILLED = 1
def __init__(self):
"""
Initialize the Promise into a pending state.
"""
self._state = self.PENDING;
self.value = None;
self.reason = None;
self._callbacks = [];
self._errbacks = [];
def fulfill(self, value):
"""
Fulfill the promise with a given value.
"""
assert self._state==self.PENDING
self._state=self.FULFILLED;
self.value = value
for callback in self._callbacks:
try:
callback(value)
except Exception:
# Ignore errors in callbacks
pass
# We will never call these callbacks again, so allow
# them to be garbage collected. This is important since
# they probably include closures which are binding variables
# that might otherwise be garbage collected.
self._callbacks = []
def reject(self, reason):
"""
Reject this promise for a given reason.
"""
assert self._state==self.PENDING
self._state=self.REJECTED;
self.reason = reason
for errback in self._errbacks:
try:
errback(reason)
except Exception:
# Ignore errors in callbacks
pass
# We will never call these errbacks again, so allow
# them to be garbage collected. This is important since
# they probably include closures which are binding variables
# that might otherwise be garbage collected.
self._errbacks = []
def isPending(self):
"""Indicate whether the Promise is still pending."""
return self._state==self.PENDING
def isFulfilled(self):
"""Indicate whether the Promise has been fulfilled."""
return self._state==self.FULFILLED
def isRejected(self):
"""Indicate whether the Promise has been rejected."""
return self._state==self.REJECTED
def get(self, timeout=None):
"""Get the value of the promise, waiting if necessary."""
self.wait(timeout)
if self._state==self.FULFILLED:
return self.value
else:
raise ValueError("Calculation didn't yield a value")
def wait(self, timeout=None):
    """Block until this promise settles, or until `timeout` seconds
    elapse (None means wait indefinitely).

    Uses a real threading.Event instead of polling, and returns
    immediately if the promise has already settled.
    """
    import threading

    if self._state != self.PENDING:
        return
    done = threading.Event()
    self.addCallback(lambda _value: done.set())
    self.addErrback(lambda _reason: done.set())
    done.wait(timeout)
def addCallback(self, f):
"""
Add a callback for when this promis is fulfilled. Note that
if you intend to use the value of the promise somehow in
the callback, it is more convenient to use the 'then' method.
"""
self._callbacks.append(f)
def addErrback(self, f):
"""
Add a callback for when this promis is rejected. Note that
if you intend to use the rejection reason of the promise
somehow in the callback, it is more convenient to use
the 'then' method.
"""
self._errbacks.append(f)
def then(self, success=None, failure=None):
"""
This method takes two optional arguments. The first argument
is used if the "self promise" is fulfilled and the other is
used if the "self promise" is rejected. In either case, this
method returns another promise that effectively represents
the result of either the first of the second argument (in the
case that the "self promise" is fulfilled or rejected,
respectively).
Each argument can be either:
* None - Meaning no action is taken
* A function - which will be called with either the value
of the "self promise" or the reason for rejection of
the "self promise". The function may return:
* A value - which will be used to fulfill the promise
returned by this method.
* A promise - which, when fulfilled or rejected, will
cascade its value or reason to the promise returned
by this method.
* A value - which will be assigned as either the value
or the reason for the promise returned by this method
when the "self promise" is either fulfilled or rejected,
respectively.
"""
ret = Promise()
def callAndFulfill(v):
"""
A callback to be invoked if the "self promise"
is fulfilled.
"""
try:
# From 3.2.1, don't call non-functions values
if _isFunction(success):
newvalue = success(v)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif success!=None:
# From 3.2.6.4
ret.fulfill(v)
else:
pass
except Exception as e:
ret.reject(e)
def callAndReject(r):
"""
A callback to be invoked if the "self promise"
is rejected.
"""
try:
if _isFunction(failure):
newvalue = failure(r)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif failure!=None:
# From 3.2.6.5
ret.reject(r)
else:
pass
except Exception as e:
ret.reject(e)
if self._state==self.PENDING:
"""
If this is still pending, then add callbacks to the
existing promise that call either the success or
rejected functions supplied and then fulfill the
promise being returned by this method
"""
if success!=None:
self._callbacks.append(callAndFulfill)
if failure!=None:
self._errbacks.append(callAndReject)
elif self._state==self.FULFILLED:
"""
If this promise was already fulfilled, then
we need to use the first argument to this method
to determine the value to use in fulfilling the
promise that we return from this method.
"""
try:
if _isFunction(success):
newvalue = success(self.value)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif success!=None:
# From 3.2.6.4
ret.fulfill(self.value)
else:
pass
except Exception as e:
ret.reject(e)
elif self._state==self.REJECTED:
"""
If this promise was already rejected, then
we need to use the second argument to this method
to determine the value to use in fulfilling the
promise that we return from this method.
"""
try:
if _isFunction(failure):
newvalue = failure(self.reason)
if _isPromise(newvalue):
newvalue.then(lambda v: ret.fulfill(v),
lambda r: ret.reject(r))
else:
ret.fulfill(newvalue)
elif failure!=None:
# From 3.2.6.5
ret.reject(self.reason)
else:
pass
except Exception as e:
ret.reject(e)
return ret
|
def main(cert, key, pushtoken, url):
    """Create and connect the APNS listener worker, then block forever.

    Args:
        cert: path to the APNS public certificate (PEM file).
        key: path to the APNS private key (PEM file).
        pushtoken: device push token to notify, or None for a dry run.
        url: websocket URL of the Switchboard worker interface.
    """
    # Construct outside the try: in the original, an exception raised by
    # the constructor left `worker` unbound, so the handler's
    # `worker.close()` died with a NameError that masked the real error.
    worker = APNSWorker(cert=cert, key=key, pushtoken=pushtoken, url=url)
    try:
        worker.connect()
        worker.run_forever()
    except KeyboardInterrupt:
        # Ctrl-C: close the websocket cleanly instead of crashing out.
        worker.close()
# -*- coding: utf-8 -*-
"""
APNSWorker is a Switchboard worker that collects incoming emails,
uses them to create a push notification, and then sends them to
an iOS device.
./apnsworker.py --help
"""
__author__ = u"Thomas Moulia <jtmoulia@pocketknife.io>"
__copyright__ = u"Copyright © 2014, ThusFresh, Inc. All rights reserved."
import apns
import switchboard
import argparse
import logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
ACCOUNT = 'mail.dispatch.test@gmail.com'
CONN_SPEC = {'host': 'imap.gmail.com',
'port': 993,
'auth': {
'type': 'plain',
'username': ACCOUNT,
'password': 'i>V99JuMVEs;'}};
class APNSWorker(switchboard.Client):
"""A Switchboard worker that will listen for new emails across all
accounts. For each new email it wil fetch additional information,
form it into a push notification, and send it to the client.
"""
def __init__(self, cert, key, pushtoken=None, use_sandbox=True, *args, **kwargs):
super(APNSWorker, self).__init__(*args, **kwargs)
self._pushtoken = pushtoken
self._apns = apns.APNs(use_sandbox=use_sandbox, cert_file=cert, key_file=key)
def opened(self):
"""Connect to the websocket, and ensure the account is connected and
the INBOX is being watched, and then start watchingAll.
"""
def post_setup((cmds, resps)):
"""Post setup callback."""
logger.info("Setup complete, listening...")
self.send_cmds(('connect', CONN_SPEC),
('watchMailboxes', {'account': ACCOUNT,
'list': ['INBOX']}),
('watchAll', {})).then(post_setup)
def received_unsolicited(self, resps):
def post_fetch((cmds, resps)):
"""Post fetch callback."""
try:
for msg in resps[0][1]['list']:
logger.debug("Preparing msg to send: %s", msg)
from1 = msg['from'][0]
from_name = from1.get('name') or from1.get('email', '<unknown>')
notification = "%s - %s" % (from_name, msg['subject'])
payload = apns.Payload(notification, sound='default', badge=1)
if self._pushtoken:
logger.info("Sending push notification: %s", payload)
try:
self._apns.gateway_server.send_notification(
self._pushtoken, payload)
except Exception as e:
logger.error("Error sending push notification: %s", e)
raise
else:
logger.info("-- push notification would be sent: %s --", payload)
except Exception as e:
logger.error("Error: %s", e)
raise
for resp in resps:
if resp[0] == 'newMessage':
args = resp[1]
promise = self.send_cmds(('getMessages',
{'account': args['account'],
'ids': [args['messageId']],
'properties': ['subject', 'from']}))
promise.then(post_fetch)
else:
logger.warning("Unknown unsolicted response: %s", response)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="APNS Worker")
parser.add_argument("--cert", default="cert.pem",
help="the APNS public certificate")
parser.add_argument("--key", default="key.pem",
help="the APNS private key")
parser.add_argument("--pushtoken", default=None,
help="the push token to send emails to")
parser.add_argument("--url", default= "ws://192.168.50.2:8080/workers",
help="the url of the worker websocket interface")
args = parser.parse_args()
main(args.cert, args.key, args.pushtoken, args.url)
|
jtmoulia/switchboard-python | examples/apnsworker.py | APNSWorker.opened | python | def opened(self):
def post_setup((cmds, resps)):
"""Post setup callback."""
logger.info("Setup complete, listening...")
self.send_cmds(('connect', CONN_SPEC),
('watchMailboxes', {'account': ACCOUNT,
'list': ['INBOX']}),
('watchAll', {})).then(post_setup) | Connect to the websocket, and ensure the account is connected and
the INBOX is being watched, and then start watchingAll. | train | https://github.com/jtmoulia/switchboard-python/blob/c9b0cb74cb12a64385465091be633e78d39f08b1/examples/apnsworker.py#L44-L55 | null | class APNSWorker(switchboard.Client):
"""A Switchboard worker that will listen for new emails across all
accounts. For each new email it wil fetch additional information,
form it into a push notification, and send it to the client.
"""
def __init__(self, cert, key, pushtoken=None, use_sandbox=True, *args, **kwargs):
super(APNSWorker, self).__init__(*args, **kwargs)
self._pushtoken = pushtoken
self._apns = apns.APNs(use_sandbox=use_sandbox, cert_file=cert, key_file=key)
def received_unsolicited(self, resps):
def post_fetch((cmds, resps)):
"""Post fetch callback."""
try:
for msg in resps[0][1]['list']:
logger.debug("Preparing msg to send: %s", msg)
from1 = msg['from'][0]
from_name = from1.get('name') or from1.get('email', '<unknown>')
notification = "%s - %s" % (from_name, msg['subject'])
payload = apns.Payload(notification, sound='default', badge=1)
if self._pushtoken:
logger.info("Sending push notification: %s", payload)
try:
self._apns.gateway_server.send_notification(
self._pushtoken, payload)
except Exception as e:
logger.error("Error sending push notification: %s", e)
raise
else:
logger.info("-- push notification would be sent: %s --", payload)
except Exception as e:
logger.error("Error: %s", e)
raise
for resp in resps:
if resp[0] == 'newMessage':
args = resp[1]
promise = self.send_cmds(('getMessages',
{'account': args['account'],
'ids': [args['messageId']],
'properties': ['subject', 'from']}))
promise.then(post_fetch)
else:
logger.warning("Unknown unsolicted response: %s", response)
|
def main(sid, token, to, from_, url):
    """Create and connect the Twilio SMS worker, then block forever.

    Args:
        sid: Twilio account SID.
        token: Twilio auth token.
        to: destination phone number for notifications.
        from_: source phone number.
        url: websocket URL of the Switchboard worker interface.
    """
    # Construct outside the try: in the original, an exception raised by
    # the constructor left `worker` unbound, so the handler's
    # `worker.close()` died with a NameError that masked the real error.
    worker = APNSWorker(sid=sid, token=token, to=to, from_=from_, url=url)
    try:
        worker.connect()
        worker.run_forever()
    except KeyboardInterrupt:
        # Ctrl-C: close the websocket cleanly instead of crashing out.
        worker.close()
# -*- coding: utf-8 -*-
"""
A worker which collects messages from Switchboard and sends a text
message via Twilio to the provided mobile number.
./twilioworker.py --help
"""
__author__ = u"Thomas Moulia <jtmoulia@pocketknife.io>"
__copyright__ = u"Copyright © 2014, ThusFresh, Inc. All rights reserved."
import twilio.rest
import switchboard
import logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# Flip this to True to allow the worker to send push notifications
SEND_APNS = True
class APNSWorker(switchboard.Client):
"""A Switchboard worker that will listen for new emails across all
accounts. For each new email it wil fetch additional information,
form it into a push notification, and send it to the client.
"""
def __init__(self, sid, token, to, from_, *args, **kwargs):
super(APNSWorker, self).__init__(*args, **kwargs)
self._to = to
self._from = from_
self._twilio = twilio.rest.TwilioRestClient(sid, token)
def opened(self):
"""Connect to the websocket, and ensure the account is connected and
the INBOX is being watched, and then start watchingAll.
"""
def post_setup((cmds, resps)):
"""Post setup callback."""
logger.info("Setup complete, listening...")
self.send_cmds(('watchAll', {})).then(post_setup)
def received_unsolicited(self, resps):
def post_fetch((cmds, resps)):
"""Post fetch callback."""
for msg in resps[0][1]['list']:
logger.debug("Preparing msg to send: %s", msg)
from1 = msg['from'][0]
from_name = from1.get('name') or from1.get('email', '<unknown>')
notification = "%s - %s" % (from_name, msg['subject'])
logger.info("Sending text message: %s", notification)
try:
self._twilio.messages.create(
body=notification, to=self._to, from_=self._from)
except Exception as e:
logger.error("Error sending push notification: %s", e)
raise
for resp in resps:
if resp[0] == 'newMessage':
args = resp[1]
promise = self.send_cmds(('getMessages',
{'account': args['account'],
'ids': [args['messageId']],
'properties': ['subject', 'from']}))
promise.then(post_fetch)
else:
logger.warning("Unknown unsolicted response: %s", response)
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description="Switchboard Twilio Worker")
parser.add_argument('--sid', required=True, help="the twilio sid")
parser.add_argument('--token', required=True, help="the twilio token")
parser.add_argument('--to', required=True, help="the destination phone number")
parser.add_argument('--from', required=True, help="the source phone number")
parser.add_argument('--url', default="ws://192.168.50.2:8080/workers",
help="the url of the worker websocket interface")
args = parser.parse_args()
main(args.sid, args.token, args.to, getattr(args, 'from'), args.url)
|
def _take(d, key, default=None):
    """Remove `key` from dict `d` and return its value.

    If the key is absent, `d` is left unchanged and `default` is
    returned instead.
    """
    # dict.pop does the lookup and removal in a single step, replacing
    # the original check-then-index-then-delete triple lookup.
    return d.pop(key, default)
value. If it is not present, return None. | train | https://github.com/jtmoulia/switchboard-python/blob/c9b0cb74cb12a64385465091be633e78d39f08b1/switchboard/__init__.py#L141-L151 | null | # -*- coding: utf-8 -*-
"""
A Switchboard worker/client implementation.
"""
__author__ = u"Thomas Moulia <jtmoulia@pocketknife.io>"
__copyright__ = u"Copyright © 2014, ThusFresh, Inc. All rights reserved."
from ws4py.client.threadedclient import WebSocketClient
import aplus
import json
import email
import logging
# Module-level logger, named after this module per logging convention.
logger = logging.getLogger(__name__)
# Removed stray development leftover `logger.error("HEYY")`, which
# emitted a meaningless ERROR record every time the module was imported.
class Client(WebSocketClient):
"""
Base behavior shared between workers and clients.
"""
def __init__(self, *args, **kwargs):
super(Client, self).__init__(*args, **kwargs)
self._tag = 0
self._cmd_groups = {}
# WebSocketClient Hooks
# ---------------------
def opened(self):
"""
Handle the websocket opening.
"""
logger.debug("Connection is open....")
def closed(self, code, reason=None):
"""
Handle the websocket closing.
"""
logger.debug("Connection has closed: %s - %s.", code, reason)
def received_message(self, msg):
    """Dispatch an incoming websocket message.

    A message whose responses carry the tags of an outstanding command
    group fulfills that group's promise with (cmds, resps); anything
    else is routed to `received_unsolicited`.

    Raises:
        ValueError: if the message is binary (unsupported).
    """
    logger.debug("Received message: %s", msg)
    if msg.is_binary:
        raise ValueError("Binary messages not supported")
    resps = json.loads(msg.data)
    group_id = _get_cmds_id(*resps)
    if not group_id:
        # Unsolicited: hand off to the hook, but surface handler bugs.
        try:
            self.received_unsolicited(resps)
        except:
            logger.exception("Error in unsolicited msg handler")
            raise
        return
    cmds, promise = self._cmd_groups[group_id]
    promise.fulfill((cmds, resps))
# Callbacks
# ---------
def received_unsolicited(self, response):
"""
Handle a unsolicited response.
"""
logger.debug("Received unsolicited message: %s", response)
# Public Interface
# ----------------
def _tag_cmds(self, *cmds):
"""
Yields tagged commands.
"""
for (method, args) in cmds:
tagged_cmd = [method, args, self._tag]
self._tag = self._tag + 1
yield tagged_cmd
def send_cmds(self, *cmds):
    """Tag the given commands, send them to the Switchboard server,
    and return a Promise fulfilled with (tagged_cmds, responses) once
    the matching reply arrives.

    Each cmd is a 2-tuple of (method, args),
    e.g. ("connect", {"host": ...}).
    """
    promise = aplus.Promise()
    tagged = list(self._tag_cmds(*cmds))
    logger.debug("Sending cmds: %s", tagged)
    # Remember the outstanding group so received_message can match
    # the reply back to this promise by its tag tuple.
    self._cmd_groups[_get_cmds_id(*tagged)] = (tagged, promise)
    self.send(json.dumps(tagged))
    return promise
class Fetcher(Client):
"""
A basic Switchboard worker that will listen for new email
notifications. When it receives a notification, it fetches the
raw email from Switchboard and parses it using the email module.
"""
def received_unsolicited(self, resps):
    """Handle unsolicited responses.

    For every 'newMessage' response, fetch the raw message from
    Switchboard and hand the parsed email to `received_new`; anything
    else is logged as unknown.
    """
    def post_fetch(result):
        """Post fetch callback: parse and dispatch each raw message."""
        # Py3-compatible unpacking (the original used a Python 2-only
        # tuple parameter `def post_fetch((cmds, resps))`).
        _cmds, fetch_resps = result
        for raw_msg in fetch_resps[0][1]['list']:
            self.received_new(email.message_from_string(raw_msg['raw']))

    for resp in resps:
        if resp[0] == 'newMessage':
            args = resp[1]
            self.send_cmds(('getMessages',
                            {'account': args['account'],
                             'ids': [args['messageId']],
                             'properties': ['raw']})).then(post_fetch)
        else:
            # BUG FIX: the original referenced the undefined name
            # `response` here, so any unknown unsolicited response
            # raised NameError instead of being logged.
            logger.warning("Unknown unsolicted response: %s", resp)
def received_new(self, msg):
"""
Override this message to handle new emails.
"""
raise NotImplementedError
## Helpers
## =======
def _get_cmds_id(*cmds):
    """Return a hashable identifier (tuple of tags) for a group of
    partially tagged commands, or None if no command carries a tag.

    Untagged commands (fewer than 3 elements) contribute None to the
    tuple so positions stay aligned with the command list.
    """
    tags = tuple(cmd[2] if len(cmd) == 3 else None for cmd in cmds)
    # any(...) short-circuits instead of materializing a throwaway
    # list as the original `if [t for t in tags if t != None]:` did.
    if any(tag is not None for tag in tags):
        return tags
    return None
|
jtmoulia/switchboard-python | switchboard/__init__.py | _get_cmds_id | python | def _get_cmds_id(*cmds):
tags = [cmd[2] if len(cmd) == 3 else None for cmd in cmds]
if [tag for tag in tags if tag != None]:
return tuple(tags)
else:
return None | Returns an identifier for a group of partially tagged commands.
If there are no tagged commands, returns None. | train | https://github.com/jtmoulia/switchboard-python/blob/c9b0cb74cb12a64385465091be633e78d39f08b1/switchboard/__init__.py#L154-L163 | null | # -*- coding: utf-8 -*-
"""
A Switchboard worker/client implementation.
"""
__author__ = u"Thomas Moulia <jtmoulia@pocketknife.io>"
__copyright__ = u"Copyright © 2014, ThusFresh, Inc. All rights reserved."
from ws4py.client.threadedclient import WebSocketClient
import aplus
import json
import email
import logging
logger = logging.getLogger(__name__)
logger.error("HEYY")
class Client(WebSocketClient):
"""
Base behavior shared between workers and clients.
"""
def __init__(self, *args, **kwargs):
super(Client, self).__init__(*args, **kwargs)
self._tag = 0
self._cmd_groups = {}
# WebSocketClient Hooks
# ---------------------
def opened(self):
"""
Handle the websocket opening.
"""
logger.debug("Connection is open....")
def closed(self, code, reason=None):
"""
Handle the websocket closing.
"""
logger.debug("Connection has closed: %s - %s.", code, reason)
def received_message(self, msg):
"""
Handle receiving a message by checking whether it is in response
to a command or unsolicited, and dispatching it to the appropriate
object method.
"""
logger.debug("Received message: %s", msg)
if msg.is_binary:
raise ValueError("Binary messages not supported")
resps = json.loads(msg.data)
cmd_group = _get_cmds_id(*resps)
if cmd_group:
(cmds, promise) = self._cmd_groups[cmd_group]
promise.fulfill((cmds, resps))
else:
try:
self.received_unsolicited(resps)
except:
logger.exception("Error in unsolicited msg handler")
raise
# Callbacks
# ---------
def received_unsolicited(self, response):
"""
Handle a unsolicited response.
"""
logger.debug("Received unsolicited message: %s", response)
# Public Interface
# ----------------
def _tag_cmds(self, *cmds):
"""
Yields tagged commands.
"""
for (method, args) in cmds:
tagged_cmd = [method, args, self._tag]
self._tag = self._tag + 1
yield tagged_cmd
def send_cmds(self, *cmds):
"""
Tags and sends the commands to the Switchboard server, returning
None.
Each cmd be a 2-tuple where the first element is the method name,
and the second is the arguments, e.g. ("connect", {"host": ...}).
"""
promise = aplus.Promise()
tagged_cmds = list(self._tag_cmds(*cmds))
logger.debug("Sending cmds: %s", tagged_cmds)
cmd_group = _get_cmds_id(*tagged_cmds)
self._cmd_groups[cmd_group] = (tagged_cmds, promise)
self.send(json.dumps(tagged_cmds))
return promise
class Fetcher(Client):
"""
A basic Switchboard worker that will listen for new email
notifications. When it receives a notification, it fetches the
raw email from Switchboard and parses it using the email module.
"""
def received_unsolicited(self, resps):
def post_fetch((cmds, resps)):
"""Post fetch callback."""
for raw_msg in resps[0][1]['list']:
self.received_new(email.message_from_string(raw_msg['raw']))
for resp in resps:
if resp[0] == 'newMessage':
args = resp[1]
self.send_cmds(('getMessages',
{'account': args['account'],
'ids': [args['messageId']],
'properties': ['raw']})).then(post_fetch)
else:
logger.warning("Unknown unsolicted response: %s", response)
def received_new(self, msg):
"""
Override this message to handle new emails.
"""
raise NotImplementedError
## Helpers
## =======
def _take(d, key, default=None):
"""
If the key is present in dictionary, remove it and return it's
value. If it is not present, return None.
"""
if key in d:
cmd = d[key]
del d[key]
return cmd
else:
return default
|
jtmoulia/switchboard-python | switchboard/__init__.py | Client.received_message | python | def received_message(self, msg):
logger.debug("Received message: %s", msg)
if msg.is_binary:
raise ValueError("Binary messages not supported")
resps = json.loads(msg.data)
cmd_group = _get_cmds_id(*resps)
if cmd_group:
(cmds, promise) = self._cmd_groups[cmd_group]
promise.fulfill((cmds, resps))
else:
try:
self.received_unsolicited(resps)
except:
logger.exception("Error in unsolicited msg handler")
raise | Handle receiving a message by checking whether it is in response
to a command or unsolicited, and dispatching it to the appropriate
object method. | train | https://github.com/jtmoulia/switchboard-python/blob/c9b0cb74cb12a64385465091be633e78d39f08b1/switchboard/__init__.py#L46-L66 | [
"def _get_cmds_id(*cmds):\n \"\"\"\n Returns an identifier for a group of partially tagged commands.\n If there are no tagged commands, returns None.\n \"\"\"\n tags = [cmd[2] if len(cmd) == 3 else None for cmd in cmds]\n if [tag for tag in tags if tag != None]:\n return tuple(tags)\n else:\n return None\n"
] | class Client(WebSocketClient):
"""
Base behavior shared between workers and clients.
"""
def __init__(self, *args, **kwargs):
super(Client, self).__init__(*args, **kwargs)
self._tag = 0
self._cmd_groups = {}
# WebSocketClient Hooks
# ---------------------
def opened(self):
"""
Handle the websocket opening.
"""
logger.debug("Connection is open....")
def closed(self, code, reason=None):
"""
Handle the websocket closing.
"""
logger.debug("Connection has closed: %s - %s.", code, reason)
# Callbacks
# ---------
def received_unsolicited(self, response):
"""
Handle a unsolicited response.
"""
logger.debug("Received unsolicited message: %s", response)
# Public Interface
# ----------------
def _tag_cmds(self, *cmds):
"""
Yields tagged commands.
"""
for (method, args) in cmds:
tagged_cmd = [method, args, self._tag]
self._tag = self._tag + 1
yield tagged_cmd
def send_cmds(self, *cmds):
"""
Tags and sends the commands to the Switchboard server, returning
None.
Each cmd be a 2-tuple where the first element is the method name,
and the second is the arguments, e.g. ("connect", {"host": ...}).
"""
promise = aplus.Promise()
tagged_cmds = list(self._tag_cmds(*cmds))
logger.debug("Sending cmds: %s", tagged_cmds)
cmd_group = _get_cmds_id(*tagged_cmds)
self._cmd_groups[cmd_group] = (tagged_cmds, promise)
self.send(json.dumps(tagged_cmds))
return promise
|
jtmoulia/switchboard-python | switchboard/__init__.py | Client._tag_cmds | python | def _tag_cmds(self, *cmds):
for (method, args) in cmds:
tagged_cmd = [method, args, self._tag]
self._tag = self._tag + 1
yield tagged_cmd | Yields tagged commands. | train | https://github.com/jtmoulia/switchboard-python/blob/c9b0cb74cb12a64385465091be633e78d39f08b1/switchboard/__init__.py#L80-L87 | null | class Client(WebSocketClient):
"""
Base behavior shared between workers and clients.
"""
def __init__(self, *args, **kwargs):
super(Client, self).__init__(*args, **kwargs)
self._tag = 0
self._cmd_groups = {}
# WebSocketClient Hooks
# ---------------------
def opened(self):
"""
Handle the websocket opening.
"""
logger.debug("Connection is open....")
def closed(self, code, reason=None):
"""
Handle the websocket closing.
"""
logger.debug("Connection has closed: %s - %s.", code, reason)
def received_message(self, msg):
"""
Handle receiving a message by checking whether it is in response
to a command or unsolicited, and dispatching it to the appropriate
object method.
"""
logger.debug("Received message: %s", msg)
if msg.is_binary:
raise ValueError("Binary messages not supported")
resps = json.loads(msg.data)
cmd_group = _get_cmds_id(*resps)
if cmd_group:
(cmds, promise) = self._cmd_groups[cmd_group]
promise.fulfill((cmds, resps))
else:
try:
self.received_unsolicited(resps)
except:
logger.exception("Error in unsolicited msg handler")
raise
# Callbacks
# ---------
def received_unsolicited(self, response):
"""
Handle a unsolicited response.
"""
logger.debug("Received unsolicited message: %s", response)
# Public Interface
# ----------------
def send_cmds(self, *cmds):
"""
Tags and sends the commands to the Switchboard server, returning
None.
Each cmd be a 2-tuple where the first element is the method name,
and the second is the arguments, e.g. ("connect", {"host": ...}).
"""
promise = aplus.Promise()
tagged_cmds = list(self._tag_cmds(*cmds))
logger.debug("Sending cmds: %s", tagged_cmds)
cmd_group = _get_cmds_id(*tagged_cmds)
self._cmd_groups[cmd_group] = (tagged_cmds, promise)
self.send(json.dumps(tagged_cmds))
return promise
|
jtmoulia/switchboard-python | switchboard/__init__.py | Client.send_cmds | python | def send_cmds(self, *cmds):
promise = aplus.Promise()
tagged_cmds = list(self._tag_cmds(*cmds))
logger.debug("Sending cmds: %s", tagged_cmds)
cmd_group = _get_cmds_id(*tagged_cmds)
self._cmd_groups[cmd_group] = (tagged_cmds, promise)
self.send(json.dumps(tagged_cmds))
return promise | Tags and sends the commands to the Switchboard server, returning
None.
Each cmd be a 2-tuple where the first element is the method name,
and the second is the arguments, e.g. ("connect", {"host": ...}). | train | https://github.com/jtmoulia/switchboard-python/blob/c9b0cb74cb12a64385465091be633e78d39f08b1/switchboard/__init__.py#L89-L104 | [
"def _get_cmds_id(*cmds):\n \"\"\"\n Returns an identifier for a group of partially tagged commands.\n If there are no tagged commands, returns None.\n \"\"\"\n tags = [cmd[2] if len(cmd) == 3 else None for cmd in cmds]\n if [tag for tag in tags if tag != None]:\n return tuple(tags)\n else:\n return None\n"
] | class Client(WebSocketClient):
"""
Base behavior shared between workers and clients.
"""
def __init__(self, *args, **kwargs):
super(Client, self).__init__(*args, **kwargs)
self._tag = 0
self._cmd_groups = {}
# WebSocketClient Hooks
# ---------------------
def opened(self):
"""
Handle the websocket opening.
"""
logger.debug("Connection is open....")
def closed(self, code, reason=None):
"""
Handle the websocket closing.
"""
logger.debug("Connection has closed: %s - %s.", code, reason)
def received_message(self, msg):
"""
Handle receiving a message by checking whether it is in response
to a command or unsolicited, and dispatching it to the appropriate
object method.
"""
logger.debug("Received message: %s", msg)
if msg.is_binary:
raise ValueError("Binary messages not supported")
resps = json.loads(msg.data)
cmd_group = _get_cmds_id(*resps)
if cmd_group:
(cmds, promise) = self._cmd_groups[cmd_group]
promise.fulfill((cmds, resps))
else:
try:
self.received_unsolicited(resps)
except:
logger.exception("Error in unsolicited msg handler")
raise
# Callbacks
# ---------
def received_unsolicited(self, response):
"""
Handle a unsolicited response.
"""
logger.debug("Received unsolicited message: %s", response)
# Public Interface
# ----------------
def _tag_cmds(self, *cmds):
"""
Yields tagged commands.
"""
for (method, args) in cmds:
tagged_cmd = [method, args, self._tag]
self._tag = self._tag + 1
yield tagged_cmd
|
jtmoulia/switchboard-python | examples/listener.py | main | python | def main(url):
try:
listener = ListenerWorker(url)
listener.connect()
listener.run_forever()
except KeyboardInterrupt:
listener.close() | Create, connect, and block on the listener worker. | train | https://github.com/jtmoulia/switchboard-python/blob/c9b0cb74cb12a64385465091be633e78d39f08b1/examples/listener.py#L59-L66 | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
"""
__author__ = u"Thomas Moulia <jtmoulia@pocketknife.io>"
__copyright__ = u"Copyright © 2014, ThusFresh, Inc. All rights reserved."
import switchboard
import thread
import email
import argparse
import logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
ACCOUNT = 'mail.dispatch.test@gmail.com'
CONN_SPEC = {'host': 'imap.gmail.com',
'port': 993,
'auth': {
'type': 'plain',
'username': ACCOUNT,
'password': 'i>V99JuMVEs;'}};
class ListenerWorker(switchboard.Fetcher):
"""
A basic Switchboard worker that will listen for new email
notifications. When it receives a notification, it fetches the
raw email from Switchboard and parses it using the email module.
"""
def opened(self):
"""
Connect to the websocket, and ensure the account is connected and
the INBOX is being watched, and then start watchingAll.
"""
def post_setup((cmds, resps)):
"""Post setup callback."""
logger.info("Setup complete, listening...")
self.send_cmds(('connect', CONN_SPEC),
('watchMailboxes', {'account': ACCOUNT,
'list': ['INBOX']}),
('watchAll', {})).then(post_setup)
def received_new(self, msg):
"""
Called when a new message is received.
"""
logger.info("Subject: %s, From: %s, To: %s",
msg['subject'], msg['from'], msg['to'])
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Loop echo listener")
parser.add_argument("--url", default="ws://192.168.50.2:8080/workers")
args = parser.parse_args()
main(args.url)
|
jhermann/rudiments | src/rudiments/security.py | Credentials.auth_pair | python | def auth_pair(self, force_console=False):
if not self.auth_valid():
self._get_auth(force_console)
return (self.user, self.password) | Return username/password tuple, possibly prompting the user for them. | train | https://github.com/jhermann/rudiments/blob/028ec7237946115c7b18e50557cbc5f6b824653e/src/rudiments/security.py#L56-L60 | [
"def auth_valid(self):\n \"\"\"Return bool indicating whether full credentials were provided.\"\"\"\n return bool(self.user and self.password)\n"
] | class Credentials(object):
"""Look up and provide authN credentials (username / password) from common sources."""
URL_RE = re.compile(r'^(http|https|ftp|ftps)://') # covers the common use cases
NETRC_FILE = None # use the default, unless changed for test purposes
AUTH_MEMOIZE_INPUT = {} # remember manual auth input across several queries in one run
def __init__(self, target):
"""``target`` is a representation of the secured object, typically an URL."""
self.target = target
self.user = None
self.password = None
self.keyring_service = target
self.source = None
def auth_valid(self):
"""Return bool indicating whether full credentials were provided."""
return bool(self.user and self.password)
def _raw_input(self, prompt=None):
"""Mockable wrapper for raw_input."""
return raw_input(prompt) # pragma: no cover
def _get_auth(self, force_console=False):
"""Try to get login auth from known sources."""
if not self.target:
raise ValueError("Unspecified target ({!r})".format(self.target))
elif not force_console and self.URL_RE.match(self.target):
auth_url = urlparse(self.target)
source = 'url'
if auth_url.username:
self.user = auth_url.username
if auth_url.password:
self.password = auth_url.password
if not self.auth_valid():
source = self._get_auth_from_keyring()
if not self.auth_valid():
source = self._get_auth_from_netrc(auth_url.hostname)
if not self.auth_valid():
source = self._get_auth_from_console(self.target)
else:
source = self._get_auth_from_console(self.target)
if self.auth_valid():
self.source = source
def _get_auth_from_console(self, realm):
"""Prompt for the user and password."""
self.user, self.password = self.AUTH_MEMOIZE_INPUT.get(realm, (self.user, None))
if not self.auth_valid():
if not self.user:
login = getpass.getuser()
self.user = self._raw_input('Username for "{}" [{}]: '.format(realm, login)) or login
self.password = getpass.getpass('Password for "{}": '.format(realm))
Credentials.AUTH_MEMOIZE_INPUT[realm] = self.user, self.password
return 'console'
def _get_auth_from_netrc(self, hostname):
"""Try to find login auth in ``~/.netrc``."""
try:
hostauth = netrc(self.NETRC_FILE)
except IOError as cause:
if cause.errno != errno.ENOENT:
raise
return None
except NetrcParseError as cause:
raise # TODO: Map to common base class, so caller has to handle less error types?
# Try to find specific `user@host` credentials first, then just `host`
auth = hostauth.hosts.get('{}@{}'.format(self.user or getpass.getuser(), hostname), None)
if not auth:
auth = hostauth.hosts.get(hostname, None)
if auth:
username, account, password = auth # pylint: disable=unpacking-non-sequence
if username:
self.user = username
if password == 'base64':
# support for password obfuscation, prevent "over the shoulder lookup"
self.password = account.decode('base64')
elif password:
self.password = password
return 'netrc'
def _get_password_from_keyring(self, accountname):
"""Query keyring for a password entry."""
return keyring.get_password(self.keyring_service, accountname)
def _get_auth_from_keyring(self):
"""Try to get credentials using `keyring <https://github.com/jaraco/keyring>`_."""
if not keyring:
return None
# Take user from URL if available, else the OS login name
password = self._get_password_from_keyring(self.user or getpass.getuser())
if password is not None:
self.user = self.user or getpass.getuser()
self.password = password
return 'keyring'
|
jhermann/rudiments | src/rudiments/security.py | Credentials._get_auth | python | def _get_auth(self, force_console=False):
if not self.target:
raise ValueError("Unspecified target ({!r})".format(self.target))
elif not force_console and self.URL_RE.match(self.target):
auth_url = urlparse(self.target)
source = 'url'
if auth_url.username:
self.user = auth_url.username
if auth_url.password:
self.password = auth_url.password
if not self.auth_valid():
source = self._get_auth_from_keyring()
if not self.auth_valid():
source = self._get_auth_from_netrc(auth_url.hostname)
if not self.auth_valid():
source = self._get_auth_from_console(self.target)
else:
source = self._get_auth_from_console(self.target)
if self.auth_valid():
self.source = source | Try to get login auth from known sources. | train | https://github.com/jhermann/rudiments/blob/028ec7237946115c7b18e50557cbc5f6b824653e/src/rudiments/security.py#L66-L87 | null | class Credentials(object):
"""Look up and provide authN credentials (username / password) from common sources."""
URL_RE = re.compile(r'^(http|https|ftp|ftps)://') # covers the common use cases
NETRC_FILE = None # use the default, unless changed for test purposes
AUTH_MEMOIZE_INPUT = {} # remember manual auth input across several queries in one run
def __init__(self, target):
"""``target`` is a representation of the secured object, typically an URL."""
self.target = target
self.user = None
self.password = None
self.keyring_service = target
self.source = None
def auth_valid(self):
"""Return bool indicating whether full credentials were provided."""
return bool(self.user and self.password)
def auth_pair(self, force_console=False):
"""Return username/password tuple, possibly prompting the user for them."""
if not self.auth_valid():
self._get_auth(force_console)
return (self.user, self.password)
def _raw_input(self, prompt=None):
"""Mockable wrapper for raw_input."""
return raw_input(prompt) # pragma: no cover
def _get_auth_from_console(self, realm):
"""Prompt for the user and password."""
self.user, self.password = self.AUTH_MEMOIZE_INPUT.get(realm, (self.user, None))
if not self.auth_valid():
if not self.user:
login = getpass.getuser()
self.user = self._raw_input('Username for "{}" [{}]: '.format(realm, login)) or login
self.password = getpass.getpass('Password for "{}": '.format(realm))
Credentials.AUTH_MEMOIZE_INPUT[realm] = self.user, self.password
return 'console'
def _get_auth_from_netrc(self, hostname):
"""Try to find login auth in ``~/.netrc``."""
try:
hostauth = netrc(self.NETRC_FILE)
except IOError as cause:
if cause.errno != errno.ENOENT:
raise
return None
except NetrcParseError as cause:
raise # TODO: Map to common base class, so caller has to handle less error types?
# Try to find specific `user@host` credentials first, then just `host`
auth = hostauth.hosts.get('{}@{}'.format(self.user or getpass.getuser(), hostname), None)
if not auth:
auth = hostauth.hosts.get(hostname, None)
if auth:
username, account, password = auth # pylint: disable=unpacking-non-sequence
if username:
self.user = username
if password == 'base64':
# support for password obfuscation, prevent "over the shoulder lookup"
self.password = account.decode('base64')
elif password:
self.password = password
return 'netrc'
def _get_password_from_keyring(self, accountname):
"""Query keyring for a password entry."""
return keyring.get_password(self.keyring_service, accountname)
def _get_auth_from_keyring(self):
"""Try to get credentials using `keyring <https://github.com/jaraco/keyring>`_."""
if not keyring:
return None
# Take user from URL if available, else the OS login name
password = self._get_password_from_keyring(self.user or getpass.getuser())
if password is not None:
self.user = self.user or getpass.getuser()
self.password = password
return 'keyring'
|
jhermann/rudiments | src/rudiments/security.py | Credentials._get_auth_from_console | python | def _get_auth_from_console(self, realm):
self.user, self.password = self.AUTH_MEMOIZE_INPUT.get(realm, (self.user, None))
if not self.auth_valid():
if not self.user:
login = getpass.getuser()
self.user = self._raw_input('Username for "{}" [{}]: '.format(realm, login)) or login
self.password = getpass.getpass('Password for "{}": '.format(realm))
Credentials.AUTH_MEMOIZE_INPUT[realm] = self.user, self.password
return 'console' | Prompt for the user and password. | train | https://github.com/jhermann/rudiments/blob/028ec7237946115c7b18e50557cbc5f6b824653e/src/rudiments/security.py#L89-L99 | null | class Credentials(object):
"""Look up and provide authN credentials (username / password) from common sources."""
URL_RE = re.compile(r'^(http|https|ftp|ftps)://') # covers the common use cases
NETRC_FILE = None # use the default, unless changed for test purposes
AUTH_MEMOIZE_INPUT = {} # remember manual auth input across several queries in one run
def __init__(self, target):
"""``target`` is a representation of the secured object, typically an URL."""
self.target = target
self.user = None
self.password = None
self.keyring_service = target
self.source = None
def auth_valid(self):
"""Return bool indicating whether full credentials were provided."""
return bool(self.user and self.password)
def auth_pair(self, force_console=False):
"""Return username/password tuple, possibly prompting the user for them."""
if not self.auth_valid():
self._get_auth(force_console)
return (self.user, self.password)
def _raw_input(self, prompt=None):
"""Mockable wrapper for raw_input."""
return raw_input(prompt) # pragma: no cover
def _get_auth(self, force_console=False):
"""Try to get login auth from known sources."""
if not self.target:
raise ValueError("Unspecified target ({!r})".format(self.target))
elif not force_console and self.URL_RE.match(self.target):
auth_url = urlparse(self.target)
source = 'url'
if auth_url.username:
self.user = auth_url.username
if auth_url.password:
self.password = auth_url.password
if not self.auth_valid():
source = self._get_auth_from_keyring()
if not self.auth_valid():
source = self._get_auth_from_netrc(auth_url.hostname)
if not self.auth_valid():
source = self._get_auth_from_console(self.target)
else:
source = self._get_auth_from_console(self.target)
if self.auth_valid():
self.source = source
def _get_auth_from_netrc(self, hostname):
"""Try to find login auth in ``~/.netrc``."""
try:
hostauth = netrc(self.NETRC_FILE)
except IOError as cause:
if cause.errno != errno.ENOENT:
raise
return None
except NetrcParseError as cause:
raise # TODO: Map to common base class, so caller has to handle less error types?
# Try to find specific `user@host` credentials first, then just `host`
auth = hostauth.hosts.get('{}@{}'.format(self.user or getpass.getuser(), hostname), None)
if not auth:
auth = hostauth.hosts.get(hostname, None)
if auth:
username, account, password = auth # pylint: disable=unpacking-non-sequence
if username:
self.user = username
if password == 'base64':
# support for password obfuscation, prevent "over the shoulder lookup"
self.password = account.decode('base64')
elif password:
self.password = password
return 'netrc'
def _get_password_from_keyring(self, accountname):
"""Query keyring for a password entry."""
return keyring.get_password(self.keyring_service, accountname)
def _get_auth_from_keyring(self):
"""Try to get credentials using `keyring <https://github.com/jaraco/keyring>`_."""
if not keyring:
return None
# Take user from URL if available, else the OS login name
password = self._get_password_from_keyring(self.user or getpass.getuser())
if password is not None:
self.user = self.user or getpass.getuser()
self.password = password
return 'keyring'
|
jhermann/rudiments | src/rudiments/security.py | Credentials._get_auth_from_netrc | python | def _get_auth_from_netrc(self, hostname):
try:
hostauth = netrc(self.NETRC_FILE)
except IOError as cause:
if cause.errno != errno.ENOENT:
raise
return None
except NetrcParseError as cause:
raise # TODO: Map to common base class, so caller has to handle less error types?
# Try to find specific `user@host` credentials first, then just `host`
auth = hostauth.hosts.get('{}@{}'.format(self.user or getpass.getuser(), hostname), None)
if not auth:
auth = hostauth.hosts.get(hostname, None)
if auth:
username, account, password = auth # pylint: disable=unpacking-non-sequence
if username:
self.user = username
if password == 'base64':
# support for password obfuscation, prevent "over the shoulder lookup"
self.password = account.decode('base64')
elif password:
self.password = password
return 'netrc' | Try to find login auth in ``~/.netrc``. | train | https://github.com/jhermann/rudiments/blob/028ec7237946115c7b18e50557cbc5f6b824653e/src/rudiments/security.py#L101-L127 | null | class Credentials(object):
"""Look up and provide authN credentials (username / password) from common sources."""
URL_RE = re.compile(r'^(http|https|ftp|ftps)://') # covers the common use cases
NETRC_FILE = None # use the default, unless changed for test purposes
AUTH_MEMOIZE_INPUT = {} # remember manual auth input across several queries in one run
def __init__(self, target):
"""``target`` is a representation of the secured object, typically an URL."""
self.target = target
self.user = None
self.password = None
self.keyring_service = target
self.source = None
def auth_valid(self):
"""Return bool indicating whether full credentials were provided."""
return bool(self.user and self.password)
def auth_pair(self, force_console=False):
"""Return username/password tuple, possibly prompting the user for them."""
if not self.auth_valid():
self._get_auth(force_console)
return (self.user, self.password)
def _raw_input(self, prompt=None):
"""Mockable wrapper for raw_input."""
return raw_input(prompt) # pragma: no cover
def _get_auth(self, force_console=False):
"""Try to get login auth from known sources."""
if not self.target:
raise ValueError("Unspecified target ({!r})".format(self.target))
elif not force_console and self.URL_RE.match(self.target):
auth_url = urlparse(self.target)
source = 'url'
if auth_url.username:
self.user = auth_url.username
if auth_url.password:
self.password = auth_url.password
if not self.auth_valid():
source = self._get_auth_from_keyring()
if not self.auth_valid():
source = self._get_auth_from_netrc(auth_url.hostname)
if not self.auth_valid():
source = self._get_auth_from_console(self.target)
else:
source = self._get_auth_from_console(self.target)
if self.auth_valid():
self.source = source
def _get_auth_from_console(self, realm):
"""Prompt for the user and password."""
self.user, self.password = self.AUTH_MEMOIZE_INPUT.get(realm, (self.user, None))
if not self.auth_valid():
if not self.user:
login = getpass.getuser()
self.user = self._raw_input('Username for "{}" [{}]: '.format(realm, login)) or login
self.password = getpass.getpass('Password for "{}": '.format(realm))
Credentials.AUTH_MEMOIZE_INPUT[realm] = self.user, self.password
return 'console'
def _get_password_from_keyring(self, accountname):
"""Query keyring for a password entry."""
return keyring.get_password(self.keyring_service, accountname)
def _get_auth_from_keyring(self):
"""Try to get credentials using `keyring <https://github.com/jaraco/keyring>`_."""
if not keyring:
return None
# Take user from URL if available, else the OS login name
password = self._get_password_from_keyring(self.user or getpass.getuser())
if password is not None:
self.user = self.user or getpass.getuser()
self.password = password
return 'keyring'
|
jhermann/rudiments | src/rudiments/security.py | Credentials._get_auth_from_keyring | python | def _get_auth_from_keyring(self):
if not keyring:
return None
# Take user from URL if available, else the OS login name
password = self._get_password_from_keyring(self.user or getpass.getuser())
if password is not None:
self.user = self.user or getpass.getuser()
self.password = password
return 'keyring' | Try to get credentials using `keyring <https://github.com/jaraco/keyring>`_. | train | https://github.com/jhermann/rudiments/blob/028ec7237946115c7b18e50557cbc5f6b824653e/src/rudiments/security.py#L133-L144 | null | class Credentials(object):
"""Look up and provide authN credentials (username / password) from common sources."""
URL_RE = re.compile(r'^(http|https|ftp|ftps)://') # covers the common use cases
NETRC_FILE = None # use the default, unless changed for test purposes
AUTH_MEMOIZE_INPUT = {} # remember manual auth input across several queries in one run
def __init__(self, target):
"""``target`` is a representation of the secured object, typically an URL."""
self.target = target
self.user = None
self.password = None
self.keyring_service = target
self.source = None
def auth_valid(self):
"""Return bool indicating whether full credentials were provided."""
return bool(self.user and self.password)
def auth_pair(self, force_console=False):
"""Return username/password tuple, possibly prompting the user for them."""
if not self.auth_valid():
self._get_auth(force_console)
return (self.user, self.password)
def _raw_input(self, prompt=None):
"""Mockable wrapper for raw_input."""
return raw_input(prompt) # pragma: no cover
def _get_auth(self, force_console=False):
"""Try to get login auth from known sources."""
if not self.target:
raise ValueError("Unspecified target ({!r})".format(self.target))
elif not force_console and self.URL_RE.match(self.target):
auth_url = urlparse(self.target)
source = 'url'
if auth_url.username:
self.user = auth_url.username
if auth_url.password:
self.password = auth_url.password
if not self.auth_valid():
source = self._get_auth_from_keyring()
if not self.auth_valid():
source = self._get_auth_from_netrc(auth_url.hostname)
if not self.auth_valid():
source = self._get_auth_from_console(self.target)
else:
source = self._get_auth_from_console(self.target)
if self.auth_valid():
self.source = source
def _get_auth_from_console(self, realm):
"""Prompt for the user and password."""
self.user, self.password = self.AUTH_MEMOIZE_INPUT.get(realm, (self.user, None))
if not self.auth_valid():
if not self.user:
login = getpass.getuser()
self.user = self._raw_input('Username for "{}" [{}]: '.format(realm, login)) or login
self.password = getpass.getpass('Password for "{}": '.format(realm))
Credentials.AUTH_MEMOIZE_INPUT[realm] = self.user, self.password
return 'console'
def _get_auth_from_netrc(self, hostname):
"""Try to find login auth in ``~/.netrc``."""
try:
hostauth = netrc(self.NETRC_FILE)
except IOError as cause:
if cause.errno != errno.ENOENT:
raise
return None
except NetrcParseError as cause:
raise # TODO: Map to common base class, so caller has to handle less error types?
# Try to find specific `user@host` credentials first, then just `host`
auth = hostauth.hosts.get('{}@{}'.format(self.user or getpass.getuser(), hostname), None)
if not auth:
auth = hostauth.hosts.get(hostname, None)
if auth:
username, account, password = auth # pylint: disable=unpacking-non-sequence
if username:
self.user = username
if password == 'base64':
# support for password obfuscation, prevent "over the shoulder lookup"
self.password = account.decode('base64')
elif password:
self.password = password
return 'netrc'
def _get_password_from_keyring(self, accountname):
"""Query keyring for a password entry."""
return keyring.get_password(self.keyring_service, accountname)
|
jhermann/rudiments | src/rudiments/reamed/click.py | pretty_path | python | def pretty_path(path, _home_re=re.compile('^' + re.escape(os.path.expanduser('~') + os.sep))):
path = format_filename(path)
path = _home_re.sub('~' + os.sep, path)
return path | Prettify path for humans, and make it Unicode. | train | https://github.com/jhermann/rudiments/blob/028ec7237946115c7b18e50557cbc5f6b824653e/src/rudiments/reamed/click.py#L39-L43 | null | # -*- coding: utf-8 -*-
# pylint: disable=bad-continuation
""" ‘Double Click’ – Extensions to `Click <http://click.pocoo.org/4/>`_.
"""
# Copyright © 2015 Jürgen Hermann <jh@web.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, unicode_literals, print_function
import os
import re
import sys
import configobj
from munch import Munch as Bunch
from click import * # pylint: disable=wildcard-import
from click import __all__
from .._compat import encode_filename
__all__ = list(__all__) + [
'pretty_path', 'serror',
'LoggedFailure', 'AliasedGroup', 'Configuration',
]
__all__ = [encode_filename(_) for _ in __all__]
def serror(message, *args, **kwargs):
    """Print a styled (bold white-on-red) error message.

    Any positional or keyword arguments are applied via ``str.format``;
    with no arguments the message is printed verbatim.
    """
    text = message.format(*args, **kwargs) if (args or kwargs) else message
    return secho(text, fg='white', bg='red', bold=True)
class LoggedFailure(UsageError):
    """A failure condition reported to the user in an eye-catching style."""

    def __init__(self, message):
        styled = style(message, fg='white', bg='red', bold=True)
        UsageError.__init__(self, styled)
class AliasedGroup(Group):
    """A click command group that resolves alias names to canonical commands.

    Subclasses define ``MAP``, a mapping from alias to canonical command
    name, and pass themselves as the ``cls`` parameter of a
    ``click.group`` decorator.
    """

    # Mapping of alias name -> canonical command name (filled by subclasses).
    MAP = {}

    def get_command(self, ctx, cmd_name):
        """Resolve any alias, then delegate to the normal command lookup."""
        canonical = self.MAP.get(cmd_name, cmd_name)
        return super(AliasedGroup, self).get_command(ctx, canonical)
class Configuration(object):
    """ Configuration container that is initialized early in the main command.

        The default instance is available via the Click context as ``ctx.obj.cfg``.
        Configuration is lazily loaded, on first access.
    """

    # Sentinel for `get()`, so an explicit `default=None` can be told apart
    # from "no default passed at all".
    NO_DEFAULT = object()

    # Default config search path; `{appname}` / `{appcfg}` are expanded in
    # `__init__` (an `{appdir}` value is also provided there, but unused by
    # these default entries).
    DEFAULT_PATH = [
        '/etc/{appname}.conf',
        '/etc/{appname}.d/',
        '{appcfg}.conf',
    ]

    # Keyword options passed to every `configobj.ConfigObj()` call.
    DEFAULT_CONFIG_OPTS = dict(
        encoding='utf-8',
        default_encoding='utf-8',
    )

    @classmethod
    def from_context(cls, ctx, config_paths=None, project=None):
        """Create a configuration object, and initialize the Click context with it.

        Parameters:
            ctx (Context): The Click context; the new object is stored as ``ctx.obj.cfg``.
            config_paths (list): Optional PATH-like config locations (see ``__init__``).
            project (str): Optional umbrella project name for shared config locations.

        Returns:
            Configuration: The newly created instance.
        """
        if ctx.obj is None:
            ctx.obj = Bunch()
        ctx.obj.cfg = cls(ctx.info_name, config_paths, project=project)
        return ctx.obj.cfg

    def __init__(self, name, config_paths=None, project=None):
        """ Set up static knowledge about configuration.

            ``config_paths`` is a list of PATHs to config files or directories.
            If that is non-empty, exactly those paths will be considered; an empty
            path element represents the default locations (just like in MANPATH).

            If the environment variable ``<prefix>_CONFIG`` is set, its value will
            be appended to the default locations.
        """
        # Start empty; real values are merged in lazily by `load()`.
        self.values = configobj.ConfigObj({}, **self.DEFAULT_CONFIG_OPTS)
        self.project = project
        self.name = name
        self.config_paths = []
        self.loaded = False

        # Extra locations from e.g. `MYAPP_CONFIG` (name upper-cased, '-' -> '_');
        # empty string when the variable is unset.
        env_config = os.environ.get((self.name + '-config').upper().replace('-', '_'), '')
        # Expand the DEFAULT_PATH placeholders, then append any environment entries.
        defaults = [i.format(appname=os.sep.join([self.project, self.name]) if project else self.name,
                             appdir=get_app_dir(self.project or self.name),
                             appcfg=get_app_dir(self.project) + os.sep + self.name if self.project else get_app_dir(self.name))
                    for i in self.DEFAULT_PATH
                    ] + [i for i in env_config.split(os.pathsep) if i]
        # Split each given path on the path separator; an empty element
        # splices in the defaults (MANPATH semantics).
        for path in config_paths or []:
            for name in path.split(os.pathsep):
                if name:
                    self.config_paths.append(name)
                else:
                    self.config_paths.extend(defaults)
        if not self.config_paths:
            self.config_paths = defaults

    def locations(self, exists=True):
        """ Return the location of the config file(s).

            A given directory will be scanned for ``*.conf`` files, in alphabetical order.
            Any duplicates will be eliminated.

            If ``exists`` is True, only existing configuration locations are returned.
        """
        result = []
        for config_files in self.config_paths:
            if not config_files:
                continue
            if os.path.isdir(config_files):
                # Expand a directory to its `*.conf` members, alphabetically.
                config_files = [os.path.join(config_files, i)
                                for i in sorted(os.listdir(config_files))
                                if i.endswith('.conf')]
            else:
                config_files = [config_files]
            for config_file in config_files:
                if not exists or os.path.exists(config_file):
                    config_file = os.path.abspath(config_file)
                    if config_file in result:
                        # De-duplicate, keeping only the LAST occurrence of a path.
                        result.remove(config_file)
                    result.append(config_file)
        return result

    def load(self):
        """Load configuration from the defined locations (idempotent; cached after first call)."""
        if not self.loaded:
            # Reset, then merge each file in search-path order.
            self.values = configobj.ConfigObj({}, **self.DEFAULT_CONFIG_OPTS)
            for path in self.locations():
                try:
                    part = configobj.ConfigObj(infile=path, **self.DEFAULT_CONFIG_OPTS)
                except configobj.ConfigObjError as cause:
                    raise LoggedFailure("Error in file '{path}': {cause}".format(path=pretty_path(path), cause=cause))
                self.values.merge(part)
            self.loaded = True
        return self.values

    def dump(self, to=None):  # pylint: disable=invalid-name
        """Dump the merged configuration to a stream (stdout when `to` is None)."""
        self.load().write(to or sys.stdout)

    def section(self, ctx, optional=False):
        """
        Return section of the config for a specific context (sub-command).

        Parameters:
            ctx (Context): The Click context object.
            optional (bool): If ``True``, return an empty config object when section is missing.

        Returns:
            Section: The configuration section belonging to
                the active (sub-)command (based on ``ctx.info_name``).
        """
        values = self.load()
        try:
            return values[ctx.info_name]
        except KeyError:
            if optional:
                return configobj.ConfigObj({}, **self.DEFAULT_CONFIG_OPTS)
            raise LoggedFailure("Configuration section '{}' not found!".format(ctx.info_name))

    def get(self, name, default=NO_DEFAULT):
        """
        Return the specified name from the root section.

        Parameters:
            name (str): The name of the requested value.
            default (optional): If set, the default value to use
                instead of raising :class:`LoggedFailure` for
                unknown names.

        Returns:
            The value for `name`.

        Raises:
            LoggedFailure: The requested `name` was not found.
        """
        values = self.load()
        try:
            return values[name]
        except KeyError:
            if default is self.NO_DEFAULT:
                raise LoggedFailure("Configuration value '{}' not found in root section!".format(name))
            return default
|
jhermann/rudiments | src/rudiments/reamed/click.py | serror | python | def serror(message, *args, **kwargs):
if args or kwargs:
message = message.format(*args, **kwargs)
return secho(message, fg='white', bg='red', bold=True) | Print a styled error message, while using any arguments to format the message. | train | https://github.com/jhermann/rudiments/blob/028ec7237946115c7b18e50557cbc5f6b824653e/src/rudiments/reamed/click.py#L46-L50 | null | # -*- coding: utf-8 -*-
# pylint: disable=bad-continuation
""" ‘Double Click’ – Extensions to `Click <http://click.pocoo.org/4/>`_.
"""
# Copyright © 2015 Jürgen Hermann <jh@web.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, unicode_literals, print_function
import os
import re
import sys
import configobj
from munch import Munch as Bunch
from click import * # pylint: disable=wildcard-import
from click import __all__
from .._compat import encode_filename
__all__ = list(__all__) + [
'pretty_path', 'serror',
'LoggedFailure', 'AliasedGroup', 'Configuration',
]
__all__ = [encode_filename(_) for _ in __all__]
def pretty_path(path, _home_re=re.compile('^' + re.escape(os.path.expanduser('~') + os.sep))):
    """Prettify path for humans, and make it Unicode.

    The home directory prefix is abbreviated to ``~``.
    """
    return _home_re.sub('~' + os.sep, format_filename(path))
class LoggedFailure(UsageError):
    """Usage error whose message is pre-styled for loud display to the user."""

    def __init__(self, message):
        UsageError.__init__(self, style(message, fg='white', bg='red', bold=True))
class AliasedGroup(Group):
    """Command group supporting alias names for its sub-commands.

    Derive from this class, fill the ``MAP`` class variable with
    ``{alias: canonical_name}`` entries, and pass the derived class as
    the ``cls`` parameter of a ``click.group`` decorator.
    """

    # alias -> canonical command name
    MAP = {}

    def get_command(self, ctx, cmd_name):
        """Translate an alias to its canonical name, then look the command up."""
        return super(AliasedGroup, self).get_command(ctx, self.MAP.get(cmd_name, cmd_name))
class Configuration(object):
    """ Configuration container that is initialized early in the main command.

        The default instance is available via the Click context as ``ctx.obj.cfg``.
        Configuration is lazily loaded, on first access.
    """

    # Sentinel for `get()`, so an explicit `default=None` can be told apart
    # from "no default passed at all".
    NO_DEFAULT = object()

    # Default config search path; `{appname}` / `{appcfg}` are expanded in
    # `__init__` (an `{appdir}` value is also provided there, but unused by
    # these default entries).
    DEFAULT_PATH = [
        '/etc/{appname}.conf',
        '/etc/{appname}.d/',
        '{appcfg}.conf',
    ]

    # Keyword options passed to every `configobj.ConfigObj()` call.
    DEFAULT_CONFIG_OPTS = dict(
        encoding='utf-8',
        default_encoding='utf-8',
    )

    @classmethod
    def from_context(cls, ctx, config_paths=None, project=None):
        """Create a configuration object, and initialize the Click context with it.

        Parameters:
            ctx (Context): The Click context; the new object is stored as ``ctx.obj.cfg``.
            config_paths (list): Optional PATH-like config locations (see ``__init__``).
            project (str): Optional umbrella project name for shared config locations.

        Returns:
            Configuration: The newly created instance.
        """
        if ctx.obj is None:
            ctx.obj = Bunch()
        ctx.obj.cfg = cls(ctx.info_name, config_paths, project=project)
        return ctx.obj.cfg

    def __init__(self, name, config_paths=None, project=None):
        """ Set up static knowledge about configuration.

            ``config_paths`` is a list of PATHs to config files or directories.
            If that is non-empty, exactly those paths will be considered; an empty
            path element represents the default locations (just like in MANPATH).

            If the environment variable ``<prefix>_CONFIG`` is set, its value will
            be appended to the default locations.
        """
        # Start empty; real values are merged in lazily by `load()`.
        self.values = configobj.ConfigObj({}, **self.DEFAULT_CONFIG_OPTS)
        self.project = project
        self.name = name
        self.config_paths = []
        self.loaded = False

        # Extra locations from e.g. `MYAPP_CONFIG` (name upper-cased, '-' -> '_');
        # empty string when the variable is unset.
        env_config = os.environ.get((self.name + '-config').upper().replace('-', '_'), '')
        # Expand the DEFAULT_PATH placeholders, then append any environment entries.
        defaults = [i.format(appname=os.sep.join([self.project, self.name]) if project else self.name,
                             appdir=get_app_dir(self.project or self.name),
                             appcfg=get_app_dir(self.project) + os.sep + self.name if self.project else get_app_dir(self.name))
                    for i in self.DEFAULT_PATH
                    ] + [i for i in env_config.split(os.pathsep) if i]
        # Split each given path on the path separator; an empty element
        # splices in the defaults (MANPATH semantics).
        for path in config_paths or []:
            for name in path.split(os.pathsep):
                if name:
                    self.config_paths.append(name)
                else:
                    self.config_paths.extend(defaults)
        if not self.config_paths:
            self.config_paths = defaults

    def locations(self, exists=True):
        """ Return the location of the config file(s).

            A given directory will be scanned for ``*.conf`` files, in alphabetical order.
            Any duplicates will be eliminated.

            If ``exists`` is True, only existing configuration locations are returned.
        """
        result = []
        for config_files in self.config_paths:
            if not config_files:
                continue
            if os.path.isdir(config_files):
                # Expand a directory to its `*.conf` members, alphabetically.
                config_files = [os.path.join(config_files, i)
                                for i in sorted(os.listdir(config_files))
                                if i.endswith('.conf')]
            else:
                config_files = [config_files]
            for config_file in config_files:
                if not exists or os.path.exists(config_file):
                    config_file = os.path.abspath(config_file)
                    if config_file in result:
                        # De-duplicate, keeping only the LAST occurrence of a path.
                        result.remove(config_file)
                    result.append(config_file)
        return result

    def load(self):
        """Load configuration from the defined locations (idempotent; cached after first call)."""
        if not self.loaded:
            # Reset, then merge each file in search-path order.
            self.values = configobj.ConfigObj({}, **self.DEFAULT_CONFIG_OPTS)
            for path in self.locations():
                try:
                    part = configobj.ConfigObj(infile=path, **self.DEFAULT_CONFIG_OPTS)
                except configobj.ConfigObjError as cause:
                    raise LoggedFailure("Error in file '{path}': {cause}".format(path=pretty_path(path), cause=cause))
                self.values.merge(part)
            self.loaded = True
        return self.values

    def dump(self, to=None):  # pylint: disable=invalid-name
        """Dump the merged configuration to a stream (stdout when `to` is None)."""
        self.load().write(to or sys.stdout)

    def section(self, ctx, optional=False):
        """
        Return section of the config for a specific context (sub-command).

        Parameters:
            ctx (Context): The Click context object.
            optional (bool): If ``True``, return an empty config object when section is missing.

        Returns:
            Section: The configuration section belonging to
                the active (sub-)command (based on ``ctx.info_name``).
        """
        values = self.load()
        try:
            return values[ctx.info_name]
        except KeyError:
            if optional:
                return configobj.ConfigObj({}, **self.DEFAULT_CONFIG_OPTS)
            raise LoggedFailure("Configuration section '{}' not found!".format(ctx.info_name))

    def get(self, name, default=NO_DEFAULT):
        """
        Return the specified name from the root section.

        Parameters:
            name (str): The name of the requested value.
            default (optional): If set, the default value to use
                instead of raising :class:`LoggedFailure` for
                unknown names.

        Returns:
            The value for `name`.

        Raises:
            LoggedFailure: The requested `name` was not found.
        """
        values = self.load()
        try:
            return values[name]
        except KeyError:
            if default is self.NO_DEFAULT:
                raise LoggedFailure("Configuration value '{}' not found in root section!".format(name))
            return default
|
jhermann/rudiments | src/rudiments/reamed/click.py | AliasedGroup.get_command | python | def get_command(self, ctx, cmd_name):
cmd_name = self.MAP.get(cmd_name, cmd_name)
return super(AliasedGroup, self).get_command(ctx, cmd_name) | Map some aliases to their 'real' names. | train | https://github.com/jhermann/rudiments/blob/028ec7237946115c7b18e50557cbc5f6b824653e/src/rudiments/reamed/click.py#L72-L75 | null | class AliasedGroup(Group):
""" A command group with alias names.
Inherit from this class and define a ``MAP`` class variable,
which is a mapping from alias names to canonical command names.
Then use that derived class as the ``cls`` parameter for a
``click.group`` decorator.
"""
MAP = {}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.