repository_name stringclasses 316 values | func_path_in_repository stringlengths 6 223 | func_name stringlengths 1 134 | language stringclasses 1 value | func_code_string stringlengths 57 65.5k | func_documentation_string stringlengths 1 46.3k | split_name stringclasses 1 value | func_code_url stringlengths 91 315 | called_functions listlengths 1 156 ⌀ | enclosing_scope stringlengths 2 1.48M |
|---|---|---|---|---|---|---|---|---|---|
unbit/sftpclone | sftpclone/sftpclone.py | create_parser | python | def create_parser():
parser = argparse.ArgumentParser(
description='Sync a local and a remote folder through SFTP.'
)
parser.add_argument(
"path",
type=str,
metavar="local-path",
help="the path of the local folder",
)
parser.add_argument(
"remote",
type=str,
metavar="user[:password]@hostname:remote-path",
help="the ssh-url ([user[:password]@]hostname:remote-path) of the remote folder. "
"The hostname can be specified as a ssh_config's hostname too. "
"Every missing information will be gathered from there",
)
parser.add_argument(
"-k",
"--key",
metavar="identity-path",
action="append",
help="private key identity path (defaults to ~/.ssh/id_rsa)"
)
parser.add_argument(
"-l",
"--logging",
choices=['CRITICAL',
'ERROR',
'WARNING',
'INFO',
'DEBUG',
'NOTSET'],
default='ERROR',
help="set logging level"
)
parser.add_argument(
"-p",
"--port",
default=22,
type=int,
help="SSH remote port (defaults to 22)"
)
parser.add_argument(
"-f",
"--fix-symlinks",
action="store_true",
help="fix symbolic links on remote side"
)
parser.add_argument(
"-a",
"--ssh-agent",
action="store_true",
help="enable ssh-agent support"
)
parser.add_argument(
"-c",
"--ssh-config",
metavar="ssh_config path",
default="~/.ssh/config",
type=str,
help="path to the ssh-configuration file (default to ~/.ssh/config)"
)
parser.add_argument(
"-n",
"--known-hosts",
metavar="known_hosts path",
default="~/.ssh/known_hosts",
type=str,
help="path to the openSSH known_hosts file"
)
parser.add_argument(
"-d",
"--disable-known-hosts",
action="store_true",
help="disable known_hosts fingerprint checking (security warning!)"
)
parser.add_argument(
"-e",
"--exclude-from",
metavar="exclude-from-file-path",
type=str,
help="exclude files matching pattern in exclude-from-file-path"
)
parser.add_argument(
"-t",
"--do-not-delete",
action="store_true",
help="do not delete remote files missing from local folder"
)
parser.add_argument(
"-o",
"--allow-unknown",
action="store_true",
help="allow connection to unknown hosts"
)
parser.add_argument(
"-r",
"--create-remote-directory",
action="store_true",
help="Create remote base directory if missing on remote"
)
return parser | Create the CLI argument parser. | train | https://github.com/unbit/sftpclone/blob/1cc89478e680fc4e0d12b1a15b5bafd0390d05da/sftpclone/sftpclone.py#L628-L747 | null | #!/usr/bin/env python
# coding=utf-8
# Python 2.7 backward compatibility
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
import paramiko
import paramiko.py3compat
import os
import os.path
import sys
import errno
from stat import S_ISDIR, S_ISLNK, S_ISREG, S_IMODE, S_IFMT
import argparse
import logging
from getpass import getuser, getpass
import glob
import socket
"""SFTPClone: sync local and remote directories."""
logger = None
try:
# Not available in Python 2.x
FileNotFoundError
except NameError:
FileNotFoundError = IOError
def configure_logging(level=logging.DEBUG):
"""Configure the module logging engine."""
if level == logging.DEBUG:
# For debugging purposes, log from everyone!
logging.basicConfig(
level=logging.DEBUG,
format='%(asctime)s - %(levelname)s - %(message)s'
)
return logging
logger = logging.getLogger(__name__)
logger.setLevel(level)
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
ch = logging.StreamHandler()
ch.setLevel(level)
ch.setFormatter(formatter)
logger.addHandler(ch)
return logger
def path_join(*args):
"""
Wrapper around `os.path.join`.
Makes sure to join paths of the same type (bytes).
"""
args = (paramiko.py3compat.u(arg) for arg in args)
return os.path.join(*args)
def parse_username_password_hostname(remote_url):
"""
Parse a command line string and return username, password, remote hostname and remote path.
:param remote_url: A command line string.
:return: A tuple, containing username, password, remote hostname and remote path.
"""
assert remote_url
assert ':' in remote_url
if '@' in remote_url:
username, hostname = remote_url.rsplit('@', 1)
else:
username, hostname = None, remote_url
hostname, remote_path = hostname.split(':', 1)
password = None
if username and ':' in username:
username, password = username.split(':', 1)
assert hostname
assert remote_path
return username, password, hostname, remote_path
def get_ssh_agent_keys(logger):
"""
Ask the SSH agent for a list of keys, and return it.
:return: A reference to the SSH agent and a list of keys.
"""
agent, agent_keys = None, None
try:
agent = paramiko.agent.Agent()
_agent_keys = agent.get_keys()
if not _agent_keys:
agent.close()
logger.error(
"SSH agent didn't provide any valid key. Trying to continue..."
)
else:
agent_keys = tuple(k for k in _agent_keys)
except paramiko.SSHException:
if agent:
agent.close()
agent = None
logger.error("SSH agent speaks a non-compatible protocol. Ignoring it.")
finally:
return agent, agent_keys
class SFTPClone(object):
"""The SFTPClone class."""
def __init__(self, local_path, remote_url,
identity_files=None, port=None, fix_symlinks=False,
ssh_config_path=None, ssh_agent=False,
exclude_file=None, known_hosts_path=None,
delete=True, allow_unknown=False,
create_remote_directory=False,
):
"""Init the needed parameters and the SFTPClient."""
self.local_path = os.path.realpath(os.path.expanduser(local_path))
self.logger = logger or configure_logging()
self.create_remote_directory = create_remote_directory
if not os.path.exists(self.local_path):
self.logger.error("Local path MUST exist. Exiting.")
sys.exit(1)
if exclude_file:
with open(exclude_file) as f:
# As in rsync's exclude from, ignore lines with leading ; and #
# and treat each path as relative (thus by removing the leading
# /)
exclude_list = [
line.rstrip().lstrip("/")
for line in f
if not line.startswith((";", "#"))
]
# actually, is a set of excluded files
self.exclude_list = {
g
for pattern in exclude_list
for g in glob.glob(path_join(self.local_path, pattern))
}
else:
self.exclude_list = set()
username, password, hostname, self.remote_path = parse_username_password_hostname(remote_url)
identity_files = identity_files or []
proxy_command = None
if ssh_config_path:
try:
with open(os.path.expanduser(ssh_config_path)) as c_file:
ssh_config = paramiko.SSHConfig()
ssh_config.parse(c_file)
c = ssh_config.lookup(hostname)
hostname = c.get("hostname", hostname)
username = c.get("user", username)
port = int(c.get("port", port))
identity_files = c.get("identityfile", identity_files)
proxy_command = c.get("proxycommand")
except Exception as e:
# it could be safe to continue anyway,
# because parameters could have been manually specified
self.logger.error(
"Error while parsing ssh_config file: %s. Trying to continue anyway...", e
)
# Set default values
if not username:
username = getuser() # defaults to current user
port = port or 22
allow_unknown = allow_unknown or False
self.chown = False
self.fix_symlinks = fix_symlinks or False
self.delete = delete if delete is not None else True
if ssh_agent:
agent, agent_keys = get_ssh_agent_keys(self.logger)
else:
agent, agent_keys = None, None
if not identity_files and not password and not agent_keys:
self.logger.error(
"You need to specify either a password, an identity or to enable the ssh-agent support."
)
sys.exit(1)
# only root can change file owner
if username == 'root':
self.chown = True
sock = (hostname, port)
if proxy_command is not None:
sock = paramiko.proxy.ProxyCommand(proxy_command)
try:
transport = paramiko.Transport(sock)
except socket.gaierror:
self.logger.error(
"Hostname not known. Are you sure you inserted it correctly?")
sys.exit(1)
try:
ssh_host = hostname if port == 22 else "[{}]:{}".format(hostname, port)
known_hosts = None
"""
Before starting the transport session, we have to configure it.
Specifically, we need to configure the preferred PK algorithm.
If the system already knows a public key of a specific kind for
a remote host, we have to peek its type as the preferred one.
"""
if known_hosts_path:
known_hosts = paramiko.HostKeys()
known_hosts_path = os.path.realpath(
os.path.expanduser(known_hosts_path))
try:
known_hosts.load(known_hosts_path)
except IOError:
self.logger.error(
"Error while loading known hosts file at {}. Exiting...".format(
known_hosts_path)
)
sys.exit(1)
known_keys = known_hosts.lookup(ssh_host)
if known_keys is not None:
# one or more keys are already known
# set their type as preferred
transport.get_security_options().key_types = \
tuple(known_keys.keys())
transport.start_client()
if not known_hosts:
self.logger.warning("Security warning: skipping known hosts check...")
else:
pubk = transport.get_remote_server_key()
if ssh_host in known_hosts.keys():
if not known_hosts.check(ssh_host, pubk):
self.logger.error(
"Security warning: "
"remote key fingerprint {} for hostname "
"{} didn't match the one in known_hosts {}. "
"Exiting...".format(
pubk.get_base64(),
ssh_host,
known_hosts.lookup(hostname),
)
)
sys.exit(1)
elif not allow_unknown:
prompt = ("The authenticity of host '{}' can't be established.\n"
"{} key is {}.\n"
"Are you sure you want to continue connecting? [y/n] ").format(
ssh_host, pubk.get_name(), pubk.get_base64())
try:
# Renamed to `input` in Python 3.x
response = raw_input(prompt)
except NameError:
response = input(prompt)
# Note: we do not modify the user's known_hosts file
if not (response == "y" or response == "yes"):
self.logger.error(
"Host authentication failed."
)
sys.exit(1)
def perform_key_auth(pkey):
try:
transport.auth_publickey(
username=username,
key=pkey
)
return True
except paramiko.SSHException:
self.logger.warning(
"Authentication with identity {}... failed".format(pkey.get_base64()[:10])
)
return False
if password: # Password auth, if specified.
transport.auth_password(
username=username,
password=password
)
elif agent_keys: # SSH agent keys have higher priority
for pkey in agent_keys:
if perform_key_auth(pkey):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
elif identity_files: # Then follow identity file (specified from CL or ssh_config)
# Try identity files one by one, until one works
for key_path in identity_files:
key_path = os.path.expanduser(key_path)
try:
key = paramiko.RSAKey.from_private_key_file(key_path)
except paramiko.PasswordRequiredException:
pk_password = getpass(
"It seems that your identity from '{}' is encrypted. "
"Please enter your password: ".format(key_path)
)
try:
key = paramiko.RSAKey.from_private_key_file(key_path, pk_password)
except paramiko.SSHException:
self.logger.error(
"Incorrect passphrase. Cannot decode private key from '{}'.".format(key_path)
)
continue
except IOError or paramiko.SSHException:
self.logger.error(
"Something went wrong while opening '{}'. Skipping it.".format(key_path)
)
continue
if perform_key_auth(key):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
else: # No authentication method specified, we shouldn't arrive here.
assert False
except paramiko.SSHException:
self.logger.error(
"None of the provided authentication methods worked. Exiting."
)
transport.close()
sys.exit(1)
finally:
if agent:
agent.close()
self.sftp = paramiko.SFTPClient.from_transport(transport)
if self.remote_path.startswith("~"):
# nasty hack to let getcwd work without changing dir!
self.sftp.chdir('.')
self.remote_path = self.remote_path.replace(
"~", self.sftp.getcwd()) # home is the initial sftp dir
@staticmethod
def _file_need_upload(l_st, r_st):
return True if \
l_st.st_size != r_st.st_size or int(l_st.st_mtime) != r_st.st_mtime \
else False
@staticmethod
def _must_be_deleted(local_path, r_st):
"""Return True if the remote correspondent of local_path has to be deleted.
i.e. if it doesn't exists locally or if it has a different type from the remote one."""
# if the file doesn't exists
if not os.path.lexists(local_path):
return True
# or if the file type is different
l_st = os.lstat(local_path)
if S_IFMT(r_st.st_mode) != S_IFMT(l_st.st_mode):
return True
return False
def _match_modes(self, remote_path, l_st):
"""Match mod, utime and uid/gid with locals one."""
self.sftp.chmod(remote_path, S_IMODE(l_st.st_mode))
self.sftp.utime(remote_path, (l_st.st_atime, l_st.st_mtime))
if self.chown:
self.sftp.chown(remote_path, l_st.st_uid, l_st.st_gid)
def file_upload(self, local_path, remote_path, l_st):
"""Upload local_path to remote_path and set permission and mtime."""
self.sftp.put(local_path, remote_path)
self._match_modes(remote_path, l_st)
def remote_delete(self, remote_path, r_st):
"""Remove the remote directory node."""
# If it's a directory, then delete content and directory
if S_ISDIR(r_st.st_mode):
for item in self.sftp.listdir_attr(remote_path):
full_path = path_join(remote_path, item.filename)
self.remote_delete(full_path, item)
self.sftp.rmdir(remote_path)
# Or simply delete files
else:
try:
self.sftp.remove(remote_path)
except FileNotFoundError as e:
self.logger.error(
"error while removing {}. trace: {}".format(remote_path, e)
)
def check_for_deletion(self, relative_path=None):
"""Traverse the entire remote_path tree.
Find files/directories that need to be deleted,
not being present in the local folder.
"""
if not relative_path:
relative_path = str() # root of shared directory tree
remote_path = path_join(self.remote_path, relative_path)
local_path = path_join(self.local_path, relative_path)
for remote_st in self.sftp.listdir_attr(remote_path):
r_lstat = self.sftp.lstat(path_join(remote_path, remote_st.filename))
inner_remote_path = path_join(remote_path, remote_st.filename)
inner_local_path = path_join(local_path, remote_st.filename)
# check if remote_st is a symlink
# otherwise could delete file outside shared directory
if S_ISLNK(r_lstat.st_mode):
if self._must_be_deleted(inner_local_path, r_lstat):
self.remote_delete(inner_remote_path, r_lstat)
continue
if self._must_be_deleted(inner_local_path, remote_st):
self.remote_delete(inner_remote_path, remote_st)
elif S_ISDIR(remote_st.st_mode):
self.check_for_deletion(
path_join(relative_path, remote_st.filename)
)
def create_update_symlink(self, link_destination, remote_path):
"""Create a new link pointing to link_destination in remote_path position."""
try: # if there's anything, delete it
self.sftp.remove(remote_path)
except IOError: # that's fine, nothing exists there!
pass
finally: # and recreate the link
try:
self.sftp.symlink(link_destination, remote_path)
except OSError as e:
# Sometimes, if links are "too" different, symlink fails.
# Sadly, nothing we can do about it.
self.logger.error("error while symlinking {} to {}: {}".format(
remote_path, link_destination, e))
def node_check_for_upload_create(self, relative_path, f):
"""Check if the given directory tree node has to be uploaded/created on the remote folder."""
if not relative_path:
# we're at the root of the shared directory tree
relative_path = str()
# the (absolute) local address of f.
local_path = path_join(self.local_path, relative_path, f)
try:
l_st = os.lstat(local_path)
except OSError as e:
"""A little background here.
Sometimes, in big clusters configurations (mail, etc.),
files could disappear or be moved, suddenly.
There's nothing to do about it,
system should be stopped before doing backups.
Anyway, we log it, and skip it.
"""
self.logger.error("error while checking {}: {}".format(relative_path, e))
return
if local_path in self.exclude_list:
self.logger.info("Skipping excluded file %s.", local_path)
return
# the (absolute) remote address of f.
remote_path = path_join(self.remote_path, relative_path, f)
# First case: f is a directory
if S_ISDIR(l_st.st_mode):
# we check if the folder exists on the remote side
# it has to be a folder, otherwise it would have already been
# deleted
try:
self.sftp.stat(remote_path)
except IOError: # it doesn't exist yet on remote side
self.sftp.mkdir(remote_path)
self._match_modes(remote_path, l_st)
# now, we should traverse f too (recursion magic!)
self.check_for_upload_create(path_join(relative_path, f))
# Second case: f is a symbolic link
elif S_ISLNK(l_st.st_mode):
# read the local link
local_link = os.readlink(local_path)
absolute_local_link = os.path.realpath(local_link)
# is it absolute?
is_absolute = local_link.startswith("/")
# and does it point inside the shared directory?
# add trailing slash (security)
trailing_local_path = path_join(self.local_path, '')
relpath = os.path.commonprefix(
[absolute_local_link,
trailing_local_path]
) == trailing_local_path
if relpath:
relative_link = absolute_local_link[len(trailing_local_path):]
else:
relative_link = None
"""
# Refactor them all, be efficient!
# Case A: absolute link pointing outside shared directory
# (we can only update the remote part)
if is_absolute and not relpath:
self.create_update_symlink(local_link, remote_path)
# Case B: absolute link pointing inside shared directory
# (we can leave it as it is or fix the prefix to match the one of the remote server)
elif is_absolute and relpath:
if self.fix_symlinks:
self.create_update_symlink(
join(
self.remote_path,
relative_link,
),
remote_path
)
else:
self.create_update_symlink(local_link, remote_path)
# Case C: relative link pointing outside shared directory
# (all we can do is try to make the link anyway)
elif not is_absolute and not relpath:
self.create_update_symlink(local_link, remote_path)
# Case D: relative link pointing inside shared directory
# (we preserve the relativity and link it!)
elif not is_absolute and relpath:
self.create_update_symlink(local_link, remote_path)
"""
if is_absolute and relpath:
if self.fix_symlinks:
self.create_update_symlink(
path_join(
self.remote_path,
relative_link,
),
remote_path
)
else:
self.create_update_symlink(local_link, remote_path)
# Third case: regular file
elif S_ISREG(l_st.st_mode):
try:
r_st = self.sftp.lstat(remote_path)
if self._file_need_upload(l_st, r_st):
self.file_upload(local_path, remote_path, l_st)
except IOError as e:
if e.errno == errno.ENOENT:
self.file_upload(local_path, remote_path, l_st)
# Anything else.
else:
self.logger.warning("Skipping unsupported file %s.", local_path)
def check_for_upload_create(self, relative_path=None):
"""Traverse the relative_path tree and check for files that need to be uploaded/created.
Relativity here refers to the shared directory tree."""
for f in os.listdir(
path_join(
self.local_path, relative_path) if relative_path else self.local_path
):
self.node_check_for_upload_create(relative_path, f)
def run(self):
"""Run the sync.
Confront the local and the remote directories and perform the needed changes."""
# Check if remote path is present
try:
self.sftp.stat(self.remote_path)
except FileNotFoundError as e:
if self.create_remote_directory:
self.sftp.mkdir(self.remote_path)
self.logger.info(
"Created missing remote dir: '" + self.remote_path + "'")
else:
self.logger.error(
"Remote folder does not exists. "
"Add '-r' to create it if missing.")
sys.exit(1)
try:
if self.delete:
# First check for items to be removed
self.check_for_deletion()
# Now scan local for items to upload/create
self.check_for_upload_create()
except FileNotFoundError:
# If this happens, probably the remote folder doesn't exist.
self.logger.error(
"Error while opening remote folder. Are you sure it does exist?")
sys.exit(1)
def main(args=None):
"""The main."""
parser = create_parser()
args = vars(parser.parse_args(args))
log_mapping = {
'CRITICAL': logging.CRITICAL,
'ERROR': logging.ERROR,
'WARNING': logging.WARNING,
'INFO': logging.INFO,
'DEBUG': logging.DEBUG,
'NOTSET': logging.NOTSET,
}
log_level = log_mapping[args['logging']]
del(args['logging'])
global logger
logger = configure_logging(log_level)
args_mapping = {
"path": "local_path",
"remote": "remote_url",
"ssh_config": "ssh_config_path",
"exclude_from": "exclude_file",
"known_hosts": "known_hosts_path",
"do_not_delete": "delete",
"key": "identity_files",
}
kwargs = { # convert the argument names to class constructor parameters
args_mapping[k]: v
for k, v in args.items()
if v and k in args_mapping
}
kwargs.update({
k: v
for k, v in args.items()
if v and k not in args_mapping
})
# Special case: disable known_hosts check
if args['disable_known_hosts']:
kwargs['known_hosts_path'] = None
del(kwargs['disable_known_hosts'])
# Toggle `do_not_delete` flag
if "delete" in kwargs:
kwargs["delete"] = not kwargs["delete"]
# Manually set the default identity file.
kwargs["identity_files"] = kwargs.get("identity_files", None) or ["~/.ssh/id_rsa"]
sync = SFTPClone(
**kwargs
)
sync.run()
if __name__ == '__main__':
main()
|
unbit/sftpclone | sftpclone/sftpclone.py | main | python | def main(args=None):
parser = create_parser()
args = vars(parser.parse_args(args))
log_mapping = {
'CRITICAL': logging.CRITICAL,
'ERROR': logging.ERROR,
'WARNING': logging.WARNING,
'INFO': logging.INFO,
'DEBUG': logging.DEBUG,
'NOTSET': logging.NOTSET,
}
log_level = log_mapping[args['logging']]
del(args['logging'])
global logger
logger = configure_logging(log_level)
args_mapping = {
"path": "local_path",
"remote": "remote_url",
"ssh_config": "ssh_config_path",
"exclude_from": "exclude_file",
"known_hosts": "known_hosts_path",
"do_not_delete": "delete",
"key": "identity_files",
}
kwargs = { # convert the argument names to class constructor parameters
args_mapping[k]: v
for k, v in args.items()
if v and k in args_mapping
}
kwargs.update({
k: v
for k, v in args.items()
if v and k not in args_mapping
})
# Special case: disable known_hosts check
if args['disable_known_hosts']:
kwargs['known_hosts_path'] = None
del(kwargs['disable_known_hosts'])
# Toggle `do_not_delete` flag
if "delete" in kwargs:
kwargs["delete"] = not kwargs["delete"]
# Manually set the default identity file.
kwargs["identity_files"] = kwargs.get("identity_files", None) or ["~/.ssh/id_rsa"]
sync = SFTPClone(
**kwargs
)
sync.run() | The main. | train | https://github.com/unbit/sftpclone/blob/1cc89478e680fc4e0d12b1a15b5bafd0390d05da/sftpclone/sftpclone.py#L750-L806 | [
"def configure_logging(level=logging.DEBUG):\n \"\"\"Configure the module logging engine.\"\"\"\n if level == logging.DEBUG:\n # For debugging purposes, log from everyone!\n logging.basicConfig(\n level=logging.DEBUG,\n format='%(asctime)s - %(levelname)s - %(message)s'\n )\n return logging\n\n logger = logging.getLogger(__name__)\n logger.setLevel(level)\n formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')\n ch = logging.StreamHandler()\n ch.setLevel(level)\n ch.setFormatter(formatter)\n logger.addHandler(ch)\n return logger\n",
"def create_parser():\n \"\"\"Create the CLI argument parser.\"\"\"\n parser = argparse.ArgumentParser(\n description='Sync a local and a remote folder through SFTP.'\n )\n\n parser.add_argument(\n \"path\",\n type=str,\n metavar=\"local-path\",\n help=\"the path of the local folder\",\n )\n\n parser.add_argument(\n \"remote\",\n type=str,\n metavar=\"user[:password]@hostname:remote-path\",\n help=\"the ssh-url ([user[:password]@]hostname:remote-path) of the remote folder. \"\n \"The hostname can be specified as a ssh_config's hostname too. \"\n \"Every missing information will be gathered from there\",\n )\n\n parser.add_argument(\n \"-k\",\n \"--key\",\n metavar=\"identity-path\",\n action=\"append\",\n help=\"private key identity path (defaults to ~/.ssh/id_rsa)\"\n )\n\n parser.add_argument(\n \"-l\",\n \"--logging\",\n choices=['CRITICAL',\n 'ERROR',\n 'WARNING',\n 'INFO',\n 'DEBUG',\n 'NOTSET'],\n default='ERROR',\n help=\"set logging level\"\n )\n\n parser.add_argument(\n \"-p\",\n \"--port\",\n default=22,\n type=int,\n help=\"SSH remote port (defaults to 22)\"\n )\n\n parser.add_argument(\n \"-f\",\n \"--fix-symlinks\",\n action=\"store_true\",\n help=\"fix symbolic links on remote side\"\n )\n\n parser.add_argument(\n \"-a\",\n \"--ssh-agent\",\n action=\"store_true\",\n help=\"enable ssh-agent support\"\n )\n\n parser.add_argument(\n \"-c\",\n \"--ssh-config\",\n metavar=\"ssh_config path\",\n default=\"~/.ssh/config\",\n type=str,\n help=\"path to the ssh-configuration file (default to ~/.ssh/config)\"\n )\n\n parser.add_argument(\n \"-n\",\n \"--known-hosts\",\n metavar=\"known_hosts path\",\n default=\"~/.ssh/known_hosts\",\n type=str,\n help=\"path to the openSSH known_hosts file\"\n )\n\n parser.add_argument(\n \"-d\",\n \"--disable-known-hosts\",\n action=\"store_true\",\n help=\"disable known_hosts fingerprint checking (security warning!)\"\n )\n\n parser.add_argument(\n \"-e\",\n \"--exclude-from\",\n metavar=\"exclude-from-file-path\",\n 
type=str,\n help=\"exclude files matching pattern in exclude-from-file-path\"\n )\n\n parser.add_argument(\n \"-t\",\n \"--do-not-delete\",\n action=\"store_true\",\n help=\"do not delete remote files missing from local folder\"\n )\n\n parser.add_argument(\n \"-o\",\n \"--allow-unknown\",\n action=\"store_true\",\n help=\"allow connection to unknown hosts\"\n )\n\n parser.add_argument(\n \"-r\",\n \"--create-remote-directory\",\n action=\"store_true\",\n help=\"Create remote base directory if missing on remote\"\n )\n\n return parser\n",
"def run(self):\n \"\"\"Run the sync.\n\n Confront the local and the remote directories and perform the needed changes.\"\"\"\n\n # Check if remote path is present\n try:\n self.sftp.stat(self.remote_path)\n except FileNotFoundError as e:\n if self.create_remote_directory:\n self.sftp.mkdir(self.remote_path)\n self.logger.info(\n \"Created missing remote dir: '\" + self.remote_path + \"'\")\n else:\n self.logger.error(\n \"Remote folder does not exists. \"\n \"Add '-r' to create it if missing.\")\n sys.exit(1)\n\n try:\n if self.delete:\n # First check for items to be removed\n self.check_for_deletion()\n\n # Now scan local for items to upload/create\n self.check_for_upload_create()\n except FileNotFoundError:\n # If this happens, probably the remote folder doesn't exist.\n self.logger.error(\n \"Error while opening remote folder. Are you sure it does exist?\")\n sys.exit(1)\n"
] | #!/usr/bin/env python
# coding=utf-8
# Python 2.7 backward compatibility
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
import paramiko
import paramiko.py3compat
import os
import os.path
import sys
import errno
from stat import S_ISDIR, S_ISLNK, S_ISREG, S_IMODE, S_IFMT
import argparse
import logging
from getpass import getuser, getpass
import glob
import socket
"""SFTPClone: sync local and remote directories."""
logger = None
try:
# Not available in Python 2.x
FileNotFoundError
except NameError:
FileNotFoundError = IOError
def configure_logging(level=logging.DEBUG):
"""Configure the module logging engine."""
if level == logging.DEBUG:
# For debugging purposes, log from everyone!
logging.basicConfig(
level=logging.DEBUG,
format='%(asctime)s - %(levelname)s - %(message)s'
)
return logging
logger = logging.getLogger(__name__)
logger.setLevel(level)
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
ch = logging.StreamHandler()
ch.setLevel(level)
ch.setFormatter(formatter)
logger.addHandler(ch)
return logger
def path_join(*args):
"""
Wrapper around `os.path.join`.
Makes sure to join paths of the same type (bytes).
"""
args = (paramiko.py3compat.u(arg) for arg in args)
return os.path.join(*args)
def parse_username_password_hostname(remote_url):
"""
Parse a command line string and return username, password, remote hostname and remote path.
:param remote_url: A command line string.
:return: A tuple, containing username, password, remote hostname and remote path.
"""
assert remote_url
assert ':' in remote_url
if '@' in remote_url:
username, hostname = remote_url.rsplit('@', 1)
else:
username, hostname = None, remote_url
hostname, remote_path = hostname.split(':', 1)
password = None
if username and ':' in username:
username, password = username.split(':', 1)
assert hostname
assert remote_path
return username, password, hostname, remote_path
def get_ssh_agent_keys(logger):
"""
Ask the SSH agent for a list of keys, and return it.
:return: A reference to the SSH agent and a list of keys.
"""
agent, agent_keys = None, None
try:
agent = paramiko.agent.Agent()
_agent_keys = agent.get_keys()
if not _agent_keys:
agent.close()
logger.error(
"SSH agent didn't provide any valid key. Trying to continue..."
)
else:
agent_keys = tuple(k for k in _agent_keys)
except paramiko.SSHException:
if agent:
agent.close()
agent = None
logger.error("SSH agent speaks a non-compatible protocol. Ignoring it.")
finally:
return agent, agent_keys
class SFTPClone(object):
"""The SFTPClone class."""
def __init__(self, local_path, remote_url,
identity_files=None, port=None, fix_symlinks=False,
ssh_config_path=None, ssh_agent=False,
exclude_file=None, known_hosts_path=None,
delete=True, allow_unknown=False,
create_remote_directory=False,
):
"""Init the needed parameters and the SFTPClient."""
self.local_path = os.path.realpath(os.path.expanduser(local_path))
self.logger = logger or configure_logging()
self.create_remote_directory = create_remote_directory
if not os.path.exists(self.local_path):
self.logger.error("Local path MUST exist. Exiting.")
sys.exit(1)
if exclude_file:
with open(exclude_file) as f:
# As in rsync's exclude from, ignore lines with leading ; and #
# and treat each path as relative (thus by removing the leading
# /)
exclude_list = [
line.rstrip().lstrip("/")
for line in f
if not line.startswith((";", "#"))
]
# actually, is a set of excluded files
self.exclude_list = {
g
for pattern in exclude_list
for g in glob.glob(path_join(self.local_path, pattern))
}
else:
self.exclude_list = set()
username, password, hostname, self.remote_path = parse_username_password_hostname(remote_url)
identity_files = identity_files or []
proxy_command = None
if ssh_config_path:
try:
with open(os.path.expanduser(ssh_config_path)) as c_file:
ssh_config = paramiko.SSHConfig()
ssh_config.parse(c_file)
c = ssh_config.lookup(hostname)
hostname = c.get("hostname", hostname)
username = c.get("user", username)
port = int(c.get("port", port))
identity_files = c.get("identityfile", identity_files)
proxy_command = c.get("proxycommand")
except Exception as e:
# it could be safe to continue anyway,
# because parameters could have been manually specified
self.logger.error(
"Error while parsing ssh_config file: %s. Trying to continue anyway...", e
)
# Set default values
if not username:
username = getuser() # defaults to current user
port = port or 22
allow_unknown = allow_unknown or False
self.chown = False
self.fix_symlinks = fix_symlinks or False
self.delete = delete if delete is not None else True
if ssh_agent:
agent, agent_keys = get_ssh_agent_keys(self.logger)
else:
agent, agent_keys = None, None
if not identity_files and not password and not agent_keys:
self.logger.error(
"You need to specify either a password, an identity or to enable the ssh-agent support."
)
sys.exit(1)
# only root can change file owner
if username == 'root':
self.chown = True
sock = (hostname, port)
if proxy_command is not None:
sock = paramiko.proxy.ProxyCommand(proxy_command)
try:
transport = paramiko.Transport(sock)
except socket.gaierror:
self.logger.error(
"Hostname not known. Are you sure you inserted it correctly?")
sys.exit(1)
try:
ssh_host = hostname if port == 22 else "[{}]:{}".format(hostname, port)
known_hosts = None
"""
Before starting the transport session, we have to configure it.
Specifically, we need to configure the preferred PK algorithm.
If the system already knows a public key of a specific kind for
a remote host, we have to peek its type as the preferred one.
"""
if known_hosts_path:
known_hosts = paramiko.HostKeys()
known_hosts_path = os.path.realpath(
os.path.expanduser(known_hosts_path))
try:
known_hosts.load(known_hosts_path)
except IOError:
self.logger.error(
"Error while loading known hosts file at {}. Exiting...".format(
known_hosts_path)
)
sys.exit(1)
known_keys = known_hosts.lookup(ssh_host)
if known_keys is not None:
# one or more keys are already known
# set their type as preferred
transport.get_security_options().key_types = \
tuple(known_keys.keys())
transport.start_client()
if not known_hosts:
self.logger.warning("Security warning: skipping known hosts check...")
else:
pubk = transport.get_remote_server_key()
if ssh_host in known_hosts.keys():
if not known_hosts.check(ssh_host, pubk):
self.logger.error(
"Security warning: "
"remote key fingerprint {} for hostname "
"{} didn't match the one in known_hosts {}. "
"Exiting...".format(
pubk.get_base64(),
ssh_host,
known_hosts.lookup(hostname),
)
)
sys.exit(1)
elif not allow_unknown:
prompt = ("The authenticity of host '{}' can't be established.\n"
"{} key is {}.\n"
"Are you sure you want to continue connecting? [y/n] ").format(
ssh_host, pubk.get_name(), pubk.get_base64())
try:
# Renamed to `input` in Python 3.x
response = raw_input(prompt)
except NameError:
response = input(prompt)
# Note: we do not modify the user's known_hosts file
if not (response == "y" or response == "yes"):
self.logger.error(
"Host authentication failed."
)
sys.exit(1)
def perform_key_auth(pkey):
try:
transport.auth_publickey(
username=username,
key=pkey
)
return True
except paramiko.SSHException:
self.logger.warning(
"Authentication with identity {}... failed".format(pkey.get_base64()[:10])
)
return False
if password: # Password auth, if specified.
transport.auth_password(
username=username,
password=password
)
elif agent_keys: # SSH agent keys have higher priority
for pkey in agent_keys:
if perform_key_auth(pkey):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
elif identity_files: # Then follow identity file (specified from CL or ssh_config)
# Try identity files one by one, until one works
for key_path in identity_files:
key_path = os.path.expanduser(key_path)
try:
key = paramiko.RSAKey.from_private_key_file(key_path)
except paramiko.PasswordRequiredException:
pk_password = getpass(
"It seems that your identity from '{}' is encrypted. "
"Please enter your password: ".format(key_path)
)
try:
key = paramiko.RSAKey.from_private_key_file(key_path, pk_password)
except paramiko.SSHException:
self.logger.error(
"Incorrect passphrase. Cannot decode private key from '{}'.".format(key_path)
)
continue
except IOError or paramiko.SSHException:
self.logger.error(
"Something went wrong while opening '{}'. Skipping it.".format(key_path)
)
continue
if perform_key_auth(key):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
else: # No authentication method specified, we shouldn't arrive here.
assert False
except paramiko.SSHException:
self.logger.error(
"None of the provided authentication methods worked. Exiting."
)
transport.close()
sys.exit(1)
finally:
if agent:
agent.close()
self.sftp = paramiko.SFTPClient.from_transport(transport)
if self.remote_path.startswith("~"):
# nasty hack to let getcwd work without changing dir!
self.sftp.chdir('.')
self.remote_path = self.remote_path.replace(
"~", self.sftp.getcwd()) # home is the initial sftp dir
@staticmethod
def _file_need_upload(l_st, r_st):
return True if \
l_st.st_size != r_st.st_size or int(l_st.st_mtime) != r_st.st_mtime \
else False
@staticmethod
def _must_be_deleted(local_path, r_st):
"""Return True if the remote correspondent of local_path has to be deleted.
i.e. if it doesn't exists locally or if it has a different type from the remote one."""
# if the file doesn't exists
if not os.path.lexists(local_path):
return True
# or if the file type is different
l_st = os.lstat(local_path)
if S_IFMT(r_st.st_mode) != S_IFMT(l_st.st_mode):
return True
return False
    def _match_modes(self, remote_path, l_st):
        """Mirror the local node's metadata onto the remote one.

        Copies the permission bits and the access/modification times from the
        local stat result ``l_st`` to ``remote_path``.  Ownership (uid/gid)
        is copied only when ``self.chown`` is set — the constructor enables
        it only for the root user, since only root may change file owners.
        """
        self.sftp.chmod(remote_path, S_IMODE(l_st.st_mode))
        self.sftp.utime(remote_path, (l_st.st_atime, l_st.st_mtime))
        if self.chown:
            self.sftp.chown(remote_path, l_st.st_uid, l_st.st_gid)
    def file_upload(self, local_path, remote_path, l_st):
        """Upload local_path to remote_path and set permission and mtime.

        ``l_st`` is the local ``os.lstat`` result; after the transfer it is
        used to replicate permissions and timestamps (and ownership, when
        running as root) onto the freshly uploaded remote file.
        """
        self.sftp.put(local_path, remote_path)
        self._match_modes(remote_path, l_st)
    def remote_delete(self, remote_path, r_st):
        """Recursively remove the remote node at ``remote_path``.

        ``r_st`` is the remote stat of the node: directories are emptied
        depth-first and then removed, anything else is unlinked directly.
        """
        # If it's a directory, then delete content and directory.
        if S_ISDIR(r_st.st_mode):
            # A directory cannot be removed while non-empty: recurse into
            # the children first, then drop the (now empty) directory.
            for item in self.sftp.listdir_attr(remote_path):
                full_path = path_join(remote_path, item.filename)
                self.remote_delete(full_path, item)
            self.sftp.rmdir(remote_path)
        # Or simply delete files.
        else:
            try:
                self.sftp.remove(remote_path)
            except FileNotFoundError as e:
                # The file vanished in the meantime: log it and carry on.
                # NOTE(review): FileNotFoundError exists only on Python 3;
                # this module still carries Python 2 fallbacks (raw_input),
                # so confirm whether IOError was the intended exception here.
                self.logger.error(
                    "error while removing {}. trace: {}".format(remote_path, e)
                )
    def check_for_deletion(self, relative_path=None):
        """Traverse the entire remote_path tree.

        Find files/directories that need to be deleted,
        not being present in the local folder.
        ``relative_path`` is the position inside the shared directory tree;
        ``None``/empty means the root of the tree.
        """
        if not relative_path:
            relative_path = str()  # root of shared directory tree

        remote_path = path_join(self.remote_path, relative_path)
        local_path = path_join(self.local_path, relative_path)

        for remote_st in self.sftp.listdir_attr(remote_path):
            # lstat so that a symlink is seen as a link, not as its target.
            r_lstat = self.sftp.lstat(path_join(remote_path, remote_st.filename))

            inner_remote_path = path_join(remote_path, remote_st.filename)
            inner_local_path = path_join(local_path, remote_st.filename)

            # check if remote_st is a symlink
            # otherwise could delete file outside shared directory
            if S_ISLNK(r_lstat.st_mode):
                if self._must_be_deleted(inner_local_path, r_lstat):
                    self.remote_delete(inner_remote_path, r_lstat)
                continue

            if self._must_be_deleted(inner_local_path, remote_st):
                self.remote_delete(inner_remote_path, remote_st)
            elif S_ISDIR(remote_st.st_mode):
                # Same-type directory on both sides: recurse into it.
                self.check_for_deletion(
                    path_join(relative_path, remote_st.filename)
                )
    def create_update_symlink(self, link_destination, remote_path):
        """Create a new link pointing to link_destination in remote_path position.

        Any pre-existing node at ``remote_path`` is removed first, so this
        also serves to *update* an outdated link.  The ``finally`` ensures
        the symlink attempt happens whether or not the removal succeeded.
        """
        try:  # if there's anything, delete it
            self.sftp.remove(remote_path)
        except IOError:  # that's fine, nothing exists there!
            pass
        finally:  # and recreate the link
            try:
                self.sftp.symlink(link_destination, remote_path)
            except OSError as e:
                # Sometimes, if links are "too" different, symlink fails.
                # Sadly, nothing we can do about it: just log the failure.
                self.logger.error("error while symlinking {} to {}: {}".format(
                    remote_path, link_destination, e))
    def node_check_for_upload_create(self, relative_path, f):
        """Check if the given directory tree node has to be uploaded/created on the remote folder.

        ``relative_path`` locates the parent inside the shared tree (``None``
        or empty means the root); ``f`` is the entry name.  Handles four node
        kinds: directories (mkdir + recurse), symlinks (recreate, possibly
        rewriting the prefix), regular files (upload if outdated), and
        anything else (skipped with a warning).
        """
        if not relative_path:
            # we're at the root of the shared directory tree
            relative_path = str()

        # the (absolute) local address of f.
        local_path = path_join(self.local_path, relative_path, f)
        try:
            l_st = os.lstat(local_path)
        except OSError as e:
            """A little background here.
            Sometimes, in big clusters configurations (mail, etc.),
            files could disappear or be moved, suddenly.
            There's nothing to do about it,
            system should be stopped before doing backups.
            Anyway, we log it, and skip it.
            """
            self.logger.error("error while checking {}: {}".format(relative_path, e))
            return

        if local_path in self.exclude_list:
            self.logger.info("Skipping excluded file %s.", local_path)
            return

        # the (absolute) remote address of f.
        remote_path = path_join(self.remote_path, relative_path, f)

        # First case: f is a directory
        if S_ISDIR(l_st.st_mode):
            # we check if the folder exists on the remote side
            # it has to be a folder, otherwise it would have already been
            # deleted
            try:
                self.sftp.stat(remote_path)
            except IOError:  # it doesn't exist yet on remote side
                self.sftp.mkdir(remote_path)
            self._match_modes(remote_path, l_st)
            # now, we should traverse f too (recursion magic!)
            self.check_for_upload_create(path_join(relative_path, f))

        # Second case: f is a symbolic link
        elif S_ISLNK(l_st.st_mode):
            # read the local link
            local_link = os.readlink(local_path)
            absolute_local_link = os.path.realpath(local_link)

            # is it absolute?
            is_absolute = local_link.startswith("/")
            # and does it point inside the shared directory?
            # add trailing slash (security)
            trailing_local_path = path_join(self.local_path, '')
            relpath = os.path.commonprefix(
                [absolute_local_link,
                 trailing_local_path]
            ) == trailing_local_path

            if relpath:
                relative_link = absolute_local_link[len(trailing_local_path):]
            else:
                relative_link = None

            """
            # Refactor them all, be efficient!

            # Case A: absolute link pointing outside shared directory
            #         (we can only update the remote part)
            if is_absolute and not relpath:
                self.create_update_symlink(local_link, remote_path)

            # Case B: absolute link pointing inside shared directory
            #         (we can leave it as it is or fix the prefix to match the one of the remote server)
            elif is_absolute and relpath:
                if self.fix_symlinks:
                    self.create_update_symlink(
                        join(
                            self.remote_path,
                            relative_link,
                        ),
                        remote_path
                    )
                else:
                    self.create_update_symlink(local_link, remote_path)

            # Case C: relative link pointing outside shared directory
            #         (all we can do is try to make the link anyway)
            elif not is_absolute and not relpath:
                self.create_update_symlink(local_link, remote_path)

            # Case D: relative link pointing inside shared directory
            #         (we preserve the relativity and link it!)
            elif not is_absolute and relpath:
                self.create_update_symlink(local_link, remote_path)
            """

            # Only absolute links pointing *inside* the shared directory may
            # need their prefix rewritten; every other case recreates the
            # link with the local link text verbatim.
            if is_absolute and relpath:
                if self.fix_symlinks:
                    self.create_update_symlink(
                        path_join(
                            self.remote_path,
                            relative_link,
                        ),
                        remote_path
                    )
                # NOTE(review): when fix_symlinks is off, an absolute link
                # pointing inside the shared tree is left untouched on the
                # remote side (no create_update_symlink call), while the
                # commented-out "Case B" above recreated it — confirm which
                # behavior is intended.
            else:
                self.create_update_symlink(local_link, remote_path)

        # Third case: regular file
        elif S_ISREG(l_st.st_mode):
            try:
                r_st = self.sftp.lstat(remote_path)
                if self._file_need_upload(l_st, r_st):
                    self.file_upload(local_path, remote_path, l_st)
            except IOError as e:
                if e.errno == errno.ENOENT:
                    self.file_upload(local_path, remote_path, l_st)

        # Anything else.
        else:
            self.logger.warning("Skipping unsupported file %s.", local_path)
def check_for_upload_create(self, relative_path=None):
"""Traverse the relative_path tree and check for files that need to be uploaded/created.
Relativity here refers to the shared directory tree."""
for f in os.listdir(
path_join(
self.local_path, relative_path) if relative_path else self.local_path
):
self.node_check_for_upload_create(relative_path, f)
    def run(self):
        """Run the sync.

        Confront the local and the remote directories and perform the needed changes.
        Exits the process with status 1 on unrecoverable remote errors.
        """
        # Check if remote path is present
        try:
            self.sftp.stat(self.remote_path)
        except FileNotFoundError as e:
            # NOTE(review): FileNotFoundError is Python 3 only; on Python 2
            # (still partially supported via the raw_input fallback) this
            # except clause would itself fail with NameError — confirm.
            if self.create_remote_directory:
                self.sftp.mkdir(self.remote_path)
                self.logger.info(
                    "Created missing remote dir: '" + self.remote_path + "'")
            else:
                self.logger.error(
                    "Remote folder does not exists. "
                    "Add '-r' to create it if missing.")
                sys.exit(1)

        try:
            if self.delete:
                # First check for items to be removed
                self.check_for_deletion()

            # Now scan local for items to upload/create
            self.check_for_upload_create()
        except FileNotFoundError:
            # If this happens, probably the remote folder doesn't exist.
            self.logger.error(
                "Error while opening remote folder. Are you sure it does exist?")
            sys.exit(1)
def create_parser():
    """Build and return the command-line argument parser for sftpclone."""
    arg_parser = argparse.ArgumentParser(
        description='Sync a local and a remote folder through SFTP.')

    # Positional arguments: local folder and remote ssh-url.
    arg_parser.add_argument(
        "path", type=str, metavar="local-path",
        help="the path of the local folder",
    )
    arg_parser.add_argument(
        "remote", type=str,
        metavar="user[:password]@hostname:remote-path",
        help="the ssh-url ([user[:password]@]hostname:remote-path) of the remote folder. "
             "The hostname can be specified as a ssh_config's hostname too. "
             "Every missing information will be gathered from there",
    )

    # Connection / authentication options.
    arg_parser.add_argument(
        "-k", "--key", metavar="identity-path", action="append",
        help="private key identity path (defaults to ~/.ssh/id_rsa)")
    arg_parser.add_argument(
        "-l", "--logging",
        choices=['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG', 'NOTSET'],
        default='ERROR',
        help="set logging level")
    arg_parser.add_argument(
        "-p", "--port", default=22, type=int,
        help="SSH remote port (defaults to 22)")
    arg_parser.add_argument(
        "-f", "--fix-symlinks", action="store_true",
        help="fix symbolic links on remote side")
    arg_parser.add_argument(
        "-a", "--ssh-agent", action="store_true",
        help="enable ssh-agent support")
    arg_parser.add_argument(
        "-c", "--ssh-config", metavar="ssh_config path",
        default="~/.ssh/config", type=str,
        help="path to the ssh-configuration file (default to ~/.ssh/config)")
    arg_parser.add_argument(
        "-n", "--known-hosts", metavar="known_hosts path",
        default="~/.ssh/known_hosts", type=str,
        help="path to the openSSH known_hosts file")
    arg_parser.add_argument(
        "-d", "--disable-known-hosts", action="store_true",
        help="disable known_hosts fingerprint checking (security warning!)")

    # Behavioral switches.
    arg_parser.add_argument(
        "-e", "--exclude-from", metavar="exclude-from-file-path", type=str,
        help="exclude files matching pattern in exclude-from-file-path")
    arg_parser.add_argument(
        "-t", "--do-not-delete", action="store_true",
        help="do not delete remote files missing from local folder")
    arg_parser.add_argument(
        "-o", "--allow-unknown", action="store_true",
        help="allow connection to unknown hosts")
    arg_parser.add_argument(
        "-r", "--create-remote-directory", action="store_true",
        help="Create remote base directory if missing on remote")

    return arg_parser
if __name__ == '__main__':
    # Script entry point: delegate everything to main(), defined elsewhere
    # in this module (argument parsing, SFTPClone construction, run()).
    main()
|
unbit/sftpclone | sftpclone/sftpclone.py | SFTPClone._must_be_deleted | python | def _must_be_deleted(local_path, r_st):
# if the file doesn't exists
if not os.path.lexists(local_path):
return True
# or if the file type is different
l_st = os.lstat(local_path)
if S_IFMT(r_st.st_mode) != S_IFMT(l_st.st_mode):
return True
return False | Return True if the remote correspondent of local_path has to be deleted.
i.e. if it doesn't exists locally or if it has a different type from the remote one. | train | https://github.com/unbit/sftpclone/blob/1cc89478e680fc4e0d12b1a15b5bafd0390d05da/sftpclone/sftpclone.py#L369-L382 | null | class SFTPClone(object):
"""The SFTPClone class."""
def __init__(self, local_path, remote_url,
identity_files=None, port=None, fix_symlinks=False,
ssh_config_path=None, ssh_agent=False,
exclude_file=None, known_hosts_path=None,
delete=True, allow_unknown=False,
create_remote_directory=False,
):
"""Init the needed parameters and the SFTPClient."""
self.local_path = os.path.realpath(os.path.expanduser(local_path))
self.logger = logger or configure_logging()
self.create_remote_directory = create_remote_directory
if not os.path.exists(self.local_path):
self.logger.error("Local path MUST exist. Exiting.")
sys.exit(1)
if exclude_file:
with open(exclude_file) as f:
# As in rsync's exclude from, ignore lines with leading ; and #
# and treat each path as relative (thus by removing the leading
# /)
exclude_list = [
line.rstrip().lstrip("/")
for line in f
if not line.startswith((";", "#"))
]
# actually, is a set of excluded files
self.exclude_list = {
g
for pattern in exclude_list
for g in glob.glob(path_join(self.local_path, pattern))
}
else:
self.exclude_list = set()
username, password, hostname, self.remote_path = parse_username_password_hostname(remote_url)
identity_files = identity_files or []
proxy_command = None
if ssh_config_path:
try:
with open(os.path.expanduser(ssh_config_path)) as c_file:
ssh_config = paramiko.SSHConfig()
ssh_config.parse(c_file)
c = ssh_config.lookup(hostname)
hostname = c.get("hostname", hostname)
username = c.get("user", username)
port = int(c.get("port", port))
identity_files = c.get("identityfile", identity_files)
proxy_command = c.get("proxycommand")
except Exception as e:
# it could be safe to continue anyway,
# because parameters could have been manually specified
self.logger.error(
"Error while parsing ssh_config file: %s. Trying to continue anyway...", e
)
# Set default values
if not username:
username = getuser() # defaults to current user
port = port or 22
allow_unknown = allow_unknown or False
self.chown = False
self.fix_symlinks = fix_symlinks or False
self.delete = delete if delete is not None else True
if ssh_agent:
agent, agent_keys = get_ssh_agent_keys(self.logger)
else:
agent, agent_keys = None, None
if not identity_files and not password and not agent_keys:
self.logger.error(
"You need to specify either a password, an identity or to enable the ssh-agent support."
)
sys.exit(1)
# only root can change file owner
if username == 'root':
self.chown = True
sock = (hostname, port)
if proxy_command is not None:
sock = paramiko.proxy.ProxyCommand(proxy_command)
try:
transport = paramiko.Transport(sock)
except socket.gaierror:
self.logger.error(
"Hostname not known. Are you sure you inserted it correctly?")
sys.exit(1)
try:
ssh_host = hostname if port == 22 else "[{}]:{}".format(hostname, port)
known_hosts = None
"""
Before starting the transport session, we have to configure it.
Specifically, we need to configure the preferred PK algorithm.
If the system already knows a public key of a specific kind for
a remote host, we have to peek its type as the preferred one.
"""
if known_hosts_path:
known_hosts = paramiko.HostKeys()
known_hosts_path = os.path.realpath(
os.path.expanduser(known_hosts_path))
try:
known_hosts.load(known_hosts_path)
except IOError:
self.logger.error(
"Error while loading known hosts file at {}. Exiting...".format(
known_hosts_path)
)
sys.exit(1)
known_keys = known_hosts.lookup(ssh_host)
if known_keys is not None:
# one or more keys are already known
# set their type as preferred
transport.get_security_options().key_types = \
tuple(known_keys.keys())
transport.start_client()
if not known_hosts:
self.logger.warning("Security warning: skipping known hosts check...")
else:
pubk = transport.get_remote_server_key()
if ssh_host in known_hosts.keys():
if not known_hosts.check(ssh_host, pubk):
self.logger.error(
"Security warning: "
"remote key fingerprint {} for hostname "
"{} didn't match the one in known_hosts {}. "
"Exiting...".format(
pubk.get_base64(),
ssh_host,
known_hosts.lookup(hostname),
)
)
sys.exit(1)
elif not allow_unknown:
prompt = ("The authenticity of host '{}' can't be established.\n"
"{} key is {}.\n"
"Are you sure you want to continue connecting? [y/n] ").format(
ssh_host, pubk.get_name(), pubk.get_base64())
try:
# Renamed to `input` in Python 3.x
response = raw_input(prompt)
except NameError:
response = input(prompt)
# Note: we do not modify the user's known_hosts file
if not (response == "y" or response == "yes"):
self.logger.error(
"Host authentication failed."
)
sys.exit(1)
def perform_key_auth(pkey):
try:
transport.auth_publickey(
username=username,
key=pkey
)
return True
except paramiko.SSHException:
self.logger.warning(
"Authentication with identity {}... failed".format(pkey.get_base64()[:10])
)
return False
if password: # Password auth, if specified.
transport.auth_password(
username=username,
password=password
)
elif agent_keys: # SSH agent keys have higher priority
for pkey in agent_keys:
if perform_key_auth(pkey):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
elif identity_files: # Then follow identity file (specified from CL or ssh_config)
# Try identity files one by one, until one works
for key_path in identity_files:
key_path = os.path.expanduser(key_path)
try:
key = paramiko.RSAKey.from_private_key_file(key_path)
except paramiko.PasswordRequiredException:
pk_password = getpass(
"It seems that your identity from '{}' is encrypted. "
"Please enter your password: ".format(key_path)
)
try:
key = paramiko.RSAKey.from_private_key_file(key_path, pk_password)
except paramiko.SSHException:
self.logger.error(
"Incorrect passphrase. Cannot decode private key from '{}'.".format(key_path)
)
continue
except IOError or paramiko.SSHException:
self.logger.error(
"Something went wrong while opening '{}'. Skipping it.".format(key_path)
)
continue
if perform_key_auth(key):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
else: # No authentication method specified, we shouldn't arrive here.
assert False
except paramiko.SSHException:
self.logger.error(
"None of the provided authentication methods worked. Exiting."
)
transport.close()
sys.exit(1)
finally:
if agent:
agent.close()
self.sftp = paramiko.SFTPClient.from_transport(transport)
if self.remote_path.startswith("~"):
# nasty hack to let getcwd work without changing dir!
self.sftp.chdir('.')
self.remote_path = self.remote_path.replace(
"~", self.sftp.getcwd()) # home is the initial sftp dir
@staticmethod
def _file_need_upload(l_st, r_st):
return True if \
l_st.st_size != r_st.st_size or int(l_st.st_mtime) != r_st.st_mtime \
else False
@staticmethod
def _match_modes(self, remote_path, l_st):
"""Match mod, utime and uid/gid with locals one."""
self.sftp.chmod(remote_path, S_IMODE(l_st.st_mode))
self.sftp.utime(remote_path, (l_st.st_atime, l_st.st_mtime))
if self.chown:
self.sftp.chown(remote_path, l_st.st_uid, l_st.st_gid)
def file_upload(self, local_path, remote_path, l_st):
"""Upload local_path to remote_path and set permission and mtime."""
self.sftp.put(local_path, remote_path)
self._match_modes(remote_path, l_st)
def remote_delete(self, remote_path, r_st):
"""Remove the remote directory node."""
# If it's a directory, then delete content and directory
if S_ISDIR(r_st.st_mode):
for item in self.sftp.listdir_attr(remote_path):
full_path = path_join(remote_path, item.filename)
self.remote_delete(full_path, item)
self.sftp.rmdir(remote_path)
# Or simply delete files
else:
try:
self.sftp.remove(remote_path)
except FileNotFoundError as e:
self.logger.error(
"error while removing {}. trace: {}".format(remote_path, e)
)
def check_for_deletion(self, relative_path=None):
"""Traverse the entire remote_path tree.
Find files/directories that need to be deleted,
not being present in the local folder.
"""
if not relative_path:
relative_path = str() # root of shared directory tree
remote_path = path_join(self.remote_path, relative_path)
local_path = path_join(self.local_path, relative_path)
for remote_st in self.sftp.listdir_attr(remote_path):
r_lstat = self.sftp.lstat(path_join(remote_path, remote_st.filename))
inner_remote_path = path_join(remote_path, remote_st.filename)
inner_local_path = path_join(local_path, remote_st.filename)
# check if remote_st is a symlink
# otherwise could delete file outside shared directory
if S_ISLNK(r_lstat.st_mode):
if self._must_be_deleted(inner_local_path, r_lstat):
self.remote_delete(inner_remote_path, r_lstat)
continue
if self._must_be_deleted(inner_local_path, remote_st):
self.remote_delete(inner_remote_path, remote_st)
elif S_ISDIR(remote_st.st_mode):
self.check_for_deletion(
path_join(relative_path, remote_st.filename)
)
def create_update_symlink(self, link_destination, remote_path):
"""Create a new link pointing to link_destination in remote_path position."""
try: # if there's anything, delete it
self.sftp.remove(remote_path)
except IOError: # that's fine, nothing exists there!
pass
finally: # and recreate the link
try:
self.sftp.symlink(link_destination, remote_path)
except OSError as e:
# Sometimes, if links are "too" different, symlink fails.
# Sadly, nothing we can do about it.
self.logger.error("error while symlinking {} to {}: {}".format(
remote_path, link_destination, e))
def node_check_for_upload_create(self, relative_path, f):
"""Check if the given directory tree node has to be uploaded/created on the remote folder."""
if not relative_path:
# we're at the root of the shared directory tree
relative_path = str()
# the (absolute) local address of f.
local_path = path_join(self.local_path, relative_path, f)
try:
l_st = os.lstat(local_path)
except OSError as e:
"""A little background here.
Sometimes, in big clusters configurations (mail, etc.),
files could disappear or be moved, suddenly.
There's nothing to do about it,
system should be stopped before doing backups.
Anyway, we log it, and skip it.
"""
self.logger.error("error while checking {}: {}".format(relative_path, e))
return
if local_path in self.exclude_list:
self.logger.info("Skipping excluded file %s.", local_path)
return
# the (absolute) remote address of f.
remote_path = path_join(self.remote_path, relative_path, f)
# First case: f is a directory
if S_ISDIR(l_st.st_mode):
# we check if the folder exists on the remote side
# it has to be a folder, otherwise it would have already been
# deleted
try:
self.sftp.stat(remote_path)
except IOError: # it doesn't exist yet on remote side
self.sftp.mkdir(remote_path)
self._match_modes(remote_path, l_st)
# now, we should traverse f too (recursion magic!)
self.check_for_upload_create(path_join(relative_path, f))
# Second case: f is a symbolic link
elif S_ISLNK(l_st.st_mode):
# read the local link
local_link = os.readlink(local_path)
absolute_local_link = os.path.realpath(local_link)
# is it absolute?
is_absolute = local_link.startswith("/")
# and does it point inside the shared directory?
# add trailing slash (security)
trailing_local_path = path_join(self.local_path, '')
relpath = os.path.commonprefix(
[absolute_local_link,
trailing_local_path]
) == trailing_local_path
if relpath:
relative_link = absolute_local_link[len(trailing_local_path):]
else:
relative_link = None
"""
# Refactor them all, be efficient!
# Case A: absolute link pointing outside shared directory
# (we can only update the remote part)
if is_absolute and not relpath:
self.create_update_symlink(local_link, remote_path)
# Case B: absolute link pointing inside shared directory
# (we can leave it as it is or fix the prefix to match the one of the remote server)
elif is_absolute and relpath:
if self.fix_symlinks:
self.create_update_symlink(
join(
self.remote_path,
relative_link,
),
remote_path
)
else:
self.create_update_symlink(local_link, remote_path)
# Case C: relative link pointing outside shared directory
# (all we can do is try to make the link anyway)
elif not is_absolute and not relpath:
self.create_update_symlink(local_link, remote_path)
# Case D: relative link pointing inside shared directory
# (we preserve the relativity and link it!)
elif not is_absolute and relpath:
self.create_update_symlink(local_link, remote_path)
"""
if is_absolute and relpath:
if self.fix_symlinks:
self.create_update_symlink(
path_join(
self.remote_path,
relative_link,
),
remote_path
)
else:
self.create_update_symlink(local_link, remote_path)
# Third case: regular file
elif S_ISREG(l_st.st_mode):
try:
r_st = self.sftp.lstat(remote_path)
if self._file_need_upload(l_st, r_st):
self.file_upload(local_path, remote_path, l_st)
except IOError as e:
if e.errno == errno.ENOENT:
self.file_upload(local_path, remote_path, l_st)
# Anything else.
else:
self.logger.warning("Skipping unsupported file %s.", local_path)
def check_for_upload_create(self, relative_path=None):
"""Traverse the relative_path tree and check for files that need to be uploaded/created.
Relativity here refers to the shared directory tree."""
for f in os.listdir(
path_join(
self.local_path, relative_path) if relative_path else self.local_path
):
self.node_check_for_upload_create(relative_path, f)
def run(self):
"""Run the sync.
Confront the local and the remote directories and perform the needed changes."""
# Check if remote path is present
try:
self.sftp.stat(self.remote_path)
except FileNotFoundError as e:
if self.create_remote_directory:
self.sftp.mkdir(self.remote_path)
self.logger.info(
"Created missing remote dir: '" + self.remote_path + "'")
else:
self.logger.error(
"Remote folder does not exists. "
"Add '-r' to create it if missing.")
sys.exit(1)
try:
if self.delete:
# First check for items to be removed
self.check_for_deletion()
# Now scan local for items to upload/create
self.check_for_upload_create()
except FileNotFoundError:
# If this happens, probably the remote folder doesn't exist.
self.logger.error(
"Error while opening remote folder. Are you sure it does exist?")
sys.exit(1)
|
unbit/sftpclone | sftpclone/sftpclone.py | SFTPClone._match_modes | python | def _match_modes(self, remote_path, l_st):
self.sftp.chmod(remote_path, S_IMODE(l_st.st_mode))
self.sftp.utime(remote_path, (l_st.st_atime, l_st.st_mtime))
if self.chown:
self.sftp.chown(remote_path, l_st.st_uid, l_st.st_gid) | Match mod, utime and uid/gid with locals one. | train | https://github.com/unbit/sftpclone/blob/1cc89478e680fc4e0d12b1a15b5bafd0390d05da/sftpclone/sftpclone.py#L384-L390 | null | class SFTPClone(object):
"""The SFTPClone class."""
def __init__(self, local_path, remote_url,
identity_files=None, port=None, fix_symlinks=False,
ssh_config_path=None, ssh_agent=False,
exclude_file=None, known_hosts_path=None,
delete=True, allow_unknown=False,
create_remote_directory=False,
):
"""Init the needed parameters and the SFTPClient."""
self.local_path = os.path.realpath(os.path.expanduser(local_path))
self.logger = logger or configure_logging()
self.create_remote_directory = create_remote_directory
if not os.path.exists(self.local_path):
self.logger.error("Local path MUST exist. Exiting.")
sys.exit(1)
if exclude_file:
with open(exclude_file) as f:
# As in rsync's exclude from, ignore lines with leading ; and #
# and treat each path as relative (thus by removing the leading
# /)
exclude_list = [
line.rstrip().lstrip("/")
for line in f
if not line.startswith((";", "#"))
]
# actually, is a set of excluded files
self.exclude_list = {
g
for pattern in exclude_list
for g in glob.glob(path_join(self.local_path, pattern))
}
else:
self.exclude_list = set()
username, password, hostname, self.remote_path = parse_username_password_hostname(remote_url)
identity_files = identity_files or []
proxy_command = None
if ssh_config_path:
try:
with open(os.path.expanduser(ssh_config_path)) as c_file:
ssh_config = paramiko.SSHConfig()
ssh_config.parse(c_file)
c = ssh_config.lookup(hostname)
hostname = c.get("hostname", hostname)
username = c.get("user", username)
port = int(c.get("port", port))
identity_files = c.get("identityfile", identity_files)
proxy_command = c.get("proxycommand")
except Exception as e:
# it could be safe to continue anyway,
# because parameters could have been manually specified
self.logger.error(
"Error while parsing ssh_config file: %s. Trying to continue anyway...", e
)
# Set default values
if not username:
username = getuser() # defaults to current user
port = port or 22
allow_unknown = allow_unknown or False
self.chown = False
self.fix_symlinks = fix_symlinks or False
self.delete = delete if delete is not None else True
if ssh_agent:
agent, agent_keys = get_ssh_agent_keys(self.logger)
else:
agent, agent_keys = None, None
if not identity_files and not password and not agent_keys:
self.logger.error(
"You need to specify either a password, an identity or to enable the ssh-agent support."
)
sys.exit(1)
# only root can change file owner
if username == 'root':
self.chown = True
sock = (hostname, port)
if proxy_command is not None:
sock = paramiko.proxy.ProxyCommand(proxy_command)
try:
transport = paramiko.Transport(sock)
except socket.gaierror:
self.logger.error(
"Hostname not known. Are you sure you inserted it correctly?")
sys.exit(1)
try:
ssh_host = hostname if port == 22 else "[{}]:{}".format(hostname, port)
known_hosts = None
"""
Before starting the transport session, we have to configure it.
Specifically, we need to configure the preferred PK algorithm.
If the system already knows a public key of a specific kind for
a remote host, we have to peek its type as the preferred one.
"""
if known_hosts_path:
known_hosts = paramiko.HostKeys()
known_hosts_path = os.path.realpath(
os.path.expanduser(known_hosts_path))
try:
known_hosts.load(known_hosts_path)
except IOError:
self.logger.error(
"Error while loading known hosts file at {}. Exiting...".format(
known_hosts_path)
)
sys.exit(1)
known_keys = known_hosts.lookup(ssh_host)
if known_keys is not None:
# one or more keys are already known
# set their type as preferred
transport.get_security_options().key_types = \
tuple(known_keys.keys())
transport.start_client()
if not known_hosts:
self.logger.warning("Security warning: skipping known hosts check...")
else:
pubk = transport.get_remote_server_key()
if ssh_host in known_hosts.keys():
if not known_hosts.check(ssh_host, pubk):
self.logger.error(
"Security warning: "
"remote key fingerprint {} for hostname "
"{} didn't match the one in known_hosts {}. "
"Exiting...".format(
pubk.get_base64(),
ssh_host,
known_hosts.lookup(hostname),
)
)
sys.exit(1)
elif not allow_unknown:
prompt = ("The authenticity of host '{}' can't be established.\n"
"{} key is {}.\n"
"Are you sure you want to continue connecting? [y/n] ").format(
ssh_host, pubk.get_name(), pubk.get_base64())
try:
# Renamed to `input` in Python 3.x
response = raw_input(prompt)
except NameError:
response = input(prompt)
# Note: we do not modify the user's known_hosts file
if not (response == "y" or response == "yes"):
self.logger.error(
"Host authentication failed."
)
sys.exit(1)
def perform_key_auth(pkey):
try:
transport.auth_publickey(
username=username,
key=pkey
)
return True
except paramiko.SSHException:
self.logger.warning(
"Authentication with identity {}... failed".format(pkey.get_base64()[:10])
)
return False
if password: # Password auth, if specified.
transport.auth_password(
username=username,
password=password
)
elif agent_keys: # SSH agent keys have higher priority
for pkey in agent_keys:
if perform_key_auth(pkey):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
elif identity_files: # Then follow identity file (specified from CL or ssh_config)
# Try identity files one by one, until one works
for key_path in identity_files:
key_path = os.path.expanduser(key_path)
try:
key = paramiko.RSAKey.from_private_key_file(key_path)
except paramiko.PasswordRequiredException:
pk_password = getpass(
"It seems that your identity from '{}' is encrypted. "
"Please enter your password: ".format(key_path)
)
try:
key = paramiko.RSAKey.from_private_key_file(key_path, pk_password)
except paramiko.SSHException:
self.logger.error(
"Incorrect passphrase. Cannot decode private key from '{}'.".format(key_path)
)
continue
except IOError or paramiko.SSHException:
self.logger.error(
"Something went wrong while opening '{}'. Skipping it.".format(key_path)
)
continue
if perform_key_auth(key):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
else: # No authentication method specified, we shouldn't arrive here.
assert False
except paramiko.SSHException:
self.logger.error(
"None of the provided authentication methods worked. Exiting."
)
transport.close()
sys.exit(1)
finally:
if agent:
agent.close()
self.sftp = paramiko.SFTPClient.from_transport(transport)
if self.remote_path.startswith("~"):
# nasty hack to let getcwd work without changing dir!
self.sftp.chdir('.')
self.remote_path = self.remote_path.replace(
"~", self.sftp.getcwd()) # home is the initial sftp dir
@staticmethod
def _file_need_upload(l_st, r_st):
return True if \
l_st.st_size != r_st.st_size or int(l_st.st_mtime) != r_st.st_mtime \
else False
@staticmethod
def _must_be_deleted(local_path, r_st):
"""Return True if the remote correspondent of local_path has to be deleted.
i.e. if it doesn't exists locally or if it has a different type from the remote one."""
# if the file doesn't exists
if not os.path.lexists(local_path):
return True
# or if the file type is different
l_st = os.lstat(local_path)
if S_IFMT(r_st.st_mode) != S_IFMT(l_st.st_mode):
return True
return False
    def file_upload(self, local_path, remote_path, l_st):
        """Upload local_path to remote_path and set permission and mtime.

        :param local_path: absolute path of the local source file.
        :param remote_path: absolute path of the remote destination.
        :param l_st: ``os.lstat`` result of the local file, whose mode and
            timestamps are replicated on the remote copy.
        """
        self.sftp.put(local_path, remote_path)
        # Mirror mode and atime/mtime so that a later sync can compare
        # mtimes (see _file_need_upload) and skip unchanged files.
        self._match_modes(remote_path, l_st)
def remote_delete(self, remote_path, r_st):
"""Remove the remote directory node."""
# If it's a directory, then delete content and directory
if S_ISDIR(r_st.st_mode):
for item in self.sftp.listdir_attr(remote_path):
full_path = path_join(remote_path, item.filename)
self.remote_delete(full_path, item)
self.sftp.rmdir(remote_path)
# Or simply delete files
else:
try:
self.sftp.remove(remote_path)
except FileNotFoundError as e:
self.logger.error(
"error while removing {}. trace: {}".format(remote_path, e)
)
    def check_for_deletion(self, relative_path=None):
        """Traverse the entire remote_path tree.
        Find files/directories that need to be deleted,
        not being present in the local folder.

        :param relative_path: current position inside the shared directory
            tree, relative to its root (``None``/empty means the root).
        """
        if not relative_path:
            relative_path = str()  # root of shared directory tree
        remote_path = path_join(self.remote_path, relative_path)
        local_path = path_join(self.local_path, relative_path)
        for remote_st in self.sftp.listdir_attr(remote_path):
            # lstat never follows symlinks, so r_lstat describes the entry
            # itself even when it is a link.
            # NOTE(review): listdir_attr entries presumably already behave
            # like lstat results on most servers; the explicit round-trip
            # is kept here for safety -- confirm before removing it.
            r_lstat = self.sftp.lstat(path_join(remote_path, remote_st.filename))
            inner_remote_path = path_join(remote_path, remote_st.filename)
            inner_local_path = path_join(local_path, remote_st.filename)
            # check if remote_st is a symlink
            # otherwise could delete file outside shared directory
            if S_ISLNK(r_lstat.st_mode):
                if self._must_be_deleted(inner_local_path, r_lstat):
                    self.remote_delete(inner_remote_path, r_lstat)
                continue
            if self._must_be_deleted(inner_local_path, remote_st):
                self.remote_delete(inner_remote_path, remote_st)
            elif S_ISDIR(remote_st.st_mode):
                # still present locally and a directory: recurse into it
                self.check_for_deletion(
                    path_join(relative_path, remote_st.filename)
                )
def create_update_symlink(self, link_destination, remote_path):
"""Create a new link pointing to link_destination in remote_path position."""
try: # if there's anything, delete it
self.sftp.remove(remote_path)
except IOError: # that's fine, nothing exists there!
pass
finally: # and recreate the link
try:
self.sftp.symlink(link_destination, remote_path)
except OSError as e:
# Sometimes, if links are "too" different, symlink fails.
# Sadly, nothing we can do about it.
self.logger.error("error while symlinking {} to {}: {}".format(
remote_path, link_destination, e))
def node_check_for_upload_create(self, relative_path, f):
"""Check if the given directory tree node has to be uploaded/created on the remote folder."""
if not relative_path:
# we're at the root of the shared directory tree
relative_path = str()
# the (absolute) local address of f.
local_path = path_join(self.local_path, relative_path, f)
try:
l_st = os.lstat(local_path)
except OSError as e:
"""A little background here.
Sometimes, in big clusters configurations (mail, etc.),
files could disappear or be moved, suddenly.
There's nothing to do about it,
system should be stopped before doing backups.
Anyway, we log it, and skip it.
"""
self.logger.error("error while checking {}: {}".format(relative_path, e))
return
if local_path in self.exclude_list:
self.logger.info("Skipping excluded file %s.", local_path)
return
# the (absolute) remote address of f.
remote_path = path_join(self.remote_path, relative_path, f)
# First case: f is a directory
if S_ISDIR(l_st.st_mode):
# we check if the folder exists on the remote side
# it has to be a folder, otherwise it would have already been
# deleted
try:
self.sftp.stat(remote_path)
except IOError: # it doesn't exist yet on remote side
self.sftp.mkdir(remote_path)
self._match_modes(remote_path, l_st)
# now, we should traverse f too (recursion magic!)
self.check_for_upload_create(path_join(relative_path, f))
# Second case: f is a symbolic link
elif S_ISLNK(l_st.st_mode):
# read the local link
local_link = os.readlink(local_path)
absolute_local_link = os.path.realpath(local_link)
# is it absolute?
is_absolute = local_link.startswith("/")
# and does it point inside the shared directory?
# add trailing slash (security)
trailing_local_path = path_join(self.local_path, '')
relpath = os.path.commonprefix(
[absolute_local_link,
trailing_local_path]
) == trailing_local_path
if relpath:
relative_link = absolute_local_link[len(trailing_local_path):]
else:
relative_link = None
"""
# Refactor them all, be efficient!
# Case A: absolute link pointing outside shared directory
# (we can only update the remote part)
if is_absolute and not relpath:
self.create_update_symlink(local_link, remote_path)
# Case B: absolute link pointing inside shared directory
# (we can leave it as it is or fix the prefix to match the one of the remote server)
elif is_absolute and relpath:
if self.fix_symlinks:
self.create_update_symlink(
join(
self.remote_path,
relative_link,
),
remote_path
)
else:
self.create_update_symlink(local_link, remote_path)
# Case C: relative link pointing outside shared directory
# (all we can do is try to make the link anyway)
elif not is_absolute and not relpath:
self.create_update_symlink(local_link, remote_path)
# Case D: relative link pointing inside shared directory
# (we preserve the relativity and link it!)
elif not is_absolute and relpath:
self.create_update_symlink(local_link, remote_path)
"""
if is_absolute and relpath:
if self.fix_symlinks:
self.create_update_symlink(
path_join(
self.remote_path,
relative_link,
),
remote_path
)
else:
self.create_update_symlink(local_link, remote_path)
# Third case: regular file
elif S_ISREG(l_st.st_mode):
try:
r_st = self.sftp.lstat(remote_path)
if self._file_need_upload(l_st, r_st):
self.file_upload(local_path, remote_path, l_st)
except IOError as e:
if e.errno == errno.ENOENT:
self.file_upload(local_path, remote_path, l_st)
# Anything else.
else:
self.logger.warning("Skipping unsupported file %s.", local_path)
def check_for_upload_create(self, relative_path=None):
"""Traverse the relative_path tree and check for files that need to be uploaded/created.
Relativity here refers to the shared directory tree."""
for f in os.listdir(
path_join(
self.local_path, relative_path) if relative_path else self.local_path
):
self.node_check_for_upload_create(relative_path, f)
def run(self):
"""Run the sync.
Confront the local and the remote directories and perform the needed changes."""
# Check if remote path is present
try:
self.sftp.stat(self.remote_path)
except FileNotFoundError as e:
if self.create_remote_directory:
self.sftp.mkdir(self.remote_path)
self.logger.info(
"Created missing remote dir: '" + self.remote_path + "'")
else:
self.logger.error(
"Remote folder does not exists. "
"Add '-r' to create it if missing.")
sys.exit(1)
try:
if self.delete:
# First check for items to be removed
self.check_for_deletion()
# Now scan local for items to upload/create
self.check_for_upload_create()
except FileNotFoundError:
# If this happens, probably the remote folder doesn't exist.
self.logger.error(
"Error while opening remote folder. Are you sure it does exist?")
sys.exit(1)
|
unbit/sftpclone | sftpclone/sftpclone.py | SFTPClone.file_upload | python | def file_upload(self, local_path, remote_path, l_st):
self.sftp.put(local_path, remote_path)
self._match_modes(remote_path, l_st) | Upload local_path to remote_path and set permission and mtime. | train | https://github.com/unbit/sftpclone/blob/1cc89478e680fc4e0d12b1a15b5bafd0390d05da/sftpclone/sftpclone.py#L392-L395 | [
"def _match_modes(self, remote_path, l_st):\n \"\"\"Match mod, utime and uid/gid with locals one.\"\"\"\n self.sftp.chmod(remote_path, S_IMODE(l_st.st_mode))\n self.sftp.utime(remote_path, (l_st.st_atime, l_st.st_mtime))\n\n if self.chown:\n self.sftp.chown(remote_path, l_st.st_uid, l_st.st_gid)\n"
] | class SFTPClone(object):
"""The SFTPClone class."""
def __init__(self, local_path, remote_url,
identity_files=None, port=None, fix_symlinks=False,
ssh_config_path=None, ssh_agent=False,
exclude_file=None, known_hosts_path=None,
delete=True, allow_unknown=False,
create_remote_directory=False,
):
"""Init the needed parameters and the SFTPClient."""
self.local_path = os.path.realpath(os.path.expanduser(local_path))
self.logger = logger or configure_logging()
self.create_remote_directory = create_remote_directory
if not os.path.exists(self.local_path):
self.logger.error("Local path MUST exist. Exiting.")
sys.exit(1)
if exclude_file:
with open(exclude_file) as f:
# As in rsync's exclude from, ignore lines with leading ; and #
# and treat each path as relative (thus by removing the leading
# /)
exclude_list = [
line.rstrip().lstrip("/")
for line in f
if not line.startswith((";", "#"))
]
# actually, is a set of excluded files
self.exclude_list = {
g
for pattern in exclude_list
for g in glob.glob(path_join(self.local_path, pattern))
}
else:
self.exclude_list = set()
username, password, hostname, self.remote_path = parse_username_password_hostname(remote_url)
identity_files = identity_files or []
proxy_command = None
if ssh_config_path:
try:
with open(os.path.expanduser(ssh_config_path)) as c_file:
ssh_config = paramiko.SSHConfig()
ssh_config.parse(c_file)
c = ssh_config.lookup(hostname)
hostname = c.get("hostname", hostname)
username = c.get("user", username)
port = int(c.get("port", port))
identity_files = c.get("identityfile", identity_files)
proxy_command = c.get("proxycommand")
except Exception as e:
# it could be safe to continue anyway,
# because parameters could have been manually specified
self.logger.error(
"Error while parsing ssh_config file: %s. Trying to continue anyway...", e
)
# Set default values
if not username:
username = getuser() # defaults to current user
port = port or 22
allow_unknown = allow_unknown or False
self.chown = False
self.fix_symlinks = fix_symlinks or False
self.delete = delete if delete is not None else True
if ssh_agent:
agent, agent_keys = get_ssh_agent_keys(self.logger)
else:
agent, agent_keys = None, None
if not identity_files and not password and not agent_keys:
self.logger.error(
"You need to specify either a password, an identity or to enable the ssh-agent support."
)
sys.exit(1)
# only root can change file owner
if username == 'root':
self.chown = True
sock = (hostname, port)
if proxy_command is not None:
sock = paramiko.proxy.ProxyCommand(proxy_command)
try:
transport = paramiko.Transport(sock)
except socket.gaierror:
self.logger.error(
"Hostname not known. Are you sure you inserted it correctly?")
sys.exit(1)
try:
ssh_host = hostname if port == 22 else "[{}]:{}".format(hostname, port)
known_hosts = None
"""
Before starting the transport session, we have to configure it.
Specifically, we need to configure the preferred PK algorithm.
If the system already knows a public key of a specific kind for
a remote host, we have to peek its type as the preferred one.
"""
if known_hosts_path:
known_hosts = paramiko.HostKeys()
known_hosts_path = os.path.realpath(
os.path.expanduser(known_hosts_path))
try:
known_hosts.load(known_hosts_path)
except IOError:
self.logger.error(
"Error while loading known hosts file at {}. Exiting...".format(
known_hosts_path)
)
sys.exit(1)
known_keys = known_hosts.lookup(ssh_host)
if known_keys is not None:
# one or more keys are already known
# set their type as preferred
transport.get_security_options().key_types = \
tuple(known_keys.keys())
transport.start_client()
if not known_hosts:
self.logger.warning("Security warning: skipping known hosts check...")
else:
pubk = transport.get_remote_server_key()
if ssh_host in known_hosts.keys():
if not known_hosts.check(ssh_host, pubk):
self.logger.error(
"Security warning: "
"remote key fingerprint {} for hostname "
"{} didn't match the one in known_hosts {}. "
"Exiting...".format(
pubk.get_base64(),
ssh_host,
known_hosts.lookup(hostname),
)
)
sys.exit(1)
elif not allow_unknown:
prompt = ("The authenticity of host '{}' can't be established.\n"
"{} key is {}.\n"
"Are you sure you want to continue connecting? [y/n] ").format(
ssh_host, pubk.get_name(), pubk.get_base64())
try:
# Renamed to `input` in Python 3.x
response = raw_input(prompt)
except NameError:
response = input(prompt)
# Note: we do not modify the user's known_hosts file
if not (response == "y" or response == "yes"):
self.logger.error(
"Host authentication failed."
)
sys.exit(1)
def perform_key_auth(pkey):
try:
transport.auth_publickey(
username=username,
key=pkey
)
return True
except paramiko.SSHException:
self.logger.warning(
"Authentication with identity {}... failed".format(pkey.get_base64()[:10])
)
return False
if password: # Password auth, if specified.
transport.auth_password(
username=username,
password=password
)
elif agent_keys: # SSH agent keys have higher priority
for pkey in agent_keys:
if perform_key_auth(pkey):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
elif identity_files: # Then follow identity file (specified from CL or ssh_config)
# Try identity files one by one, until one works
for key_path in identity_files:
key_path = os.path.expanduser(key_path)
try:
key = paramiko.RSAKey.from_private_key_file(key_path)
except paramiko.PasswordRequiredException:
pk_password = getpass(
"It seems that your identity from '{}' is encrypted. "
"Please enter your password: ".format(key_path)
)
try:
key = paramiko.RSAKey.from_private_key_file(key_path, pk_password)
except paramiko.SSHException:
self.logger.error(
"Incorrect passphrase. Cannot decode private key from '{}'.".format(key_path)
)
continue
except IOError or paramiko.SSHException:
self.logger.error(
"Something went wrong while opening '{}'. Skipping it.".format(key_path)
)
continue
if perform_key_auth(key):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
else: # No authentication method specified, we shouldn't arrive here.
assert False
except paramiko.SSHException:
self.logger.error(
"None of the provided authentication methods worked. Exiting."
)
transport.close()
sys.exit(1)
finally:
if agent:
agent.close()
self.sftp = paramiko.SFTPClient.from_transport(transport)
if self.remote_path.startswith("~"):
# nasty hack to let getcwd work without changing dir!
self.sftp.chdir('.')
self.remote_path = self.remote_path.replace(
"~", self.sftp.getcwd()) # home is the initial sftp dir
@staticmethod
def _file_need_upload(l_st, r_st):
return True if \
l_st.st_size != r_st.st_size or int(l_st.st_mtime) != r_st.st_mtime \
else False
@staticmethod
def _must_be_deleted(local_path, r_st):
"""Return True if the remote correspondent of local_path has to be deleted.
i.e. if it doesn't exists locally or if it has a different type from the remote one."""
# if the file doesn't exists
if not os.path.lexists(local_path):
return True
# or if the file type is different
l_st = os.lstat(local_path)
if S_IFMT(r_st.st_mode) != S_IFMT(l_st.st_mode):
return True
return False
def _match_modes(self, remote_path, l_st):
"""Match mod, utime and uid/gid with locals one."""
self.sftp.chmod(remote_path, S_IMODE(l_st.st_mode))
self.sftp.utime(remote_path, (l_st.st_atime, l_st.st_mtime))
if self.chown:
self.sftp.chown(remote_path, l_st.st_uid, l_st.st_gid)
def remote_delete(self, remote_path, r_st):
"""Remove the remote directory node."""
# If it's a directory, then delete content and directory
if S_ISDIR(r_st.st_mode):
for item in self.sftp.listdir_attr(remote_path):
full_path = path_join(remote_path, item.filename)
self.remote_delete(full_path, item)
self.sftp.rmdir(remote_path)
# Or simply delete files
else:
try:
self.sftp.remove(remote_path)
except FileNotFoundError as e:
self.logger.error(
"error while removing {}. trace: {}".format(remote_path, e)
)
def check_for_deletion(self, relative_path=None):
"""Traverse the entire remote_path tree.
Find files/directories that need to be deleted,
not being present in the local folder.
"""
if not relative_path:
relative_path = str() # root of shared directory tree
remote_path = path_join(self.remote_path, relative_path)
local_path = path_join(self.local_path, relative_path)
for remote_st in self.sftp.listdir_attr(remote_path):
r_lstat = self.sftp.lstat(path_join(remote_path, remote_st.filename))
inner_remote_path = path_join(remote_path, remote_st.filename)
inner_local_path = path_join(local_path, remote_st.filename)
# check if remote_st is a symlink
# otherwise could delete file outside shared directory
if S_ISLNK(r_lstat.st_mode):
if self._must_be_deleted(inner_local_path, r_lstat):
self.remote_delete(inner_remote_path, r_lstat)
continue
if self._must_be_deleted(inner_local_path, remote_st):
self.remote_delete(inner_remote_path, remote_st)
elif S_ISDIR(remote_st.st_mode):
self.check_for_deletion(
path_join(relative_path, remote_st.filename)
)
def create_update_symlink(self, link_destination, remote_path):
"""Create a new link pointing to link_destination in remote_path position."""
try: # if there's anything, delete it
self.sftp.remove(remote_path)
except IOError: # that's fine, nothing exists there!
pass
finally: # and recreate the link
try:
self.sftp.symlink(link_destination, remote_path)
except OSError as e:
# Sometimes, if links are "too" different, symlink fails.
# Sadly, nothing we can do about it.
self.logger.error("error while symlinking {} to {}: {}".format(
remote_path, link_destination, e))
def node_check_for_upload_create(self, relative_path, f):
"""Check if the given directory tree node has to be uploaded/created on the remote folder."""
if not relative_path:
# we're at the root of the shared directory tree
relative_path = str()
# the (absolute) local address of f.
local_path = path_join(self.local_path, relative_path, f)
try:
l_st = os.lstat(local_path)
except OSError as e:
"""A little background here.
Sometimes, in big clusters configurations (mail, etc.),
files could disappear or be moved, suddenly.
There's nothing to do about it,
system should be stopped before doing backups.
Anyway, we log it, and skip it.
"""
self.logger.error("error while checking {}: {}".format(relative_path, e))
return
if local_path in self.exclude_list:
self.logger.info("Skipping excluded file %s.", local_path)
return
# the (absolute) remote address of f.
remote_path = path_join(self.remote_path, relative_path, f)
# First case: f is a directory
if S_ISDIR(l_st.st_mode):
# we check if the folder exists on the remote side
# it has to be a folder, otherwise it would have already been
# deleted
try:
self.sftp.stat(remote_path)
except IOError: # it doesn't exist yet on remote side
self.sftp.mkdir(remote_path)
self._match_modes(remote_path, l_st)
# now, we should traverse f too (recursion magic!)
self.check_for_upload_create(path_join(relative_path, f))
# Second case: f is a symbolic link
elif S_ISLNK(l_st.st_mode):
# read the local link
local_link = os.readlink(local_path)
absolute_local_link = os.path.realpath(local_link)
# is it absolute?
is_absolute = local_link.startswith("/")
# and does it point inside the shared directory?
# add trailing slash (security)
trailing_local_path = path_join(self.local_path, '')
relpath = os.path.commonprefix(
[absolute_local_link,
trailing_local_path]
) == trailing_local_path
if relpath:
relative_link = absolute_local_link[len(trailing_local_path):]
else:
relative_link = None
"""
# Refactor them all, be efficient!
# Case A: absolute link pointing outside shared directory
# (we can only update the remote part)
if is_absolute and not relpath:
self.create_update_symlink(local_link, remote_path)
# Case B: absolute link pointing inside shared directory
# (we can leave it as it is or fix the prefix to match the one of the remote server)
elif is_absolute and relpath:
if self.fix_symlinks:
self.create_update_symlink(
join(
self.remote_path,
relative_link,
),
remote_path
)
else:
self.create_update_symlink(local_link, remote_path)
# Case C: relative link pointing outside shared directory
# (all we can do is try to make the link anyway)
elif not is_absolute and not relpath:
self.create_update_symlink(local_link, remote_path)
# Case D: relative link pointing inside shared directory
# (we preserve the relativity and link it!)
elif not is_absolute and relpath:
self.create_update_symlink(local_link, remote_path)
"""
if is_absolute and relpath:
if self.fix_symlinks:
self.create_update_symlink(
path_join(
self.remote_path,
relative_link,
),
remote_path
)
else:
self.create_update_symlink(local_link, remote_path)
# Third case: regular file
elif S_ISREG(l_st.st_mode):
try:
r_st = self.sftp.lstat(remote_path)
if self._file_need_upload(l_st, r_st):
self.file_upload(local_path, remote_path, l_st)
except IOError as e:
if e.errno == errno.ENOENT:
self.file_upload(local_path, remote_path, l_st)
# Anything else.
else:
self.logger.warning("Skipping unsupported file %s.", local_path)
def check_for_upload_create(self, relative_path=None):
"""Traverse the relative_path tree and check for files that need to be uploaded/created.
Relativity here refers to the shared directory tree."""
for f in os.listdir(
path_join(
self.local_path, relative_path) if relative_path else self.local_path
):
self.node_check_for_upload_create(relative_path, f)
def run(self):
"""Run the sync.
Confront the local and the remote directories and perform the needed changes."""
# Check if remote path is present
try:
self.sftp.stat(self.remote_path)
except FileNotFoundError as e:
if self.create_remote_directory:
self.sftp.mkdir(self.remote_path)
self.logger.info(
"Created missing remote dir: '" + self.remote_path + "'")
else:
self.logger.error(
"Remote folder does not exists. "
"Add '-r' to create it if missing.")
sys.exit(1)
try:
if self.delete:
# First check for items to be removed
self.check_for_deletion()
# Now scan local for items to upload/create
self.check_for_upload_create()
except FileNotFoundError:
# If this happens, probably the remote folder doesn't exist.
self.logger.error(
"Error while opening remote folder. Are you sure it does exist?")
sys.exit(1)
|
unbit/sftpclone | sftpclone/sftpclone.py | SFTPClone.remote_delete | python | def remote_delete(self, remote_path, r_st):
# If it's a directory, then delete content and directory
if S_ISDIR(r_st.st_mode):
for item in self.sftp.listdir_attr(remote_path):
full_path = path_join(remote_path, item.filename)
self.remote_delete(full_path, item)
self.sftp.rmdir(remote_path)
# Or simply delete files
else:
try:
self.sftp.remove(remote_path)
except FileNotFoundError as e:
self.logger.error(
"error while removing {}. trace: {}".format(remote_path, e)
) | Remove the remote directory node. | train | https://github.com/unbit/sftpclone/blob/1cc89478e680fc4e0d12b1a15b5bafd0390d05da/sftpclone/sftpclone.py#L397-L413 | [
"def path_join(*args):\n \"\"\"\n Wrapper around `os.path.join`.\n Makes sure to join paths of the same type (bytes).\n \"\"\"\n args = (paramiko.py3compat.u(arg) for arg in args)\n return os.path.join(*args)\n",
"def remote_delete(self, remote_path, r_st):\n \"\"\"Remove the remote directory node.\"\"\"\n # If it's a directory, then delete content and directory\n if S_ISDIR(r_st.st_mode):\n for item in self.sftp.listdir_attr(remote_path):\n full_path = path_join(remote_path, item.filename)\n self.remote_delete(full_path, item)\n self.sftp.rmdir(remote_path)\n\n # Or simply delete files\n else:\n try:\n self.sftp.remove(remote_path)\n except FileNotFoundError as e:\n self.logger.error(\n \"error while removing {}. trace: {}\".format(remote_path, e)\n )\n"
] | class SFTPClone(object):
"""The SFTPClone class."""
def __init__(self, local_path, remote_url,
identity_files=None, port=None, fix_symlinks=False,
ssh_config_path=None, ssh_agent=False,
exclude_file=None, known_hosts_path=None,
delete=True, allow_unknown=False,
create_remote_directory=False,
):
"""Init the needed parameters and the SFTPClient."""
self.local_path = os.path.realpath(os.path.expanduser(local_path))
self.logger = logger or configure_logging()
self.create_remote_directory = create_remote_directory
if not os.path.exists(self.local_path):
self.logger.error("Local path MUST exist. Exiting.")
sys.exit(1)
if exclude_file:
with open(exclude_file) as f:
# As in rsync's exclude from, ignore lines with leading ; and #
# and treat each path as relative (thus by removing the leading
# /)
exclude_list = [
line.rstrip().lstrip("/")
for line in f
if not line.startswith((";", "#"))
]
# actually, is a set of excluded files
self.exclude_list = {
g
for pattern in exclude_list
for g in glob.glob(path_join(self.local_path, pattern))
}
else:
self.exclude_list = set()
username, password, hostname, self.remote_path = parse_username_password_hostname(remote_url)
identity_files = identity_files or []
proxy_command = None
if ssh_config_path:
try:
with open(os.path.expanduser(ssh_config_path)) as c_file:
ssh_config = paramiko.SSHConfig()
ssh_config.parse(c_file)
c = ssh_config.lookup(hostname)
hostname = c.get("hostname", hostname)
username = c.get("user", username)
port = int(c.get("port", port))
identity_files = c.get("identityfile", identity_files)
proxy_command = c.get("proxycommand")
except Exception as e:
# it could be safe to continue anyway,
# because parameters could have been manually specified
self.logger.error(
"Error while parsing ssh_config file: %s. Trying to continue anyway...", e
)
# Set default values
if not username:
username = getuser() # defaults to current user
port = port or 22
allow_unknown = allow_unknown or False
self.chown = False
self.fix_symlinks = fix_symlinks or False
self.delete = delete if delete is not None else True
if ssh_agent:
agent, agent_keys = get_ssh_agent_keys(self.logger)
else:
agent, agent_keys = None, None
if not identity_files and not password and not agent_keys:
self.logger.error(
"You need to specify either a password, an identity or to enable the ssh-agent support."
)
sys.exit(1)
# only root can change file owner
if username == 'root':
self.chown = True
sock = (hostname, port)
if proxy_command is not None:
sock = paramiko.proxy.ProxyCommand(proxy_command)
try:
transport = paramiko.Transport(sock)
except socket.gaierror:
self.logger.error(
"Hostname not known. Are you sure you inserted it correctly?")
sys.exit(1)
try:
ssh_host = hostname if port == 22 else "[{}]:{}".format(hostname, port)
known_hosts = None
"""
Before starting the transport session, we have to configure it.
Specifically, we need to configure the preferred PK algorithm.
If the system already knows a public key of a specific kind for
a remote host, we have to peek its type as the preferred one.
"""
if known_hosts_path:
known_hosts = paramiko.HostKeys()
known_hosts_path = os.path.realpath(
os.path.expanduser(known_hosts_path))
try:
known_hosts.load(known_hosts_path)
except IOError:
self.logger.error(
"Error while loading known hosts file at {}. Exiting...".format(
known_hosts_path)
)
sys.exit(1)
known_keys = known_hosts.lookup(ssh_host)
if known_keys is not None:
# one or more keys are already known
# set their type as preferred
transport.get_security_options().key_types = \
tuple(known_keys.keys())
transport.start_client()
if not known_hosts:
self.logger.warning("Security warning: skipping known hosts check...")
else:
pubk = transport.get_remote_server_key()
if ssh_host in known_hosts.keys():
if not known_hosts.check(ssh_host, pubk):
self.logger.error(
"Security warning: "
"remote key fingerprint {} for hostname "
"{} didn't match the one in known_hosts {}. "
"Exiting...".format(
pubk.get_base64(),
ssh_host,
known_hosts.lookup(hostname),
)
)
sys.exit(1)
elif not allow_unknown:
prompt = ("The authenticity of host '{}' can't be established.\n"
"{} key is {}.\n"
"Are you sure you want to continue connecting? [y/n] ").format(
ssh_host, pubk.get_name(), pubk.get_base64())
try:
# Renamed to `input` in Python 3.x
response = raw_input(prompt)
except NameError:
response = input(prompt)
# Note: we do not modify the user's known_hosts file
if not (response == "y" or response == "yes"):
self.logger.error(
"Host authentication failed."
)
sys.exit(1)
def perform_key_auth(pkey):
try:
transport.auth_publickey(
username=username,
key=pkey
)
return True
except paramiko.SSHException:
self.logger.warning(
"Authentication with identity {}... failed".format(pkey.get_base64()[:10])
)
return False
if password: # Password auth, if specified.
transport.auth_password(
username=username,
password=password
)
elif agent_keys: # SSH agent keys have higher priority
for pkey in agent_keys:
if perform_key_auth(pkey):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
elif identity_files: # Then follow identity file (specified from CL or ssh_config)
# Try identity files one by one, until one works
for key_path in identity_files:
key_path = os.path.expanduser(key_path)
try:
key = paramiko.RSAKey.from_private_key_file(key_path)
except paramiko.PasswordRequiredException:
pk_password = getpass(
"It seems that your identity from '{}' is encrypted. "
"Please enter your password: ".format(key_path)
)
try:
key = paramiko.RSAKey.from_private_key_file(key_path, pk_password)
except paramiko.SSHException:
self.logger.error(
"Incorrect passphrase. Cannot decode private key from '{}'.".format(key_path)
)
continue
except IOError or paramiko.SSHException:
self.logger.error(
"Something went wrong while opening '{}'. Skipping it.".format(key_path)
)
continue
if perform_key_auth(key):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
else: # No authentication method specified, we shouldn't arrive here.
assert False
except paramiko.SSHException:
self.logger.error(
"None of the provided authentication methods worked. Exiting."
)
transport.close()
sys.exit(1)
finally:
if agent:
agent.close()
self.sftp = paramiko.SFTPClient.from_transport(transport)
if self.remote_path.startswith("~"):
# nasty hack to let getcwd work without changing dir!
self.sftp.chdir('.')
self.remote_path = self.remote_path.replace(
"~", self.sftp.getcwd()) # home is the initial sftp dir
@staticmethod
def _file_need_upload(l_st, r_st):
return True if \
l_st.st_size != r_st.st_size or int(l_st.st_mtime) != r_st.st_mtime \
else False
@staticmethod
def _must_be_deleted(local_path, r_st):
"""Return True if the remote correspondent of local_path has to be deleted.
i.e. if it doesn't exists locally or if it has a different type from the remote one."""
# if the file doesn't exists
if not os.path.lexists(local_path):
return True
# or if the file type is different
l_st = os.lstat(local_path)
if S_IFMT(r_st.st_mode) != S_IFMT(l_st.st_mode):
return True
return False
def _match_modes(self, remote_path, l_st):
"""Match mod, utime and uid/gid with locals one."""
self.sftp.chmod(remote_path, S_IMODE(l_st.st_mode))
self.sftp.utime(remote_path, (l_st.st_atime, l_st.st_mtime))
if self.chown:
self.sftp.chown(remote_path, l_st.st_uid, l_st.st_gid)
def file_upload(self, local_path, remote_path, l_st):
"""Upload local_path to remote_path and set permission and mtime."""
self.sftp.put(local_path, remote_path)
self._match_modes(remote_path, l_st)
def check_for_deletion(self, relative_path=None):
"""Traverse the entire remote_path tree.
Find files/directories that need to be deleted,
not being present in the local folder.
"""
if not relative_path:
relative_path = str() # root of shared directory tree
remote_path = path_join(self.remote_path, relative_path)
local_path = path_join(self.local_path, relative_path)
for remote_st in self.sftp.listdir_attr(remote_path):
r_lstat = self.sftp.lstat(path_join(remote_path, remote_st.filename))
inner_remote_path = path_join(remote_path, remote_st.filename)
inner_local_path = path_join(local_path, remote_st.filename)
# check if remote_st is a symlink
# otherwise could delete file outside shared directory
if S_ISLNK(r_lstat.st_mode):
if self._must_be_deleted(inner_local_path, r_lstat):
self.remote_delete(inner_remote_path, r_lstat)
continue
if self._must_be_deleted(inner_local_path, remote_st):
self.remote_delete(inner_remote_path, remote_st)
elif S_ISDIR(remote_st.st_mode):
self.check_for_deletion(
path_join(relative_path, remote_st.filename)
)
def create_update_symlink(self, link_destination, remote_path):
"""Create a new link pointing to link_destination in remote_path position."""
try: # if there's anything, delete it
self.sftp.remove(remote_path)
except IOError: # that's fine, nothing exists there!
pass
finally: # and recreate the link
try:
self.sftp.symlink(link_destination, remote_path)
except OSError as e:
# Sometimes, if links are "too" different, symlink fails.
# Sadly, nothing we can do about it.
self.logger.error("error while symlinking {} to {}: {}".format(
remote_path, link_destination, e))
def node_check_for_upload_create(self, relative_path, f):
"""Check if the given directory tree node has to be uploaded/created on the remote folder."""
if not relative_path:
# we're at the root of the shared directory tree
relative_path = str()
# the (absolute) local address of f.
local_path = path_join(self.local_path, relative_path, f)
try:
l_st = os.lstat(local_path)
except OSError as e:
"""A little background here.
Sometimes, in big clusters configurations (mail, etc.),
files could disappear or be moved, suddenly.
There's nothing to do about it,
system should be stopped before doing backups.
Anyway, we log it, and skip it.
"""
self.logger.error("error while checking {}: {}".format(relative_path, e))
return
if local_path in self.exclude_list:
self.logger.info("Skipping excluded file %s.", local_path)
return
# the (absolute) remote address of f.
remote_path = path_join(self.remote_path, relative_path, f)
# First case: f is a directory
if S_ISDIR(l_st.st_mode):
# we check if the folder exists on the remote side
# it has to be a folder, otherwise it would have already been
# deleted
try:
self.sftp.stat(remote_path)
except IOError: # it doesn't exist yet on remote side
self.sftp.mkdir(remote_path)
self._match_modes(remote_path, l_st)
# now, we should traverse f too (recursion magic!)
self.check_for_upload_create(path_join(relative_path, f))
# Second case: f is a symbolic link
elif S_ISLNK(l_st.st_mode):
# read the local link
local_link = os.readlink(local_path)
absolute_local_link = os.path.realpath(local_link)
# is it absolute?
is_absolute = local_link.startswith("/")
# and does it point inside the shared directory?
# add trailing slash (security)
trailing_local_path = path_join(self.local_path, '')
relpath = os.path.commonprefix(
[absolute_local_link,
trailing_local_path]
) == trailing_local_path
if relpath:
relative_link = absolute_local_link[len(trailing_local_path):]
else:
relative_link = None
"""
# Refactor them all, be efficient!
# Case A: absolute link pointing outside shared directory
# (we can only update the remote part)
if is_absolute and not relpath:
self.create_update_symlink(local_link, remote_path)
# Case B: absolute link pointing inside shared directory
# (we can leave it as it is or fix the prefix to match the one of the remote server)
elif is_absolute and relpath:
if self.fix_symlinks:
self.create_update_symlink(
join(
self.remote_path,
relative_link,
),
remote_path
)
else:
self.create_update_symlink(local_link, remote_path)
# Case C: relative link pointing outside shared directory
# (all we can do is try to make the link anyway)
elif not is_absolute and not relpath:
self.create_update_symlink(local_link, remote_path)
# Case D: relative link pointing inside shared directory
# (we preserve the relativity and link it!)
elif not is_absolute and relpath:
self.create_update_symlink(local_link, remote_path)
"""
if is_absolute and relpath:
if self.fix_symlinks:
self.create_update_symlink(
path_join(
self.remote_path,
relative_link,
),
remote_path
)
else:
self.create_update_symlink(local_link, remote_path)
# Third case: regular file
elif S_ISREG(l_st.st_mode):
try:
r_st = self.sftp.lstat(remote_path)
if self._file_need_upload(l_st, r_st):
self.file_upload(local_path, remote_path, l_st)
except IOError as e:
if e.errno == errno.ENOENT:
self.file_upload(local_path, remote_path, l_st)
# Anything else.
else:
self.logger.warning("Skipping unsupported file %s.", local_path)
def check_for_upload_create(self, relative_path=None):
"""Traverse the relative_path tree and check for files that need to be uploaded/created.
Relativity here refers to the shared directory tree."""
for f in os.listdir(
path_join(
self.local_path, relative_path) if relative_path else self.local_path
):
self.node_check_for_upload_create(relative_path, f)
def run(self):
"""Run the sync.
Confront the local and the remote directories and perform the needed changes."""
# Check if remote path is present
try:
self.sftp.stat(self.remote_path)
except FileNotFoundError as e:
if self.create_remote_directory:
self.sftp.mkdir(self.remote_path)
self.logger.info(
"Created missing remote dir: '" + self.remote_path + "'")
else:
self.logger.error(
"Remote folder does not exists. "
"Add '-r' to create it if missing.")
sys.exit(1)
try:
if self.delete:
# First check for items to be removed
self.check_for_deletion()
# Now scan local for items to upload/create
self.check_for_upload_create()
except FileNotFoundError:
# If this happens, probably the remote folder doesn't exist.
self.logger.error(
"Error while opening remote folder. Are you sure it does exist?")
sys.exit(1)
|
unbit/sftpclone | sftpclone/sftpclone.py | SFTPClone.check_for_deletion | python | def check_for_deletion(self, relative_path=None):
if not relative_path:
relative_path = str() # root of shared directory tree
remote_path = path_join(self.remote_path, relative_path)
local_path = path_join(self.local_path, relative_path)
for remote_st in self.sftp.listdir_attr(remote_path):
r_lstat = self.sftp.lstat(path_join(remote_path, remote_st.filename))
inner_remote_path = path_join(remote_path, remote_st.filename)
inner_local_path = path_join(local_path, remote_st.filename)
# check if remote_st is a symlink
# otherwise could delete file outside shared directory
if S_ISLNK(r_lstat.st_mode):
if self._must_be_deleted(inner_local_path, r_lstat):
self.remote_delete(inner_remote_path, r_lstat)
continue
if self._must_be_deleted(inner_local_path, remote_st):
self.remote_delete(inner_remote_path, remote_st)
elif S_ISDIR(remote_st.st_mode):
self.check_for_deletion(
path_join(relative_path, remote_st.filename)
) | Traverse the entire remote_path tree.
Find files/directories that need to be deleted,
not being present in the local folder. | train | https://github.com/unbit/sftpclone/blob/1cc89478e680fc4e0d12b1a15b5bafd0390d05da/sftpclone/sftpclone.py#L415-L445 | [
"def path_join(*args):\n \"\"\"\n Wrapper around `os.path.join`.\n Makes sure to join paths of the same type (bytes).\n \"\"\"\n args = (paramiko.py3compat.u(arg) for arg in args)\n return os.path.join(*args)\n",
"def _must_be_deleted(local_path, r_st):\n \"\"\"Return True if the remote correspondent of local_path has to be deleted.\n\n i.e. if it doesn't exists locally or if it has a different type from the remote one.\"\"\"\n # if the file doesn't exists\n if not os.path.lexists(local_path):\n return True\n\n # or if the file type is different\n l_st = os.lstat(local_path)\n if S_IFMT(r_st.st_mode) != S_IFMT(l_st.st_mode):\n return True\n\n return False\n"
] | class SFTPClone(object):
"""The SFTPClone class."""
def __init__(self, local_path, remote_url,
identity_files=None, port=None, fix_symlinks=False,
ssh_config_path=None, ssh_agent=False,
exclude_file=None, known_hosts_path=None,
delete=True, allow_unknown=False,
create_remote_directory=False,
):
"""Init the needed parameters and the SFTPClient."""
self.local_path = os.path.realpath(os.path.expanduser(local_path))
self.logger = logger or configure_logging()
self.create_remote_directory = create_remote_directory
if not os.path.exists(self.local_path):
self.logger.error("Local path MUST exist. Exiting.")
sys.exit(1)
if exclude_file:
with open(exclude_file) as f:
# As in rsync's exclude from, ignore lines with leading ; and #
# and treat each path as relative (thus by removing the leading
# /)
exclude_list = [
line.rstrip().lstrip("/")
for line in f
if not line.startswith((";", "#"))
]
# actually, is a set of excluded files
self.exclude_list = {
g
for pattern in exclude_list
for g in glob.glob(path_join(self.local_path, pattern))
}
else:
self.exclude_list = set()
username, password, hostname, self.remote_path = parse_username_password_hostname(remote_url)
identity_files = identity_files or []
proxy_command = None
if ssh_config_path:
try:
with open(os.path.expanduser(ssh_config_path)) as c_file:
ssh_config = paramiko.SSHConfig()
ssh_config.parse(c_file)
c = ssh_config.lookup(hostname)
hostname = c.get("hostname", hostname)
username = c.get("user", username)
port = int(c.get("port", port))
identity_files = c.get("identityfile", identity_files)
proxy_command = c.get("proxycommand")
except Exception as e:
# it could be safe to continue anyway,
# because parameters could have been manually specified
self.logger.error(
"Error while parsing ssh_config file: %s. Trying to continue anyway...", e
)
# Set default values
if not username:
username = getuser() # defaults to current user
port = port or 22
allow_unknown = allow_unknown or False
self.chown = False
self.fix_symlinks = fix_symlinks or False
self.delete = delete if delete is not None else True
if ssh_agent:
agent, agent_keys = get_ssh_agent_keys(self.logger)
else:
agent, agent_keys = None, None
if not identity_files and not password and not agent_keys:
self.logger.error(
"You need to specify either a password, an identity or to enable the ssh-agent support."
)
sys.exit(1)
# only root can change file owner
if username == 'root':
self.chown = True
sock = (hostname, port)
if proxy_command is not None:
sock = paramiko.proxy.ProxyCommand(proxy_command)
try:
transport = paramiko.Transport(sock)
except socket.gaierror:
self.logger.error(
"Hostname not known. Are you sure you inserted it correctly?")
sys.exit(1)
try:
ssh_host = hostname if port == 22 else "[{}]:{}".format(hostname, port)
known_hosts = None
"""
Before starting the transport session, we have to configure it.
Specifically, we need to configure the preferred PK algorithm.
If the system already knows a public key of a specific kind for
a remote host, we have to peek its type as the preferred one.
"""
if known_hosts_path:
known_hosts = paramiko.HostKeys()
known_hosts_path = os.path.realpath(
os.path.expanduser(known_hosts_path))
try:
known_hosts.load(known_hosts_path)
except IOError:
self.logger.error(
"Error while loading known hosts file at {}. Exiting...".format(
known_hosts_path)
)
sys.exit(1)
known_keys = known_hosts.lookup(ssh_host)
if known_keys is not None:
# one or more keys are already known
# set their type as preferred
transport.get_security_options().key_types = \
tuple(known_keys.keys())
transport.start_client()
if not known_hosts:
self.logger.warning("Security warning: skipping known hosts check...")
else:
pubk = transport.get_remote_server_key()
if ssh_host in known_hosts.keys():
if not known_hosts.check(ssh_host, pubk):
self.logger.error(
"Security warning: "
"remote key fingerprint {} for hostname "
"{} didn't match the one in known_hosts {}. "
"Exiting...".format(
pubk.get_base64(),
ssh_host,
known_hosts.lookup(hostname),
)
)
sys.exit(1)
elif not allow_unknown:
prompt = ("The authenticity of host '{}' can't be established.\n"
"{} key is {}.\n"
"Are you sure you want to continue connecting? [y/n] ").format(
ssh_host, pubk.get_name(), pubk.get_base64())
try:
# Renamed to `input` in Python 3.x
response = raw_input(prompt)
except NameError:
response = input(prompt)
# Note: we do not modify the user's known_hosts file
if not (response == "y" or response == "yes"):
self.logger.error(
"Host authentication failed."
)
sys.exit(1)
def perform_key_auth(pkey):
try:
transport.auth_publickey(
username=username,
key=pkey
)
return True
except paramiko.SSHException:
self.logger.warning(
"Authentication with identity {}... failed".format(pkey.get_base64()[:10])
)
return False
if password: # Password auth, if specified.
transport.auth_password(
username=username,
password=password
)
elif agent_keys: # SSH agent keys have higher priority
for pkey in agent_keys:
if perform_key_auth(pkey):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
elif identity_files: # Then follow identity file (specified from CL or ssh_config)
# Try identity files one by one, until one works
for key_path in identity_files:
key_path = os.path.expanduser(key_path)
try:
key = paramiko.RSAKey.from_private_key_file(key_path)
except paramiko.PasswordRequiredException:
pk_password = getpass(
"It seems that your identity from '{}' is encrypted. "
"Please enter your password: ".format(key_path)
)
try:
key = paramiko.RSAKey.from_private_key_file(key_path, pk_password)
except paramiko.SSHException:
self.logger.error(
"Incorrect passphrase. Cannot decode private key from '{}'.".format(key_path)
)
continue
except IOError or paramiko.SSHException:
self.logger.error(
"Something went wrong while opening '{}'. Skipping it.".format(key_path)
)
continue
if perform_key_auth(key):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
else: # No authentication method specified, we shouldn't arrive here.
assert False
except paramiko.SSHException:
self.logger.error(
"None of the provided authentication methods worked. Exiting."
)
transport.close()
sys.exit(1)
finally:
if agent:
agent.close()
self.sftp = paramiko.SFTPClient.from_transport(transport)
if self.remote_path.startswith("~"):
# nasty hack to let getcwd work without changing dir!
self.sftp.chdir('.')
self.remote_path = self.remote_path.replace(
"~", self.sftp.getcwd()) # home is the initial sftp dir
@staticmethod
def _file_need_upload(l_st, r_st):
return True if \
l_st.st_size != r_st.st_size or int(l_st.st_mtime) != r_st.st_mtime \
else False
@staticmethod
def _must_be_deleted(local_path, r_st):
"""Return True if the remote correspondent of local_path has to be deleted.
i.e. if it doesn't exists locally or if it has a different type from the remote one."""
# if the file doesn't exists
if not os.path.lexists(local_path):
return True
# or if the file type is different
l_st = os.lstat(local_path)
if S_IFMT(r_st.st_mode) != S_IFMT(l_st.st_mode):
return True
return False
def _match_modes(self, remote_path, l_st):
"""Match mod, utime and uid/gid with locals one."""
self.sftp.chmod(remote_path, S_IMODE(l_st.st_mode))
self.sftp.utime(remote_path, (l_st.st_atime, l_st.st_mtime))
if self.chown:
self.sftp.chown(remote_path, l_st.st_uid, l_st.st_gid)
def file_upload(self, local_path, remote_path, l_st):
"""Upload local_path to remote_path and set permission and mtime."""
self.sftp.put(local_path, remote_path)
self._match_modes(remote_path, l_st)
def remote_delete(self, remote_path, r_st):
"""Remove the remote directory node."""
# If it's a directory, then delete content and directory
if S_ISDIR(r_st.st_mode):
for item in self.sftp.listdir_attr(remote_path):
full_path = path_join(remote_path, item.filename)
self.remote_delete(full_path, item)
self.sftp.rmdir(remote_path)
# Or simply delete files
else:
try:
self.sftp.remove(remote_path)
except FileNotFoundError as e:
self.logger.error(
"error while removing {}. trace: {}".format(remote_path, e)
)
def create_update_symlink(self, link_destination, remote_path):
"""Create a new link pointing to link_destination in remote_path position."""
try: # if there's anything, delete it
self.sftp.remove(remote_path)
except IOError: # that's fine, nothing exists there!
pass
finally: # and recreate the link
try:
self.sftp.symlink(link_destination, remote_path)
except OSError as e:
# Sometimes, if links are "too" different, symlink fails.
# Sadly, nothing we can do about it.
self.logger.error("error while symlinking {} to {}: {}".format(
remote_path, link_destination, e))
def node_check_for_upload_create(self, relative_path, f):
"""Check if the given directory tree node has to be uploaded/created on the remote folder."""
if not relative_path:
# we're at the root of the shared directory tree
relative_path = str()
# the (absolute) local address of f.
local_path = path_join(self.local_path, relative_path, f)
try:
l_st = os.lstat(local_path)
except OSError as e:
"""A little background here.
Sometimes, in big clusters configurations (mail, etc.),
files could disappear or be moved, suddenly.
There's nothing to do about it,
system should be stopped before doing backups.
Anyway, we log it, and skip it.
"""
self.logger.error("error while checking {}: {}".format(relative_path, e))
return
if local_path in self.exclude_list:
self.logger.info("Skipping excluded file %s.", local_path)
return
# the (absolute) remote address of f.
remote_path = path_join(self.remote_path, relative_path, f)
# First case: f is a directory
if S_ISDIR(l_st.st_mode):
# we check if the folder exists on the remote side
# it has to be a folder, otherwise it would have already been
# deleted
try:
self.sftp.stat(remote_path)
except IOError: # it doesn't exist yet on remote side
self.sftp.mkdir(remote_path)
self._match_modes(remote_path, l_st)
# now, we should traverse f too (recursion magic!)
self.check_for_upload_create(path_join(relative_path, f))
# Second case: f is a symbolic link
elif S_ISLNK(l_st.st_mode):
# read the local link
local_link = os.readlink(local_path)
absolute_local_link = os.path.realpath(local_link)
# is it absolute?
is_absolute = local_link.startswith("/")
# and does it point inside the shared directory?
# add trailing slash (security)
trailing_local_path = path_join(self.local_path, '')
relpath = os.path.commonprefix(
[absolute_local_link,
trailing_local_path]
) == trailing_local_path
if relpath:
relative_link = absolute_local_link[len(trailing_local_path):]
else:
relative_link = None
"""
# Refactor them all, be efficient!
# Case A: absolute link pointing outside shared directory
# (we can only update the remote part)
if is_absolute and not relpath:
self.create_update_symlink(local_link, remote_path)
# Case B: absolute link pointing inside shared directory
# (we can leave it as it is or fix the prefix to match the one of the remote server)
elif is_absolute and relpath:
if self.fix_symlinks:
self.create_update_symlink(
join(
self.remote_path,
relative_link,
),
remote_path
)
else:
self.create_update_symlink(local_link, remote_path)
# Case C: relative link pointing outside shared directory
# (all we can do is try to make the link anyway)
elif not is_absolute and not relpath:
self.create_update_symlink(local_link, remote_path)
# Case D: relative link pointing inside shared directory
# (we preserve the relativity and link it!)
elif not is_absolute and relpath:
self.create_update_symlink(local_link, remote_path)
"""
if is_absolute and relpath:
if self.fix_symlinks:
self.create_update_symlink(
path_join(
self.remote_path,
relative_link,
),
remote_path
)
else:
self.create_update_symlink(local_link, remote_path)
# Third case: regular file
elif S_ISREG(l_st.st_mode):
try:
r_st = self.sftp.lstat(remote_path)
if self._file_need_upload(l_st, r_st):
self.file_upload(local_path, remote_path, l_st)
except IOError as e:
if e.errno == errno.ENOENT:
self.file_upload(local_path, remote_path, l_st)
# Anything else.
else:
self.logger.warning("Skipping unsupported file %s.", local_path)
def check_for_upload_create(self, relative_path=None):
"""Traverse the relative_path tree and check for files that need to be uploaded/created.
Relativity here refers to the shared directory tree."""
for f in os.listdir(
path_join(
self.local_path, relative_path) if relative_path else self.local_path
):
self.node_check_for_upload_create(relative_path, f)
def run(self):
"""Run the sync.
Confront the local and the remote directories and perform the needed changes."""
# Check if remote path is present
try:
self.sftp.stat(self.remote_path)
except FileNotFoundError as e:
if self.create_remote_directory:
self.sftp.mkdir(self.remote_path)
self.logger.info(
"Created missing remote dir: '" + self.remote_path + "'")
else:
self.logger.error(
"Remote folder does not exists. "
"Add '-r' to create it if missing.")
sys.exit(1)
try:
if self.delete:
# First check for items to be removed
self.check_for_deletion()
# Now scan local for items to upload/create
self.check_for_upload_create()
except FileNotFoundError:
# If this happens, probably the remote folder doesn't exist.
self.logger.error(
"Error while opening remote folder. Are you sure it does exist?")
sys.exit(1)
|
unbit/sftpclone | sftpclone/sftpclone.py | SFTPClone.create_update_symlink | python | def create_update_symlink(self, link_destination, remote_path):
try: # if there's anything, delete it
self.sftp.remove(remote_path)
except IOError: # that's fine, nothing exists there!
pass
finally: # and recreate the link
try:
self.sftp.symlink(link_destination, remote_path)
except OSError as e:
# Sometimes, if links are "too" different, symlink fails.
# Sadly, nothing we can do about it.
self.logger.error("error while symlinking {} to {}: {}".format(
remote_path, link_destination, e)) | Create a new link pointing to link_destination in remote_path position. | train | https://github.com/unbit/sftpclone/blob/1cc89478e680fc4e0d12b1a15b5bafd0390d05da/sftpclone/sftpclone.py#L447-L460 | null | class SFTPClone(object):
"""The SFTPClone class."""
def __init__(self, local_path, remote_url,
identity_files=None, port=None, fix_symlinks=False,
ssh_config_path=None, ssh_agent=False,
exclude_file=None, known_hosts_path=None,
delete=True, allow_unknown=False,
create_remote_directory=False,
):
"""Init the needed parameters and the SFTPClient."""
self.local_path = os.path.realpath(os.path.expanduser(local_path))
self.logger = logger or configure_logging()
self.create_remote_directory = create_remote_directory
if not os.path.exists(self.local_path):
self.logger.error("Local path MUST exist. Exiting.")
sys.exit(1)
if exclude_file:
with open(exclude_file) as f:
# As in rsync's exclude from, ignore lines with leading ; and #
# and treat each path as relative (thus by removing the leading
# /)
exclude_list = [
line.rstrip().lstrip("/")
for line in f
if not line.startswith((";", "#"))
]
# actually, is a set of excluded files
self.exclude_list = {
g
for pattern in exclude_list
for g in glob.glob(path_join(self.local_path, pattern))
}
else:
self.exclude_list = set()
username, password, hostname, self.remote_path = parse_username_password_hostname(remote_url)
identity_files = identity_files or []
proxy_command = None
if ssh_config_path:
try:
with open(os.path.expanduser(ssh_config_path)) as c_file:
ssh_config = paramiko.SSHConfig()
ssh_config.parse(c_file)
c = ssh_config.lookup(hostname)
hostname = c.get("hostname", hostname)
username = c.get("user", username)
port = int(c.get("port", port))
identity_files = c.get("identityfile", identity_files)
proxy_command = c.get("proxycommand")
except Exception as e:
# it could be safe to continue anyway,
# because parameters could have been manually specified
self.logger.error(
"Error while parsing ssh_config file: %s. Trying to continue anyway...", e
)
# Set default values
if not username:
username = getuser() # defaults to current user
port = port or 22
allow_unknown = allow_unknown or False
self.chown = False
self.fix_symlinks = fix_symlinks or False
self.delete = delete if delete is not None else True
if ssh_agent:
agent, agent_keys = get_ssh_agent_keys(self.logger)
else:
agent, agent_keys = None, None
if not identity_files and not password and not agent_keys:
self.logger.error(
"You need to specify either a password, an identity or to enable the ssh-agent support."
)
sys.exit(1)
# only root can change file owner
if username == 'root':
self.chown = True
sock = (hostname, port)
if proxy_command is not None:
sock = paramiko.proxy.ProxyCommand(proxy_command)
try:
transport = paramiko.Transport(sock)
except socket.gaierror:
self.logger.error(
"Hostname not known. Are you sure you inserted it correctly?")
sys.exit(1)
try:
ssh_host = hostname if port == 22 else "[{}]:{}".format(hostname, port)
known_hosts = None
"""
Before starting the transport session, we have to configure it.
Specifically, we need to configure the preferred PK algorithm.
If the system already knows a public key of a specific kind for
a remote host, we have to peek its type as the preferred one.
"""
if known_hosts_path:
known_hosts = paramiko.HostKeys()
known_hosts_path = os.path.realpath(
os.path.expanduser(known_hosts_path))
try:
known_hosts.load(known_hosts_path)
except IOError:
self.logger.error(
"Error while loading known hosts file at {}. Exiting...".format(
known_hosts_path)
)
sys.exit(1)
known_keys = known_hosts.lookup(ssh_host)
if known_keys is not None:
# one or more keys are already known
# set their type as preferred
transport.get_security_options().key_types = \
tuple(known_keys.keys())
transport.start_client()
if not known_hosts:
self.logger.warning("Security warning: skipping known hosts check...")
else:
pubk = transport.get_remote_server_key()
if ssh_host in known_hosts.keys():
if not known_hosts.check(ssh_host, pubk):
self.logger.error(
"Security warning: "
"remote key fingerprint {} for hostname "
"{} didn't match the one in known_hosts {}. "
"Exiting...".format(
pubk.get_base64(),
ssh_host,
known_hosts.lookup(hostname),
)
)
sys.exit(1)
elif not allow_unknown:
prompt = ("The authenticity of host '{}' can't be established.\n"
"{} key is {}.\n"
"Are you sure you want to continue connecting? [y/n] ").format(
ssh_host, pubk.get_name(), pubk.get_base64())
try:
# Renamed to `input` in Python 3.x
response = raw_input(prompt)
except NameError:
response = input(prompt)
# Note: we do not modify the user's known_hosts file
if not (response == "y" or response == "yes"):
self.logger.error(
"Host authentication failed."
)
sys.exit(1)
def perform_key_auth(pkey):
try:
transport.auth_publickey(
username=username,
key=pkey
)
return True
except paramiko.SSHException:
self.logger.warning(
"Authentication with identity {}... failed".format(pkey.get_base64()[:10])
)
return False
if password: # Password auth, if specified.
transport.auth_password(
username=username,
password=password
)
elif agent_keys: # SSH agent keys have higher priority
for pkey in agent_keys:
if perform_key_auth(pkey):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
elif identity_files: # Then follow identity file (specified from CL or ssh_config)
# Try identity files one by one, until one works
for key_path in identity_files:
key_path = os.path.expanduser(key_path)
try:
key = paramiko.RSAKey.from_private_key_file(key_path)
except paramiko.PasswordRequiredException:
pk_password = getpass(
"It seems that your identity from '{}' is encrypted. "
"Please enter your password: ".format(key_path)
)
try:
key = paramiko.RSAKey.from_private_key_file(key_path, pk_password)
except paramiko.SSHException:
self.logger.error(
"Incorrect passphrase. Cannot decode private key from '{}'.".format(key_path)
)
continue
except IOError or paramiko.SSHException:
self.logger.error(
"Something went wrong while opening '{}'. Skipping it.".format(key_path)
)
continue
if perform_key_auth(key):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
else: # No authentication method specified, we shouldn't arrive here.
assert False
except paramiko.SSHException:
self.logger.error(
"None of the provided authentication methods worked. Exiting."
)
transport.close()
sys.exit(1)
finally:
if agent:
agent.close()
self.sftp = paramiko.SFTPClient.from_transport(transport)
if self.remote_path.startswith("~"):
# nasty hack to let getcwd work without changing dir!
self.sftp.chdir('.')
self.remote_path = self.remote_path.replace(
"~", self.sftp.getcwd()) # home is the initial sftp dir
@staticmethod
def _file_need_upload(l_st, r_st):
return True if \
l_st.st_size != r_st.st_size or int(l_st.st_mtime) != r_st.st_mtime \
else False
@staticmethod
def _must_be_deleted(local_path, r_st):
"""Return True if the remote correspondent of local_path has to be deleted.
i.e. if it doesn't exists locally or if it has a different type from the remote one."""
# if the file doesn't exists
if not os.path.lexists(local_path):
return True
# or if the file type is different
l_st = os.lstat(local_path)
if S_IFMT(r_st.st_mode) != S_IFMT(l_st.st_mode):
return True
return False
def _match_modes(self, remote_path, l_st):
"""Match mod, utime and uid/gid with locals one."""
self.sftp.chmod(remote_path, S_IMODE(l_st.st_mode))
self.sftp.utime(remote_path, (l_st.st_atime, l_st.st_mtime))
if self.chown:
self.sftp.chown(remote_path, l_st.st_uid, l_st.st_gid)
def file_upload(self, local_path, remote_path, l_st):
"""Upload local_path to remote_path and set permission and mtime."""
self.sftp.put(local_path, remote_path)
self._match_modes(remote_path, l_st)
def remote_delete(self, remote_path, r_st):
"""Remove the remote directory node."""
# If it's a directory, then delete content and directory
if S_ISDIR(r_st.st_mode):
for item in self.sftp.listdir_attr(remote_path):
full_path = path_join(remote_path, item.filename)
self.remote_delete(full_path, item)
self.sftp.rmdir(remote_path)
# Or simply delete files
else:
try:
self.sftp.remove(remote_path)
except FileNotFoundError as e:
self.logger.error(
"error while removing {}. trace: {}".format(remote_path, e)
)
def check_for_deletion(self, relative_path=None):
"""Traverse the entire remote_path tree.
Find files/directories that need to be deleted,
not being present in the local folder.
"""
if not relative_path:
relative_path = str() # root of shared directory tree
remote_path = path_join(self.remote_path, relative_path)
local_path = path_join(self.local_path, relative_path)
for remote_st in self.sftp.listdir_attr(remote_path):
r_lstat = self.sftp.lstat(path_join(remote_path, remote_st.filename))
inner_remote_path = path_join(remote_path, remote_st.filename)
inner_local_path = path_join(local_path, remote_st.filename)
# check if remote_st is a symlink
# otherwise could delete file outside shared directory
if S_ISLNK(r_lstat.st_mode):
if self._must_be_deleted(inner_local_path, r_lstat):
self.remote_delete(inner_remote_path, r_lstat)
continue
if self._must_be_deleted(inner_local_path, remote_st):
self.remote_delete(inner_remote_path, remote_st)
elif S_ISDIR(remote_st.st_mode):
self.check_for_deletion(
path_join(relative_path, remote_st.filename)
)
def node_check_for_upload_create(self, relative_path, f):
    """Check if the given directory tree node has to be uploaded/created on the remote folder.

    :param relative_path: position of f's parent inside the shared tree
        (a falsy value means the root of the shared tree).
    :param f: the file name of the node to examine.
    """
    if not relative_path:
        # we're at the root of the shared directory tree
        relative_path = str()

    # the (absolute) local address of f.
    local_path = path_join(self.local_path, relative_path, f)
    try:
        l_st = os.lstat(local_path)
    except OSError as e:
        # A little background here.
        # Sometimes, in big clusters configurations (mail, etc.),
        # files could disappear or be moved, suddenly.
        # There's nothing to do about it: the system should be stopped
        # before doing backups. Anyway, we log it, and skip it.
        self.logger.error("error while checking {}: {}".format(relative_path, e))
        return

    if local_path in self.exclude_list:
        self.logger.info("Skipping excluded file %s.", local_path)
        return

    # the (absolute) remote address of f.
    remote_path = path_join(self.remote_path, relative_path, f)

    # First case: f is a directory.
    if S_ISDIR(l_st.st_mode):
        # We check if the folder exists on the remote side;
        # it has to be a folder, otherwise it would have already been
        # deleted by the deletion pass.
        try:
            self.sftp.stat(remote_path)
        except IOError:  # it doesn't exist yet on remote side
            self.sftp.mkdir(remote_path)
        self._match_modes(remote_path, l_st)
        # Now traverse f too (recursion magic!).
        self.check_for_upload_create(path_join(relative_path, f))

    # Second case: f is a symbolic link.
    elif S_ISLNK(l_st.st_mode):
        # read the local link
        local_link = os.readlink(local_path)
        absolute_local_link = os.path.realpath(local_link)
        # is it absolute?
        is_absolute = local_link.startswith("/")
        # and does it point inside the shared directory?
        # Compare against the path with a trailing slash (security:
        # avoids treating e.g. /shared-evil as a child of /shared).
        trailing_local_path = path_join(self.local_path, '')
        relpath = os.path.commonprefix(
            [absolute_local_link,
             trailing_local_path]
        ) == trailing_local_path

        if relpath:
            relative_link = absolute_local_link[len(trailing_local_path):]
        else:
            relative_link = None

        if is_absolute and relpath and self.fix_symlinks:
            # Absolute link pointing inside the shared directory, with
            # --fix-symlinks enabled: rewrite its prefix so it points
            # inside the remote tree instead.
            self.create_update_symlink(
                path_join(
                    self.remote_path,
                    relative_link,
                ),
                remote_path
            )
        else:
            # Every other kind of link (absolute without fixing, relative,
            # or pointing outside the shared directory) is recreated
            # verbatim — the best we can do is make the link anyway.
            self.create_update_symlink(local_link, remote_path)

    # Third case: regular file.
    elif S_ISREG(l_st.st_mode):
        try:
            r_st = self.sftp.lstat(remote_path)
            if self._file_need_upload(l_st, r_st):
                self.file_upload(local_path, remote_path, l_st)
        except IOError as e:
            if e.errno == errno.ENOENT:
                # Missing on the remote side: first upload.
                self.file_upload(local_path, remote_path, l_st)
            # Other IOErrors are deliberately ignored (best-effort sync).

    # Anything else (device, fifo, socket, ...): unsupported.
    else:
        self.logger.warning("Skipping unsupported file %s.", local_path)
def check_for_upload_create(self, relative_path=None):
    """Scan one level of the local tree and dispatch each entry to
    node_check_for_upload_create.

    relative_path is the position inside the shared directory tree;
    a falsy value means the root (i.e. local_path itself).
    """
    base = (
        path_join(self.local_path, relative_path)
        if relative_path
        else self.local_path
    )
    for entry in os.listdir(base):
        self.node_check_for_upload_create(relative_path, entry)
def run(self):
    """Perform the sync: make the remote folder mirror the local one.

    Exits the process with status 1 when the remote folder is missing
    and may not be created, or disappears while the sync is running.
    """
    # Make sure the remote root exists, creating it when allowed.
    try:
        self.sftp.stat(self.remote_path)
    except FileNotFoundError:
        if not self.create_remote_directory:
            self.logger.error(
                "Remote folder does not exists. "
                "Add '-r' to create it if missing.")
            sys.exit(1)
        self.sftp.mkdir(self.remote_path)
        self.logger.info(
            "Created missing remote dir: '" + self.remote_path + "'")

    try:
        # Deletions first, so stale remote entries (e.g. a file where a
        # directory must go) never collide with fresh uploads.
        if self.delete:
            self.check_for_deletion()
        self.check_for_upload_create()
    except FileNotFoundError:
        # If this happens, probably the remote folder doesn't exist.
        self.logger.error(
            "Error while opening remote folder. Are you sure it does exist?")
        sys.exit(1)
|
unbit/sftpclone | sftpclone/sftpclone.py | SFTPClone.node_check_for_upload_create | python | def node_check_for_upload_create(self, relative_path, f):
if not relative_path:
# we're at the root of the shared directory tree
relative_path = str()
# the (absolute) local address of f.
local_path = path_join(self.local_path, relative_path, f)
try:
l_st = os.lstat(local_path)
except OSError as e:
"""A little background here.
Sometimes, in big clusters configurations (mail, etc.),
files could disappear or be moved, suddenly.
There's nothing to do about it,
system should be stopped before doing backups.
Anyway, we log it, and skip it.
"""
self.logger.error("error while checking {}: {}".format(relative_path, e))
return
if local_path in self.exclude_list:
self.logger.info("Skipping excluded file %s.", local_path)
return
# the (absolute) remote address of f.
remote_path = path_join(self.remote_path, relative_path, f)
# First case: f is a directory
if S_ISDIR(l_st.st_mode):
# we check if the folder exists on the remote side
# it has to be a folder, otherwise it would have already been
# deleted
try:
self.sftp.stat(remote_path)
except IOError: # it doesn't exist yet on remote side
self.sftp.mkdir(remote_path)
self._match_modes(remote_path, l_st)
# now, we should traverse f too (recursion magic!)
self.check_for_upload_create(path_join(relative_path, f))
# Second case: f is a symbolic link
elif S_ISLNK(l_st.st_mode):
# read the local link
local_link = os.readlink(local_path)
absolute_local_link = os.path.realpath(local_link)
# is it absolute?
is_absolute = local_link.startswith("/")
# and does it point inside the shared directory?
# add trailing slash (security)
trailing_local_path = path_join(self.local_path, '')
relpath = os.path.commonprefix(
[absolute_local_link,
trailing_local_path]
) == trailing_local_path
if relpath:
relative_link = absolute_local_link[len(trailing_local_path):]
else:
relative_link = None
"""
# Refactor them all, be efficient!
# Case A: absolute link pointing outside shared directory
# (we can only update the remote part)
if is_absolute and not relpath:
self.create_update_symlink(local_link, remote_path)
# Case B: absolute link pointing inside shared directory
# (we can leave it as it is or fix the prefix to match the one of the remote server)
elif is_absolute and relpath:
if self.fix_symlinks:
self.create_update_symlink(
join(
self.remote_path,
relative_link,
),
remote_path
)
else:
self.create_update_symlink(local_link, remote_path)
# Case C: relative link pointing outside shared directory
# (all we can do is try to make the link anyway)
elif not is_absolute and not relpath:
self.create_update_symlink(local_link, remote_path)
# Case D: relative link pointing inside shared directory
# (we preserve the relativity and link it!)
elif not is_absolute and relpath:
self.create_update_symlink(local_link, remote_path)
"""
if is_absolute and relpath:
if self.fix_symlinks:
self.create_update_symlink(
path_join(
self.remote_path,
relative_link,
),
remote_path
)
else:
self.create_update_symlink(local_link, remote_path)
# Third case: regular file
elif S_ISREG(l_st.st_mode):
try:
r_st = self.sftp.lstat(remote_path)
if self._file_need_upload(l_st, r_st):
self.file_upload(local_path, remote_path, l_st)
except IOError as e:
if e.errno == errno.ENOENT:
self.file_upload(local_path, remote_path, l_st)
# Anything else.
else:
self.logger.warning("Skipping unsupported file %s.", local_path) | Check if the given directory tree node has to be uploaded/created on the remote folder. | train | https://github.com/unbit/sftpclone/blob/1cc89478e680fc4e0d12b1a15b5bafd0390d05da/sftpclone/sftpclone.py#L462-L583 | [
"def path_join(*args):\n \"\"\"\n Wrapper around `os.path.join`.\n Makes sure to join paths of the same type (bytes).\n \"\"\"\n args = (paramiko.py3compat.u(arg) for arg in args)\n return os.path.join(*args)\n",
"def _file_need_upload(l_st, r_st):\n return True if \\\n l_st.st_size != r_st.st_size or int(l_st.st_mtime) != r_st.st_mtime \\\n else False\n",
"def _match_modes(self, remote_path, l_st):\n \"\"\"Match mod, utime and uid/gid with locals one.\"\"\"\n self.sftp.chmod(remote_path, S_IMODE(l_st.st_mode))\n self.sftp.utime(remote_path, (l_st.st_atime, l_st.st_mtime))\n\n if self.chown:\n self.sftp.chown(remote_path, l_st.st_uid, l_st.st_gid)\n",
"def create_update_symlink(self, link_destination, remote_path):\n \"\"\"Create a new link pointing to link_destination in remote_path position.\"\"\"\n try: # if there's anything, delete it\n self.sftp.remove(remote_path)\n except IOError: # that's fine, nothing exists there!\n pass\n finally: # and recreate the link\n try:\n self.sftp.symlink(link_destination, remote_path)\n except OSError as e:\n # Sometimes, if links are \"too\" different, symlink fails.\n # Sadly, nothing we can do about it.\n self.logger.error(\"error while symlinking {} to {}: {}\".format(\n remote_path, link_destination, e))\n",
"def check_for_upload_create(self, relative_path=None):\n \"\"\"Traverse the relative_path tree and check for files that need to be uploaded/created.\n\n Relativity here refers to the shared directory tree.\"\"\"\n for f in os.listdir(\n path_join(\n self.local_path, relative_path) if relative_path else self.local_path\n ):\n self.node_check_for_upload_create(relative_path, f)\n"
] | class SFTPClone(object):
"""The SFTPClone class."""
def __init__(self, local_path, remote_url,
identity_files=None, port=None, fix_symlinks=False,
ssh_config_path=None, ssh_agent=False,
exclude_file=None, known_hosts_path=None,
delete=True, allow_unknown=False,
create_remote_directory=False,
):
"""Init the needed parameters and the SFTPClient."""
self.local_path = os.path.realpath(os.path.expanduser(local_path))
self.logger = logger or configure_logging()
self.create_remote_directory = create_remote_directory
if not os.path.exists(self.local_path):
self.logger.error("Local path MUST exist. Exiting.")
sys.exit(1)
if exclude_file:
with open(exclude_file) as f:
# As in rsync's exclude from, ignore lines with leading ; and #
# and treat each path as relative (thus by removing the leading
# /)
exclude_list = [
line.rstrip().lstrip("/")
for line in f
if not line.startswith((";", "#"))
]
# actually, is a set of excluded files
self.exclude_list = {
g
for pattern in exclude_list
for g in glob.glob(path_join(self.local_path, pattern))
}
else:
self.exclude_list = set()
username, password, hostname, self.remote_path = parse_username_password_hostname(remote_url)
identity_files = identity_files or []
proxy_command = None
if ssh_config_path:
try:
with open(os.path.expanduser(ssh_config_path)) as c_file:
ssh_config = paramiko.SSHConfig()
ssh_config.parse(c_file)
c = ssh_config.lookup(hostname)
hostname = c.get("hostname", hostname)
username = c.get("user", username)
port = int(c.get("port", port))
identity_files = c.get("identityfile", identity_files)
proxy_command = c.get("proxycommand")
except Exception as e:
# it could be safe to continue anyway,
# because parameters could have been manually specified
self.logger.error(
"Error while parsing ssh_config file: %s. Trying to continue anyway...", e
)
# Set default values
if not username:
username = getuser() # defaults to current user
port = port or 22
allow_unknown = allow_unknown or False
self.chown = False
self.fix_symlinks = fix_symlinks or False
self.delete = delete if delete is not None else True
if ssh_agent:
agent, agent_keys = get_ssh_agent_keys(self.logger)
else:
agent, agent_keys = None, None
if not identity_files and not password and not agent_keys:
self.logger.error(
"You need to specify either a password, an identity or to enable the ssh-agent support."
)
sys.exit(1)
# only root can change file owner
if username == 'root':
self.chown = True
sock = (hostname, port)
if proxy_command is not None:
sock = paramiko.proxy.ProxyCommand(proxy_command)
try:
transport = paramiko.Transport(sock)
except socket.gaierror:
self.logger.error(
"Hostname not known. Are you sure you inserted it correctly?")
sys.exit(1)
try:
ssh_host = hostname if port == 22 else "[{}]:{}".format(hostname, port)
known_hosts = None
"""
Before starting the transport session, we have to configure it.
Specifically, we need to configure the preferred PK algorithm.
If the system already knows a public key of a specific kind for
a remote host, we have to peek its type as the preferred one.
"""
if known_hosts_path:
known_hosts = paramiko.HostKeys()
known_hosts_path = os.path.realpath(
os.path.expanduser(known_hosts_path))
try:
known_hosts.load(known_hosts_path)
except IOError:
self.logger.error(
"Error while loading known hosts file at {}. Exiting...".format(
known_hosts_path)
)
sys.exit(1)
known_keys = known_hosts.lookup(ssh_host)
if known_keys is not None:
# one or more keys are already known
# set their type as preferred
transport.get_security_options().key_types = \
tuple(known_keys.keys())
transport.start_client()
if not known_hosts:
self.logger.warning("Security warning: skipping known hosts check...")
else:
pubk = transport.get_remote_server_key()
if ssh_host in known_hosts.keys():
if not known_hosts.check(ssh_host, pubk):
self.logger.error(
"Security warning: "
"remote key fingerprint {} for hostname "
"{} didn't match the one in known_hosts {}. "
"Exiting...".format(
pubk.get_base64(),
ssh_host,
known_hosts.lookup(hostname),
)
)
sys.exit(1)
elif not allow_unknown:
prompt = ("The authenticity of host '{}' can't be established.\n"
"{} key is {}.\n"
"Are you sure you want to continue connecting? [y/n] ").format(
ssh_host, pubk.get_name(), pubk.get_base64())
try:
# Renamed to `input` in Python 3.x
response = raw_input(prompt)
except NameError:
response = input(prompt)
# Note: we do not modify the user's known_hosts file
if not (response == "y" or response == "yes"):
self.logger.error(
"Host authentication failed."
)
sys.exit(1)
def perform_key_auth(pkey):
try:
transport.auth_publickey(
username=username,
key=pkey
)
return True
except paramiko.SSHException:
self.logger.warning(
"Authentication with identity {}... failed".format(pkey.get_base64()[:10])
)
return False
if password: # Password auth, if specified.
transport.auth_password(
username=username,
password=password
)
elif agent_keys: # SSH agent keys have higher priority
for pkey in agent_keys:
if perform_key_auth(pkey):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
elif identity_files: # Then follow identity file (specified from CL or ssh_config)
# Try identity files one by one, until one works
for key_path in identity_files:
key_path = os.path.expanduser(key_path)
try:
key = paramiko.RSAKey.from_private_key_file(key_path)
except paramiko.PasswordRequiredException:
pk_password = getpass(
"It seems that your identity from '{}' is encrypted. "
"Please enter your password: ".format(key_path)
)
try:
key = paramiko.RSAKey.from_private_key_file(key_path, pk_password)
except paramiko.SSHException:
self.logger.error(
"Incorrect passphrase. Cannot decode private key from '{}'.".format(key_path)
)
continue
except IOError or paramiko.SSHException:
self.logger.error(
"Something went wrong while opening '{}'. Skipping it.".format(key_path)
)
continue
if perform_key_auth(key):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
else: # No authentication method specified, we shouldn't arrive here.
assert False
except paramiko.SSHException:
self.logger.error(
"None of the provided authentication methods worked. Exiting."
)
transport.close()
sys.exit(1)
finally:
if agent:
agent.close()
self.sftp = paramiko.SFTPClient.from_transport(transport)
if self.remote_path.startswith("~"):
# nasty hack to let getcwd work without changing dir!
self.sftp.chdir('.')
self.remote_path = self.remote_path.replace(
"~", self.sftp.getcwd()) # home is the initial sftp dir
@staticmethod
def _file_need_upload(l_st, r_st):
return True if \
l_st.st_size != r_st.st_size or int(l_st.st_mtime) != r_st.st_mtime \
else False
@staticmethod
def _must_be_deleted(local_path, r_st):
"""Return True if the remote correspondent of local_path has to be deleted.
i.e. if it doesn't exists locally or if it has a different type from the remote one."""
# if the file doesn't exists
if not os.path.lexists(local_path):
return True
# or if the file type is different
l_st = os.lstat(local_path)
if S_IFMT(r_st.st_mode) != S_IFMT(l_st.st_mode):
return True
return False
def _match_modes(self, remote_path, l_st):
"""Match mod, utime and uid/gid with locals one."""
self.sftp.chmod(remote_path, S_IMODE(l_st.st_mode))
self.sftp.utime(remote_path, (l_st.st_atime, l_st.st_mtime))
if self.chown:
self.sftp.chown(remote_path, l_st.st_uid, l_st.st_gid)
def file_upload(self, local_path, remote_path, l_st):
"""Upload local_path to remote_path and set permission and mtime."""
self.sftp.put(local_path, remote_path)
self._match_modes(remote_path, l_st)
def remote_delete(self, remote_path, r_st):
"""Remove the remote directory node."""
# If it's a directory, then delete content and directory
if S_ISDIR(r_st.st_mode):
for item in self.sftp.listdir_attr(remote_path):
full_path = path_join(remote_path, item.filename)
self.remote_delete(full_path, item)
self.sftp.rmdir(remote_path)
# Or simply delete files
else:
try:
self.sftp.remove(remote_path)
except FileNotFoundError as e:
self.logger.error(
"error while removing {}. trace: {}".format(remote_path, e)
)
def check_for_deletion(self, relative_path=None):
"""Traverse the entire remote_path tree.
Find files/directories that need to be deleted,
not being present in the local folder.
"""
if not relative_path:
relative_path = str() # root of shared directory tree
remote_path = path_join(self.remote_path, relative_path)
local_path = path_join(self.local_path, relative_path)
for remote_st in self.sftp.listdir_attr(remote_path):
r_lstat = self.sftp.lstat(path_join(remote_path, remote_st.filename))
inner_remote_path = path_join(remote_path, remote_st.filename)
inner_local_path = path_join(local_path, remote_st.filename)
# check if remote_st is a symlink
# otherwise could delete file outside shared directory
if S_ISLNK(r_lstat.st_mode):
if self._must_be_deleted(inner_local_path, r_lstat):
self.remote_delete(inner_remote_path, r_lstat)
continue
if self._must_be_deleted(inner_local_path, remote_st):
self.remote_delete(inner_remote_path, remote_st)
elif S_ISDIR(remote_st.st_mode):
self.check_for_deletion(
path_join(relative_path, remote_st.filename)
)
def create_update_symlink(self, link_destination, remote_path):
"""Create a new link pointing to link_destination in remote_path position."""
try: # if there's anything, delete it
self.sftp.remove(remote_path)
except IOError: # that's fine, nothing exists there!
pass
finally: # and recreate the link
try:
self.sftp.symlink(link_destination, remote_path)
except OSError as e:
# Sometimes, if links are "too" different, symlink fails.
# Sadly, nothing we can do about it.
self.logger.error("error while symlinking {} to {}: {}".format(
remote_path, link_destination, e))
def check_for_upload_create(self, relative_path=None):
"""Traverse the relative_path tree and check for files that need to be uploaded/created.
Relativity here refers to the shared directory tree."""
for f in os.listdir(
path_join(
self.local_path, relative_path) if relative_path else self.local_path
):
self.node_check_for_upload_create(relative_path, f)
def run(self):
"""Run the sync.
Confront the local and the remote directories and perform the needed changes."""
# Check if remote path is present
try:
self.sftp.stat(self.remote_path)
except FileNotFoundError as e:
if self.create_remote_directory:
self.sftp.mkdir(self.remote_path)
self.logger.info(
"Created missing remote dir: '" + self.remote_path + "'")
else:
self.logger.error(
"Remote folder does not exists. "
"Add '-r' to create it if missing.")
sys.exit(1)
try:
if self.delete:
# First check for items to be removed
self.check_for_deletion()
# Now scan local for items to upload/create
self.check_for_upload_create()
except FileNotFoundError:
# If this happens, probably the remote folder doesn't exist.
self.logger.error(
"Error while opening remote folder. Are you sure it does exist?")
sys.exit(1)
|
unbit/sftpclone | sftpclone/sftpclone.py | SFTPClone.check_for_upload_create | python | def check_for_upload_create(self, relative_path=None):
for f in os.listdir(
path_join(
self.local_path, relative_path) if relative_path else self.local_path
):
self.node_check_for_upload_create(relative_path, f) | Traverse the relative_path tree and check for files that need to be uploaded/created.
Relativity here refers to the shared directory tree. | train | https://github.com/unbit/sftpclone/blob/1cc89478e680fc4e0d12b1a15b5bafd0390d05da/sftpclone/sftpclone.py#L585-L593 | [
"def path_join(*args):\n \"\"\"\n Wrapper around `os.path.join`.\n Makes sure to join paths of the same type (bytes).\n \"\"\"\n args = (paramiko.py3compat.u(arg) for arg in args)\n return os.path.join(*args)\n",
"def node_check_for_upload_create(self, relative_path, f):\n \"\"\"Check if the given directory tree node has to be uploaded/created on the remote folder.\"\"\"\n if not relative_path:\n # we're at the root of the shared directory tree\n relative_path = str()\n\n # the (absolute) local address of f.\n local_path = path_join(self.local_path, relative_path, f)\n try:\n l_st = os.lstat(local_path)\n except OSError as e:\n \"\"\"A little background here.\n Sometimes, in big clusters configurations (mail, etc.),\n files could disappear or be moved, suddenly.\n There's nothing to do about it,\n system should be stopped before doing backups.\n Anyway, we log it, and skip it.\n \"\"\"\n self.logger.error(\"error while checking {}: {}\".format(relative_path, e))\n return\n\n if local_path in self.exclude_list:\n self.logger.info(\"Skipping excluded file %s.\", local_path)\n return\n\n # the (absolute) remote address of f.\n remote_path = path_join(self.remote_path, relative_path, f)\n\n # First case: f is a directory\n if S_ISDIR(l_st.st_mode):\n # we check if the folder exists on the remote side\n # it has to be a folder, otherwise it would have already been\n # deleted\n try:\n self.sftp.stat(remote_path)\n except IOError: # it doesn't exist yet on remote side\n self.sftp.mkdir(remote_path)\n\n self._match_modes(remote_path, l_st)\n\n # now, we should traverse f too (recursion magic!)\n self.check_for_upload_create(path_join(relative_path, f))\n\n # Second case: f is a symbolic link\n elif S_ISLNK(l_st.st_mode):\n # read the local link\n local_link = os.readlink(local_path)\n absolute_local_link = os.path.realpath(local_link)\n\n # is it absolute?\n is_absolute = local_link.startswith(\"/\")\n # and does it point inside the shared directory?\n # add trailing slash (security)\n trailing_local_path = path_join(self.local_path, '')\n relpath = os.path.commonprefix(\n [absolute_local_link,\n trailing_local_path]\n ) == trailing_local_path\n\n if relpath:\n relative_link = 
absolute_local_link[len(trailing_local_path):]\n else:\n relative_link = None\n\n \"\"\"\n # Refactor them all, be efficient!\n\n # Case A: absolute link pointing outside shared directory\n # (we can only update the remote part)\n if is_absolute and not relpath:\n self.create_update_symlink(local_link, remote_path)\n\n # Case B: absolute link pointing inside shared directory\n # (we can leave it as it is or fix the prefix to match the one of the remote server)\n elif is_absolute and relpath:\n if self.fix_symlinks:\n self.create_update_symlink(\n join(\n self.remote_path,\n relative_link,\n ),\n remote_path\n )\n else:\n self.create_update_symlink(local_link, remote_path)\n\n # Case C: relative link pointing outside shared directory\n # (all we can do is try to make the link anyway)\n elif not is_absolute and not relpath:\n self.create_update_symlink(local_link, remote_path)\n\n # Case D: relative link pointing inside shared directory\n # (we preserve the relativity and link it!)\n elif not is_absolute and relpath:\n self.create_update_symlink(local_link, remote_path)\n \"\"\"\n\n if is_absolute and relpath:\n if self.fix_symlinks:\n self.create_update_symlink(\n path_join(\n self.remote_path,\n relative_link,\n ),\n remote_path\n )\n else:\n self.create_update_symlink(local_link, remote_path)\n\n # Third case: regular file\n elif S_ISREG(l_st.st_mode):\n try:\n r_st = self.sftp.lstat(remote_path)\n if self._file_need_upload(l_st, r_st):\n self.file_upload(local_path, remote_path, l_st)\n except IOError as e:\n if e.errno == errno.ENOENT:\n self.file_upload(local_path, remote_path, l_st)\n\n # Anything else.\n else:\n self.logger.warning(\"Skipping unsupported file %s.\", local_path)\n"
] | class SFTPClone(object):
"""The SFTPClone class."""
def __init__(self, local_path, remote_url,
identity_files=None, port=None, fix_symlinks=False,
ssh_config_path=None, ssh_agent=False,
exclude_file=None, known_hosts_path=None,
delete=True, allow_unknown=False,
create_remote_directory=False,
):
"""Init the needed parameters and the SFTPClient."""
self.local_path = os.path.realpath(os.path.expanduser(local_path))
self.logger = logger or configure_logging()
self.create_remote_directory = create_remote_directory
if not os.path.exists(self.local_path):
self.logger.error("Local path MUST exist. Exiting.")
sys.exit(1)
if exclude_file:
with open(exclude_file) as f:
# As in rsync's exclude from, ignore lines with leading ; and #
# and treat each path as relative (thus by removing the leading
# /)
exclude_list = [
line.rstrip().lstrip("/")
for line in f
if not line.startswith((";", "#"))
]
# actually, is a set of excluded files
self.exclude_list = {
g
for pattern in exclude_list
for g in glob.glob(path_join(self.local_path, pattern))
}
else:
self.exclude_list = set()
username, password, hostname, self.remote_path = parse_username_password_hostname(remote_url)
identity_files = identity_files or []
proxy_command = None
if ssh_config_path:
try:
with open(os.path.expanduser(ssh_config_path)) as c_file:
ssh_config = paramiko.SSHConfig()
ssh_config.parse(c_file)
c = ssh_config.lookup(hostname)
hostname = c.get("hostname", hostname)
username = c.get("user", username)
port = int(c.get("port", port))
identity_files = c.get("identityfile", identity_files)
proxy_command = c.get("proxycommand")
except Exception as e:
# it could be safe to continue anyway,
# because parameters could have been manually specified
self.logger.error(
"Error while parsing ssh_config file: %s. Trying to continue anyway...", e
)
# Set default values
if not username:
username = getuser() # defaults to current user
port = port or 22
allow_unknown = allow_unknown or False
self.chown = False
self.fix_symlinks = fix_symlinks or False
self.delete = delete if delete is not None else True
if ssh_agent:
agent, agent_keys = get_ssh_agent_keys(self.logger)
else:
agent, agent_keys = None, None
if not identity_files and not password and not agent_keys:
self.logger.error(
"You need to specify either a password, an identity or to enable the ssh-agent support."
)
sys.exit(1)
# only root can change file owner
if username == 'root':
self.chown = True
sock = (hostname, port)
if proxy_command is not None:
sock = paramiko.proxy.ProxyCommand(proxy_command)
try:
transport = paramiko.Transport(sock)
except socket.gaierror:
self.logger.error(
"Hostname not known. Are you sure you inserted it correctly?")
sys.exit(1)
try:
ssh_host = hostname if port == 22 else "[{}]:{}".format(hostname, port)
known_hosts = None
"""
Before starting the transport session, we have to configure it.
Specifically, we need to configure the preferred PK algorithm.
If the system already knows a public key of a specific kind for
a remote host, we have to peek its type as the preferred one.
"""
if known_hosts_path:
known_hosts = paramiko.HostKeys()
known_hosts_path = os.path.realpath(
os.path.expanduser(known_hosts_path))
try:
known_hosts.load(known_hosts_path)
except IOError:
self.logger.error(
"Error while loading known hosts file at {}. Exiting...".format(
known_hosts_path)
)
sys.exit(1)
known_keys = known_hosts.lookup(ssh_host)
if known_keys is not None:
# one or more keys are already known
# set their type as preferred
transport.get_security_options().key_types = \
tuple(known_keys.keys())
transport.start_client()
if not known_hosts:
self.logger.warning("Security warning: skipping known hosts check...")
else:
pubk = transport.get_remote_server_key()
if ssh_host in known_hosts.keys():
if not known_hosts.check(ssh_host, pubk):
self.logger.error(
"Security warning: "
"remote key fingerprint {} for hostname "
"{} didn't match the one in known_hosts {}. "
"Exiting...".format(
pubk.get_base64(),
ssh_host,
known_hosts.lookup(hostname),
)
)
sys.exit(1)
elif not allow_unknown:
prompt = ("The authenticity of host '{}' can't be established.\n"
"{} key is {}.\n"
"Are you sure you want to continue connecting? [y/n] ").format(
ssh_host, pubk.get_name(), pubk.get_base64())
try:
# Renamed to `input` in Python 3.x
response = raw_input(prompt)
except NameError:
response = input(prompt)
# Note: we do not modify the user's known_hosts file
if not (response == "y" or response == "yes"):
self.logger.error(
"Host authentication failed."
)
sys.exit(1)
def perform_key_auth(pkey):
try:
transport.auth_publickey(
username=username,
key=pkey
)
return True
except paramiko.SSHException:
self.logger.warning(
"Authentication with identity {}... failed".format(pkey.get_base64()[:10])
)
return False
if password: # Password auth, if specified.
transport.auth_password(
username=username,
password=password
)
elif agent_keys: # SSH agent keys have higher priority
for pkey in agent_keys:
if perform_key_auth(pkey):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
elif identity_files: # Then follow identity file (specified from CL or ssh_config)
# Try identity files one by one, until one works
for key_path in identity_files:
key_path = os.path.expanduser(key_path)
try:
key = paramiko.RSAKey.from_private_key_file(key_path)
except paramiko.PasswordRequiredException:
pk_password = getpass(
"It seems that your identity from '{}' is encrypted. "
"Please enter your password: ".format(key_path)
)
try:
key = paramiko.RSAKey.from_private_key_file(key_path, pk_password)
except paramiko.SSHException:
self.logger.error(
"Incorrect passphrase. Cannot decode private key from '{}'.".format(key_path)
)
continue
except IOError or paramiko.SSHException:
self.logger.error(
"Something went wrong while opening '{}'. Skipping it.".format(key_path)
)
continue
if perform_key_auth(key):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
else: # No authentication method specified, we shouldn't arrive here.
assert False
except paramiko.SSHException:
self.logger.error(
"None of the provided authentication methods worked. Exiting."
)
transport.close()
sys.exit(1)
finally:
if agent:
agent.close()
self.sftp = paramiko.SFTPClient.from_transport(transport)
if self.remote_path.startswith("~"):
# nasty hack to let getcwd work without changing dir!
self.sftp.chdir('.')
self.remote_path = self.remote_path.replace(
"~", self.sftp.getcwd()) # home is the initial sftp dir
@staticmethod
def _file_need_upload(l_st, r_st):
return True if \
l_st.st_size != r_st.st_size or int(l_st.st_mtime) != r_st.st_mtime \
else False
@staticmethod
def _must_be_deleted(local_path, r_st):
"""Return True if the remote correspondent of local_path has to be deleted.
i.e. if it doesn't exists locally or if it has a different type from the remote one."""
# if the file doesn't exists
if not os.path.lexists(local_path):
return True
# or if the file type is different
l_st = os.lstat(local_path)
if S_IFMT(r_st.st_mode) != S_IFMT(l_st.st_mode):
return True
return False
def _match_modes(self, remote_path, l_st):
"""Match mod, utime and uid/gid with locals one."""
self.sftp.chmod(remote_path, S_IMODE(l_st.st_mode))
self.sftp.utime(remote_path, (l_st.st_atime, l_st.st_mtime))
if self.chown:
self.sftp.chown(remote_path, l_st.st_uid, l_st.st_gid)
    def file_upload(self, local_path, remote_path, l_st):
        """Upload local_path to remote_path and set permission and mtime."""
        # Transfer the content first: the mode/mtime metadata (taken from
        # the local lstat result l_st) can only be applied to a remote
        # file that already exists.
        self.sftp.put(local_path, remote_path)
        self._match_modes(remote_path, l_st)
def remote_delete(self, remote_path, r_st):
"""Remove the remote directory node."""
# If it's a directory, then delete content and directory
if S_ISDIR(r_st.st_mode):
for item in self.sftp.listdir_attr(remote_path):
full_path = path_join(remote_path, item.filename)
self.remote_delete(full_path, item)
self.sftp.rmdir(remote_path)
# Or simply delete files
else:
try:
self.sftp.remove(remote_path)
except FileNotFoundError as e:
self.logger.error(
"error while removing {}. trace: {}".format(remote_path, e)
)
def check_for_deletion(self, relative_path=None):
"""Traverse the entire remote_path tree.
Find files/directories that need to be deleted,
not being present in the local folder.
"""
if not relative_path:
relative_path = str() # root of shared directory tree
remote_path = path_join(self.remote_path, relative_path)
local_path = path_join(self.local_path, relative_path)
for remote_st in self.sftp.listdir_attr(remote_path):
r_lstat = self.sftp.lstat(path_join(remote_path, remote_st.filename))
inner_remote_path = path_join(remote_path, remote_st.filename)
inner_local_path = path_join(local_path, remote_st.filename)
# check if remote_st is a symlink
# otherwise could delete file outside shared directory
if S_ISLNK(r_lstat.st_mode):
if self._must_be_deleted(inner_local_path, r_lstat):
self.remote_delete(inner_remote_path, r_lstat)
continue
if self._must_be_deleted(inner_local_path, remote_st):
self.remote_delete(inner_remote_path, remote_st)
elif S_ISDIR(remote_st.st_mode):
self.check_for_deletion(
path_join(relative_path, remote_st.filename)
)
    def create_update_symlink(self, link_destination, remote_path):
        """(Re)create remote_path as a symlink pointing to link_destination.

        Any pre-existing node at remote_path is removed first, so an
        existing link is effectively updated in place.
        """
        try:  # if there's anything at remote_path, delete it first
            self.sftp.remove(remote_path)
        except IOError:  # that's fine: nothing existed there
            pass
        finally:  # and (re)create the link in any case
            try:
                self.sftp.symlink(link_destination, remote_path)
            except OSError as e:
                # Sometimes, if links are "too" different, symlink fails.
                # Sadly, nothing we can do about it beyond logging.
                self.logger.error("error while symlinking {} to {}: {}".format(
                    remote_path, link_destination, e))
def node_check_for_upload_create(self, relative_path, f):
"""Check if the given directory tree node has to be uploaded/created on the remote folder."""
if not relative_path:
# we're at the root of the shared directory tree
relative_path = str()
# the (absolute) local address of f.
local_path = path_join(self.local_path, relative_path, f)
try:
l_st = os.lstat(local_path)
except OSError as e:
"""A little background here.
Sometimes, in big clusters configurations (mail, etc.),
files could disappear or be moved, suddenly.
There's nothing to do about it,
system should be stopped before doing backups.
Anyway, we log it, and skip it.
"""
self.logger.error("error while checking {}: {}".format(relative_path, e))
return
if local_path in self.exclude_list:
self.logger.info("Skipping excluded file %s.", local_path)
return
# the (absolute) remote address of f.
remote_path = path_join(self.remote_path, relative_path, f)
# First case: f is a directory
if S_ISDIR(l_st.st_mode):
# we check if the folder exists on the remote side
# it has to be a folder, otherwise it would have already been
# deleted
try:
self.sftp.stat(remote_path)
except IOError: # it doesn't exist yet on remote side
self.sftp.mkdir(remote_path)
self._match_modes(remote_path, l_st)
# now, we should traverse f too (recursion magic!)
self.check_for_upload_create(path_join(relative_path, f))
# Second case: f is a symbolic link
elif S_ISLNK(l_st.st_mode):
# read the local link
local_link = os.readlink(local_path)
absolute_local_link = os.path.realpath(local_link)
# is it absolute?
is_absolute = local_link.startswith("/")
# and does it point inside the shared directory?
# add trailing slash (security)
trailing_local_path = path_join(self.local_path, '')
relpath = os.path.commonprefix(
[absolute_local_link,
trailing_local_path]
) == trailing_local_path
if relpath:
relative_link = absolute_local_link[len(trailing_local_path):]
else:
relative_link = None
"""
# Refactor them all, be efficient!
# Case A: absolute link pointing outside shared directory
# (we can only update the remote part)
if is_absolute and not relpath:
self.create_update_symlink(local_link, remote_path)
# Case B: absolute link pointing inside shared directory
# (we can leave it as it is or fix the prefix to match the one of the remote server)
elif is_absolute and relpath:
if self.fix_symlinks:
self.create_update_symlink(
join(
self.remote_path,
relative_link,
),
remote_path
)
else:
self.create_update_symlink(local_link, remote_path)
# Case C: relative link pointing outside shared directory
# (all we can do is try to make the link anyway)
elif not is_absolute and not relpath:
self.create_update_symlink(local_link, remote_path)
# Case D: relative link pointing inside shared directory
# (we preserve the relativity and link it!)
elif not is_absolute and relpath:
self.create_update_symlink(local_link, remote_path)
"""
if is_absolute and relpath:
if self.fix_symlinks:
self.create_update_symlink(
path_join(
self.remote_path,
relative_link,
),
remote_path
)
else:
self.create_update_symlink(local_link, remote_path)
# Third case: regular file
elif S_ISREG(l_st.st_mode):
try:
r_st = self.sftp.lstat(remote_path)
if self._file_need_upload(l_st, r_st):
self.file_upload(local_path, remote_path, l_st)
except IOError as e:
if e.errno == errno.ENOENT:
self.file_upload(local_path, remote_path, l_st)
# Anything else.
else:
self.logger.warning("Skipping unsupported file %s.", local_path)
def run(self):
"""Run the sync.
Confront the local and the remote directories and perform the needed changes."""
# Check if remote path is present
try:
self.sftp.stat(self.remote_path)
except FileNotFoundError as e:
if self.create_remote_directory:
self.sftp.mkdir(self.remote_path)
self.logger.info(
"Created missing remote dir: '" + self.remote_path + "'")
else:
self.logger.error(
"Remote folder does not exists. "
"Add '-r' to create it if missing.")
sys.exit(1)
try:
if self.delete:
# First check for items to be removed
self.check_for_deletion()
# Now scan local for items to upload/create
self.check_for_upload_create()
except FileNotFoundError:
# If this happens, probably the remote folder doesn't exist.
self.logger.error(
"Error while opening remote folder. Are you sure it does exist?")
sys.exit(1)
|
unbit/sftpclone | sftpclone/sftpclone.py | SFTPClone.run | python | def run(self):
# Check if remote path is present
try:
self.sftp.stat(self.remote_path)
except FileNotFoundError as e:
if self.create_remote_directory:
self.sftp.mkdir(self.remote_path)
self.logger.info(
"Created missing remote dir: '" + self.remote_path + "'")
else:
self.logger.error(
"Remote folder does not exists. "
"Add '-r' to create it if missing.")
sys.exit(1)
try:
if self.delete:
# First check for items to be removed
self.check_for_deletion()
# Now scan local for items to upload/create
self.check_for_upload_create()
except FileNotFoundError:
# If this happens, probably the remote folder doesn't exist.
self.logger.error(
"Error while opening remote folder. Are you sure it does exist?")
sys.exit(1) | Run the sync.
Confront the local and the remote directories and perform the needed changes. | train | https://github.com/unbit/sftpclone/blob/1cc89478e680fc4e0d12b1a15b5bafd0390d05da/sftpclone/sftpclone.py#L595-L625 | [
"def check_for_deletion(self, relative_path=None):\n \"\"\"Traverse the entire remote_path tree.\n\n Find files/directories that need to be deleted,\n not being present in the local folder.\n \"\"\"\n if not relative_path:\n relative_path = str() # root of shared directory tree\n\n remote_path = path_join(self.remote_path, relative_path)\n local_path = path_join(self.local_path, relative_path)\n\n for remote_st in self.sftp.listdir_attr(remote_path):\n r_lstat = self.sftp.lstat(path_join(remote_path, remote_st.filename))\n\n inner_remote_path = path_join(remote_path, remote_st.filename)\n inner_local_path = path_join(local_path, remote_st.filename)\n\n # check if remote_st is a symlink\n # otherwise could delete file outside shared directory\n if S_ISLNK(r_lstat.st_mode):\n if self._must_be_deleted(inner_local_path, r_lstat):\n self.remote_delete(inner_remote_path, r_lstat)\n continue\n\n if self._must_be_deleted(inner_local_path, remote_st):\n self.remote_delete(inner_remote_path, remote_st)\n elif S_ISDIR(remote_st.st_mode):\n self.check_for_deletion(\n path_join(relative_path, remote_st.filename)\n )\n",
"def check_for_upload_create(self, relative_path=None):\n \"\"\"Traverse the relative_path tree and check for files that need to be uploaded/created.\n\n Relativity here refers to the shared directory tree.\"\"\"\n for f in os.listdir(\n path_join(\n self.local_path, relative_path) if relative_path else self.local_path\n ):\n self.node_check_for_upload_create(relative_path, f)\n"
] | class SFTPClone(object):
"""The SFTPClone class."""
def __init__(self, local_path, remote_url,
identity_files=None, port=None, fix_symlinks=False,
ssh_config_path=None, ssh_agent=False,
exclude_file=None, known_hosts_path=None,
delete=True, allow_unknown=False,
create_remote_directory=False,
):
"""Init the needed parameters and the SFTPClient."""
self.local_path = os.path.realpath(os.path.expanduser(local_path))
self.logger = logger or configure_logging()
self.create_remote_directory = create_remote_directory
if not os.path.exists(self.local_path):
self.logger.error("Local path MUST exist. Exiting.")
sys.exit(1)
if exclude_file:
with open(exclude_file) as f:
# As in rsync's exclude from, ignore lines with leading ; and #
# and treat each path as relative (thus by removing the leading
# /)
exclude_list = [
line.rstrip().lstrip("/")
for line in f
if not line.startswith((";", "#"))
]
# actually, is a set of excluded files
self.exclude_list = {
g
for pattern in exclude_list
for g in glob.glob(path_join(self.local_path, pattern))
}
else:
self.exclude_list = set()
username, password, hostname, self.remote_path = parse_username_password_hostname(remote_url)
identity_files = identity_files or []
proxy_command = None
if ssh_config_path:
try:
with open(os.path.expanduser(ssh_config_path)) as c_file:
ssh_config = paramiko.SSHConfig()
ssh_config.parse(c_file)
c = ssh_config.lookup(hostname)
hostname = c.get("hostname", hostname)
username = c.get("user", username)
port = int(c.get("port", port))
identity_files = c.get("identityfile", identity_files)
proxy_command = c.get("proxycommand")
except Exception as e:
# it could be safe to continue anyway,
# because parameters could have been manually specified
self.logger.error(
"Error while parsing ssh_config file: %s. Trying to continue anyway...", e
)
# Set default values
if not username:
username = getuser() # defaults to current user
port = port or 22
allow_unknown = allow_unknown or False
self.chown = False
self.fix_symlinks = fix_symlinks or False
self.delete = delete if delete is not None else True
if ssh_agent:
agent, agent_keys = get_ssh_agent_keys(self.logger)
else:
agent, agent_keys = None, None
if not identity_files and not password and not agent_keys:
self.logger.error(
"You need to specify either a password, an identity or to enable the ssh-agent support."
)
sys.exit(1)
# only root can change file owner
if username == 'root':
self.chown = True
sock = (hostname, port)
if proxy_command is not None:
sock = paramiko.proxy.ProxyCommand(proxy_command)
try:
transport = paramiko.Transport(sock)
except socket.gaierror:
self.logger.error(
"Hostname not known. Are you sure you inserted it correctly?")
sys.exit(1)
try:
ssh_host = hostname if port == 22 else "[{}]:{}".format(hostname, port)
known_hosts = None
"""
Before starting the transport session, we have to configure it.
Specifically, we need to configure the preferred PK algorithm.
If the system already knows a public key of a specific kind for
a remote host, we have to peek its type as the preferred one.
"""
if known_hosts_path:
known_hosts = paramiko.HostKeys()
known_hosts_path = os.path.realpath(
os.path.expanduser(known_hosts_path))
try:
known_hosts.load(known_hosts_path)
except IOError:
self.logger.error(
"Error while loading known hosts file at {}. Exiting...".format(
known_hosts_path)
)
sys.exit(1)
known_keys = known_hosts.lookup(ssh_host)
if known_keys is not None:
# one or more keys are already known
# set their type as preferred
transport.get_security_options().key_types = \
tuple(known_keys.keys())
transport.start_client()
if not known_hosts:
self.logger.warning("Security warning: skipping known hosts check...")
else:
pubk = transport.get_remote_server_key()
if ssh_host in known_hosts.keys():
if not known_hosts.check(ssh_host, pubk):
self.logger.error(
"Security warning: "
"remote key fingerprint {} for hostname "
"{} didn't match the one in known_hosts {}. "
"Exiting...".format(
pubk.get_base64(),
ssh_host,
known_hosts.lookup(hostname),
)
)
sys.exit(1)
elif not allow_unknown:
prompt = ("The authenticity of host '{}' can't be established.\n"
"{} key is {}.\n"
"Are you sure you want to continue connecting? [y/n] ").format(
ssh_host, pubk.get_name(), pubk.get_base64())
try:
# Renamed to `input` in Python 3.x
response = raw_input(prompt)
except NameError:
response = input(prompt)
# Note: we do not modify the user's known_hosts file
if not (response == "y" or response == "yes"):
self.logger.error(
"Host authentication failed."
)
sys.exit(1)
def perform_key_auth(pkey):
try:
transport.auth_publickey(
username=username,
key=pkey
)
return True
except paramiko.SSHException:
self.logger.warning(
"Authentication with identity {}... failed".format(pkey.get_base64()[:10])
)
return False
if password: # Password auth, if specified.
transport.auth_password(
username=username,
password=password
)
elif agent_keys: # SSH agent keys have higher priority
for pkey in agent_keys:
if perform_key_auth(pkey):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
elif identity_files: # Then follow identity file (specified from CL or ssh_config)
# Try identity files one by one, until one works
for key_path in identity_files:
key_path = os.path.expanduser(key_path)
try:
key = paramiko.RSAKey.from_private_key_file(key_path)
except paramiko.PasswordRequiredException:
pk_password = getpass(
"It seems that your identity from '{}' is encrypted. "
"Please enter your password: ".format(key_path)
)
try:
key = paramiko.RSAKey.from_private_key_file(key_path, pk_password)
except paramiko.SSHException:
self.logger.error(
"Incorrect passphrase. Cannot decode private key from '{}'.".format(key_path)
)
continue
except IOError or paramiko.SSHException:
self.logger.error(
"Something went wrong while opening '{}'. Skipping it.".format(key_path)
)
continue
if perform_key_auth(key):
break # Authentication worked.
else: # None of the keys worked.
raise paramiko.SSHException
else: # No authentication method specified, we shouldn't arrive here.
assert False
except paramiko.SSHException:
self.logger.error(
"None of the provided authentication methods worked. Exiting."
)
transport.close()
sys.exit(1)
finally:
if agent:
agent.close()
self.sftp = paramiko.SFTPClient.from_transport(transport)
if self.remote_path.startswith("~"):
# nasty hack to let getcwd work without changing dir!
self.sftp.chdir('.')
self.remote_path = self.remote_path.replace(
"~", self.sftp.getcwd()) # home is the initial sftp dir
@staticmethod
def _file_need_upload(l_st, r_st):
return True if \
l_st.st_size != r_st.st_size or int(l_st.st_mtime) != r_st.st_mtime \
else False
@staticmethod
def _must_be_deleted(local_path, r_st):
"""Return True if the remote correspondent of local_path has to be deleted.
i.e. if it doesn't exists locally or if it has a different type from the remote one."""
# if the file doesn't exists
if not os.path.lexists(local_path):
return True
# or if the file type is different
l_st = os.lstat(local_path)
if S_IFMT(r_st.st_mode) != S_IFMT(l_st.st_mode):
return True
return False
def _match_modes(self, remote_path, l_st):
"""Match mod, utime and uid/gid with locals one."""
self.sftp.chmod(remote_path, S_IMODE(l_st.st_mode))
self.sftp.utime(remote_path, (l_st.st_atime, l_st.st_mtime))
if self.chown:
self.sftp.chown(remote_path, l_st.st_uid, l_st.st_gid)
def file_upload(self, local_path, remote_path, l_st):
"""Upload local_path to remote_path and set permission and mtime."""
self.sftp.put(local_path, remote_path)
self._match_modes(remote_path, l_st)
def remote_delete(self, remote_path, r_st):
"""Remove the remote directory node."""
# If it's a directory, then delete content and directory
if S_ISDIR(r_st.st_mode):
for item in self.sftp.listdir_attr(remote_path):
full_path = path_join(remote_path, item.filename)
self.remote_delete(full_path, item)
self.sftp.rmdir(remote_path)
# Or simply delete files
else:
try:
self.sftp.remove(remote_path)
except FileNotFoundError as e:
self.logger.error(
"error while removing {}. trace: {}".format(remote_path, e)
)
def check_for_deletion(self, relative_path=None):
"""Traverse the entire remote_path tree.
Find files/directories that need to be deleted,
not being present in the local folder.
"""
if not relative_path:
relative_path = str() # root of shared directory tree
remote_path = path_join(self.remote_path, relative_path)
local_path = path_join(self.local_path, relative_path)
for remote_st in self.sftp.listdir_attr(remote_path):
r_lstat = self.sftp.lstat(path_join(remote_path, remote_st.filename))
inner_remote_path = path_join(remote_path, remote_st.filename)
inner_local_path = path_join(local_path, remote_st.filename)
# check if remote_st is a symlink
# otherwise could delete file outside shared directory
if S_ISLNK(r_lstat.st_mode):
if self._must_be_deleted(inner_local_path, r_lstat):
self.remote_delete(inner_remote_path, r_lstat)
continue
if self._must_be_deleted(inner_local_path, remote_st):
self.remote_delete(inner_remote_path, remote_st)
elif S_ISDIR(remote_st.st_mode):
self.check_for_deletion(
path_join(relative_path, remote_st.filename)
)
def create_update_symlink(self, link_destination, remote_path):
"""Create a new link pointing to link_destination in remote_path position."""
try: # if there's anything, delete it
self.sftp.remove(remote_path)
except IOError: # that's fine, nothing exists there!
pass
finally: # and recreate the link
try:
self.sftp.symlink(link_destination, remote_path)
except OSError as e:
# Sometimes, if links are "too" different, symlink fails.
# Sadly, nothing we can do about it.
self.logger.error("error while symlinking {} to {}: {}".format(
remote_path, link_destination, e))
def node_check_for_upload_create(self, relative_path, f):
"""Check if the given directory tree node has to be uploaded/created on the remote folder."""
if not relative_path:
# we're at the root of the shared directory tree
relative_path = str()
# the (absolute) local address of f.
local_path = path_join(self.local_path, relative_path, f)
try:
l_st = os.lstat(local_path)
except OSError as e:
"""A little background here.
Sometimes, in big clusters configurations (mail, etc.),
files could disappear or be moved, suddenly.
There's nothing to do about it,
system should be stopped before doing backups.
Anyway, we log it, and skip it.
"""
self.logger.error("error while checking {}: {}".format(relative_path, e))
return
if local_path in self.exclude_list:
self.logger.info("Skipping excluded file %s.", local_path)
return
# the (absolute) remote address of f.
remote_path = path_join(self.remote_path, relative_path, f)
# First case: f is a directory
if S_ISDIR(l_st.st_mode):
# we check if the folder exists on the remote side
# it has to be a folder, otherwise it would have already been
# deleted
try:
self.sftp.stat(remote_path)
except IOError: # it doesn't exist yet on remote side
self.sftp.mkdir(remote_path)
self._match_modes(remote_path, l_st)
# now, we should traverse f too (recursion magic!)
self.check_for_upload_create(path_join(relative_path, f))
# Second case: f is a symbolic link
elif S_ISLNK(l_st.st_mode):
# read the local link
local_link = os.readlink(local_path)
absolute_local_link = os.path.realpath(local_link)
# is it absolute?
is_absolute = local_link.startswith("/")
# and does it point inside the shared directory?
# add trailing slash (security)
trailing_local_path = path_join(self.local_path, '')
relpath = os.path.commonprefix(
[absolute_local_link,
trailing_local_path]
) == trailing_local_path
if relpath:
relative_link = absolute_local_link[len(trailing_local_path):]
else:
relative_link = None
"""
# Refactor them all, be efficient!
# Case A: absolute link pointing outside shared directory
# (we can only update the remote part)
if is_absolute and not relpath:
self.create_update_symlink(local_link, remote_path)
# Case B: absolute link pointing inside shared directory
# (we can leave it as it is or fix the prefix to match the one of the remote server)
elif is_absolute and relpath:
if self.fix_symlinks:
self.create_update_symlink(
join(
self.remote_path,
relative_link,
),
remote_path
)
else:
self.create_update_symlink(local_link, remote_path)
# Case C: relative link pointing outside shared directory
# (all we can do is try to make the link anyway)
elif not is_absolute and not relpath:
self.create_update_symlink(local_link, remote_path)
# Case D: relative link pointing inside shared directory
# (we preserve the relativity and link it!)
elif not is_absolute and relpath:
self.create_update_symlink(local_link, remote_path)
"""
if is_absolute and relpath:
if self.fix_symlinks:
self.create_update_symlink(
path_join(
self.remote_path,
relative_link,
),
remote_path
)
else:
self.create_update_symlink(local_link, remote_path)
# Third case: regular file
elif S_ISREG(l_st.st_mode):
try:
r_st = self.sftp.lstat(remote_path)
if self._file_need_upload(l_st, r_st):
self.file_upload(local_path, remote_path, l_st)
except IOError as e:
if e.errno == errno.ENOENT:
self.file_upload(local_path, remote_path, l_st)
# Anything else.
else:
self.logger.warning("Skipping unsupported file %s.", local_path)
def check_for_upload_create(self, relative_path=None):
"""Traverse the relative_path tree and check for files that need to be uploaded/created.
Relativity here refers to the shared directory tree."""
for f in os.listdir(
path_join(
self.local_path, relative_path) if relative_path else self.local_path
):
self.node_check_for_upload_create(relative_path, f)
|
unbit/sftpclone | sftpclone/t/utils.py | list_files | python | def list_files(start_path):
s = u'\n'
for root, dirs, files in os.walk(start_path):
level = root.replace(start_path, '').count(os.sep)
indent = ' ' * 4 * level
s += u'{}{}/\n'.format(indent, os.path.basename(root))
sub_indent = ' ' * 4 * (level + 1)
for f in files:
s += u'{}{}\n'.format(sub_indent, f)
return s | tree unix command replacement. | train | https://github.com/unbit/sftpclone/blob/1cc89478e680fc4e0d12b1a15b5bafd0390d05da/sftpclone/t/utils.py#L26-L36 | null | #!/usr/bin/env python
# coding=utf-8
"""Various test utils."""
import logging
import os
import sys
from contextlib import contextmanager
from functools import reduce
try: # Python < 3
from StringIO import StringIO
except ImportError:
from io import StringIO
root_path = os.path.dirname(os.path.realpath(__file__))
def t_path(filename="."):
"""Get the path of the test file inside test directory."""
return os.path.join(root_path, filename)
def file_tree(start_path):
"""
Create a nested dictionary that represents the folder structure of `start_path`.
Liberally adapted from
http://code.activestate.com/recipes/577879-create-a-nested-dictionary-from-oswalk/
"""
nested_dirs = {}
root_dir = start_path.rstrip(os.sep)
start = root_dir.rfind(os.sep) + 1
for path, dirs, files in os.walk(root_dir):
folders = path[start:].split(os.sep)
subdir = dict.fromkeys(files)
parent = reduce(dict.get, folders[:-1], nested_dirs)
parent[folders[-1]] = subdir
return nested_dirs
@contextmanager
def capture_sys_output():
"""Capture standard output and error."""
capture_out, capture_err = StringIO(), StringIO()
current_out, current_err = sys.stdout, sys.stderr
try:
sys.stdout, sys.stderr = capture_out, capture_err
yield capture_out, capture_err
finally:
sys.stdout, sys.stderr = current_out, current_err
@contextmanager
def suppress_logging(log_level=logging.CRITICAL):
"""Suppress logging."""
logging.disable(log_level)
yield
logging.disable(logging.NOTSET)
@contextmanager
def override_env_variables():
"""Override user environmental variables with custom one."""
env_vars = ("LOGNAME", "USER", "LNAME", "USERNAME")
old = [os.environ[v] if v in os.environ else None for v in env_vars]
for v in env_vars:
os.environ[v] = "test"
yield
for i, v in enumerate(env_vars):
if old[i]:
os.environ[v] = old[i]
@contextmanager
def override_ssh_auth_env():
"""Override the `$SSH_AUTH_SOCK `env variable to mock the absence of an SSH agent."""
ssh_auth_sock = "SSH_AUTH_SOCK"
old_ssh_auth_sock = os.environ.get(ssh_auth_sock)
del os.environ[ssh_auth_sock]
yield
if old_ssh_auth_sock:
os.environ[ssh_auth_sock] = old_ssh_auth_sock
|
unbit/sftpclone | sftpclone/t/utils.py | file_tree | python | def file_tree(start_path):
nested_dirs = {}
root_dir = start_path.rstrip(os.sep)
start = root_dir.rfind(os.sep) + 1
for path, dirs, files in os.walk(root_dir):
folders = path[start:].split(os.sep)
subdir = dict.fromkeys(files)
parent = reduce(dict.get, folders[:-1], nested_dirs)
parent[folders[-1]] = subdir
return nested_dirs | Create a nested dictionary that represents the folder structure of `start_path`.
Liberally adapted from
http://code.activestate.com/recipes/577879-create-a-nested-dictionary-from-oswalk/ | train | https://github.com/unbit/sftpclone/blob/1cc89478e680fc4e0d12b1a15b5bafd0390d05da/sftpclone/t/utils.py#L39-L54 | null | #!/usr/bin/env python
# coding=utf-8
"""Various test utils."""
import logging
import os
import sys
from contextlib import contextmanager
from functools import reduce
try: # Python < 3
from StringIO import StringIO
except ImportError:
from io import StringIO
root_path = os.path.dirname(os.path.realpath(__file__))
def t_path(filename="."):
"""Get the path of the test file inside test directory."""
return os.path.join(root_path, filename)
def list_files(start_path):
"""tree unix command replacement."""
s = u'\n'
for root, dirs, files in os.walk(start_path):
level = root.replace(start_path, '').count(os.sep)
indent = ' ' * 4 * level
s += u'{}{}/\n'.format(indent, os.path.basename(root))
sub_indent = ' ' * 4 * (level + 1)
for f in files:
s += u'{}{}\n'.format(sub_indent, f)
return s
@contextmanager
def capture_sys_output():
"""Capture standard output and error."""
capture_out, capture_err = StringIO(), StringIO()
current_out, current_err = sys.stdout, sys.stderr
try:
sys.stdout, sys.stderr = capture_out, capture_err
yield capture_out, capture_err
finally:
sys.stdout, sys.stderr = current_out, current_err
@contextmanager
def suppress_logging(log_level=logging.CRITICAL):
"""Suppress logging."""
logging.disable(log_level)
yield
logging.disable(logging.NOTSET)
@contextmanager
def override_env_variables():
"""Override user environmental variables with custom one."""
env_vars = ("LOGNAME", "USER", "LNAME", "USERNAME")
old = [os.environ[v] if v in os.environ else None for v in env_vars]
for v in env_vars:
os.environ[v] = "test"
yield
for i, v in enumerate(env_vars):
if old[i]:
os.environ[v] = old[i]
@contextmanager
def override_ssh_auth_env():
"""Override the `$SSH_AUTH_SOCK `env variable to mock the absence of an SSH agent."""
ssh_auth_sock = "SSH_AUTH_SOCK"
old_ssh_auth_sock = os.environ.get(ssh_auth_sock)
del os.environ[ssh_auth_sock]
yield
if old_ssh_auth_sock:
os.environ[ssh_auth_sock] = old_ssh_auth_sock
|
unbit/sftpclone | sftpclone/t/utils.py | capture_sys_output | python | def capture_sys_output():
capture_out, capture_err = StringIO(), StringIO()
current_out, current_err = sys.stdout, sys.stderr
try:
sys.stdout, sys.stderr = capture_out, capture_err
yield capture_out, capture_err
finally:
sys.stdout, sys.stderr = current_out, current_err | Capture standard output and error. | train | https://github.com/unbit/sftpclone/blob/1cc89478e680fc4e0d12b1a15b5bafd0390d05da/sftpclone/t/utils.py#L58-L66 | null | #!/usr/bin/env python
# coding=utf-8
"""Various test utils."""
import logging
import os
import sys
from contextlib import contextmanager
from functools import reduce
try: # Python < 3
from StringIO import StringIO
except ImportError:
from io import StringIO
root_path = os.path.dirname(os.path.realpath(__file__))
def t_path(filename="."):
"""Get the path of the test file inside test directory."""
return os.path.join(root_path, filename)
def list_files(start_path):
"""tree unix command replacement."""
s = u'\n'
for root, dirs, files in os.walk(start_path):
level = root.replace(start_path, '').count(os.sep)
indent = ' ' * 4 * level
s += u'{}{}/\n'.format(indent, os.path.basename(root))
sub_indent = ' ' * 4 * (level + 1)
for f in files:
s += u'{}{}\n'.format(sub_indent, f)
return s
def file_tree(start_path):
"""
Create a nested dictionary that represents the folder structure of `start_path`.
Liberally adapted from
http://code.activestate.com/recipes/577879-create-a-nested-dictionary-from-oswalk/
"""
nested_dirs = {}
root_dir = start_path.rstrip(os.sep)
start = root_dir.rfind(os.sep) + 1
for path, dirs, files in os.walk(root_dir):
folders = path[start:].split(os.sep)
subdir = dict.fromkeys(files)
parent = reduce(dict.get, folders[:-1], nested_dirs)
parent[folders[-1]] = subdir
return nested_dirs
@contextmanager
@contextmanager
def suppress_logging(log_level=logging.CRITICAL):
"""Suppress logging."""
logging.disable(log_level)
yield
logging.disable(logging.NOTSET)
@contextmanager
def override_env_variables():
"""Override user environmental variables with custom one."""
env_vars = ("LOGNAME", "USER", "LNAME", "USERNAME")
old = [os.environ[v] if v in os.environ else None for v in env_vars]
for v in env_vars:
os.environ[v] = "test"
yield
for i, v in enumerate(env_vars):
if old[i]:
os.environ[v] = old[i]
@contextmanager
def override_ssh_auth_env():
"""Override the `$SSH_AUTH_SOCK `env variable to mock the absence of an SSH agent."""
ssh_auth_sock = "SSH_AUTH_SOCK"
old_ssh_auth_sock = os.environ.get(ssh_auth_sock)
del os.environ[ssh_auth_sock]
yield
if old_ssh_auth_sock:
os.environ[ssh_auth_sock] = old_ssh_auth_sock
|
unbit/sftpclone | sftpclone/t/utils.py | suppress_logging | python | def suppress_logging(log_level=logging.CRITICAL):
logging.disable(log_level)
yield
logging.disable(logging.NOTSET) | Suppress logging. | train | https://github.com/unbit/sftpclone/blob/1cc89478e680fc4e0d12b1a15b5bafd0390d05da/sftpclone/t/utils.py#L70-L74 | null | #!/usr/bin/env python
# coding=utf-8
"""Various test utils."""
import logging
import os
import sys
from contextlib import contextmanager
from functools import reduce
try: # Python < 3
from StringIO import StringIO
except ImportError:
from io import StringIO
root_path = os.path.dirname(os.path.realpath(__file__))
def t_path(filename="."):
"""Get the path of the test file inside test directory."""
return os.path.join(root_path, filename)
def list_files(start_path):
    """Render the directory tree rooted at *start_path*, like ``tree``."""
    rendered = u'\n'
    for current_root, _dirs, filenames in os.walk(start_path):
        depth = current_root.replace(start_path, '').count(os.sep)
        pad = ' ' * 4 * depth
        rendered += u'{}{}/\n'.format(pad, os.path.basename(current_root))
        child_pad = ' ' * 4 * (depth + 1)
        for filename in filenames:
            rendered += u'{}{}\n'.format(child_pad, filename)
    return rendered
def file_tree(start_path):
    """Build a nested dict mirroring the folder structure of *start_path*.

    Directories map to sub-dicts, files map to ``None``.
    Liberally adapted from
    http://code.activestate.com/recipes/577879-create-a-nested-dictionary-from-oswalk/
    """
    tree = {}
    root_dir = start_path.rstrip(os.sep)
    prefix_len = root_dir.rfind(os.sep) + 1
    for path, _dirs, files in os.walk(root_dir):
        parts = path[prefix_len:].split(os.sep)
        # Descend to the parent node of this path (same walk as
        # reduce(dict.get, parts[:-1], tree)).
        node = tree
        for part in parts[:-1]:
            node = node.get(part)
        node[parts[-1]] = dict.fromkeys(files)
    return tree
@contextmanager
def capture_sys_output():
    """Redirect stdout/stderr into ``StringIO`` buffers for the block."""
    out_buf, err_buf = StringIO(), StringIO()
    saved_out, saved_err = sys.stdout, sys.stderr
    try:
        sys.stdout, sys.stderr = out_buf, err_buf
        yield out_buf, err_buf
    finally:
        sys.stdout, sys.stderr = saved_out, saved_err
@contextmanager
def override_env_variables():
    """Temporarily set common user-name env variables to ``"test"``.

    Fixes: a duplicated ``@contextmanager`` decorator (which made the
    object unusable as a context manager) and the truthiness restore
    (``if old[i]:``) that never restored empty-string values and leaked
    injected variables.  Restoration now runs in ``finally``.
    """
    env_vars = ("LOGNAME", "USER", "LNAME", "USERNAME")
    old = [os.environ.get(v) for v in env_vars]
    for v in env_vars:
        os.environ[v] = "test"
    try:
        yield
    finally:
        for prev, v in zip(old, env_vars):
            if prev is not None:
                os.environ[v] = prev
            else:
                # Was absent before: remove the injected value again.
                os.environ.pop(v, None)
@contextmanager
def override_ssh_auth_env():
    """Unset ``$SSH_AUTH_SOCK`` to mock the absence of an SSH agent.

    Fixes: ``del os.environ[...]`` raised ``KeyError`` when the variable
    was not set; the truthiness restore dropped empty values; restore now
    runs in ``finally`` and removes a value set inside the block when the
    variable was absent before.
    """
    ssh_auth_sock = "SSH_AUTH_SOCK"
    old_ssh_auth_sock = os.environ.pop(ssh_auth_sock, None)
    try:
        yield
    finally:
        if old_ssh_auth_sock is not None:
            os.environ[ssh_auth_sock] = old_ssh_auth_sock
        else:
            os.environ.pop(ssh_auth_sock, None)
|
unbit/sftpclone | sftpclone/t/utils.py | override_env_variables | python | def override_env_variables():
env_vars = ("LOGNAME", "USER", "LNAME", "USERNAME")
old = [os.environ[v] if v in os.environ else None for v in env_vars]
for v in env_vars:
os.environ[v] = "test"
yield
for i, v in enumerate(env_vars):
if old[i]:
os.environ[v] = old[i] | Override user environmental variables with custom one. | train | https://github.com/unbit/sftpclone/blob/1cc89478e680fc4e0d12b1a15b5bafd0390d05da/sftpclone/t/utils.py#L78-L89 | null | #!/usr/bin/env python
# coding=utf-8
"""Various test utils."""
import logging
import os
import sys
from contextlib import contextmanager
from functools import reduce
try: # Python < 3
from StringIO import StringIO
except ImportError:
from io import StringIO
root_path = os.path.dirname(os.path.realpath(__file__))
def t_path(filename="."):
    """Return *filename* resolved against the test directory."""
    full_path = os.path.join(root_path, filename)
    return full_path
def list_files(start_path):
    """Render the directory tree rooted at *start_path*, like ``tree``."""
    rendered = u'\n'
    for current_root, _dirs, filenames in os.walk(start_path):
        depth = current_root.replace(start_path, '').count(os.sep)
        pad = ' ' * 4 * depth
        rendered += u'{}{}/\n'.format(pad, os.path.basename(current_root))
        child_pad = ' ' * 4 * (depth + 1)
        for filename in filenames:
            rendered += u'{}{}\n'.format(child_pad, filename)
    return rendered
def file_tree(start_path):
    """Build a nested dict mirroring the folder structure of *start_path*.

    Directories map to sub-dicts, files map to ``None``.
    Liberally adapted from
    http://code.activestate.com/recipes/577879-create-a-nested-dictionary-from-oswalk/
    """
    tree = {}
    root_dir = start_path.rstrip(os.sep)
    prefix_len = root_dir.rfind(os.sep) + 1
    for path, _dirs, files in os.walk(root_dir):
        parts = path[prefix_len:].split(os.sep)
        # Descend to the parent node of this path (same walk as
        # reduce(dict.get, parts[:-1], tree)).
        node = tree
        for part in parts[:-1]:
            node = node.get(part)
        node[parts[-1]] = dict.fromkeys(files)
    return tree
@contextmanager
def capture_sys_output():
    """Redirect stdout/stderr into ``StringIO`` buffers for the block."""
    out_buf, err_buf = StringIO(), StringIO()
    saved_out, saved_err = sys.stdout, sys.stderr
    try:
        sys.stdout, sys.stderr = out_buf, err_buf
        yield out_buf, err_buf
    finally:
        sys.stdout, sys.stderr = saved_out, saved_err
@contextmanager
def suppress_logging(log_level=logging.CRITICAL):
    """Temporarily disable all log records at or below *log_level*.

    Robustness fix: re-enable logging in ``finally`` so an exception in
    the body cannot leave logging globally disabled.
    """
    logging.disable(log_level)
    try:
        yield
    finally:
        logging.disable(logging.NOTSET)
@contextmanager
def override_ssh_auth_env():
    """Unset ``$SSH_AUTH_SOCK`` to mock the absence of an SSH agent.

    Fixes: a duplicated ``@contextmanager`` decorator; ``del`` raising
    ``KeyError`` when the variable is unset; truthiness restore dropping
    empty values.  Restoration now runs in ``finally``.
    """
    ssh_auth_sock = "SSH_AUTH_SOCK"
    old_ssh_auth_sock = os.environ.pop(ssh_auth_sock, None)
    try:
        yield
    finally:
        if old_ssh_auth_sock is not None:
            os.environ[ssh_auth_sock] = old_ssh_auth_sock
        else:
            os.environ.pop(ssh_auth_sock, None)
|
unbit/sftpclone | sftpclone/t/utils.py | override_ssh_auth_env | python | def override_ssh_auth_env():
ssh_auth_sock = "SSH_AUTH_SOCK"
old_ssh_auth_sock = os.environ.get(ssh_auth_sock)
del os.environ[ssh_auth_sock]
yield
if old_ssh_auth_sock:
os.environ[ssh_auth_sock] = old_ssh_auth_sock | Override the `$SSH_AUTH_SOCK `env variable to mock the absence of an SSH agent. | train | https://github.com/unbit/sftpclone/blob/1cc89478e680fc4e0d12b1a15b5bafd0390d05da/sftpclone/t/utils.py#L93-L103 | null | #!/usr/bin/env python
# coding=utf-8
"""Various test utils."""
import logging
import os
import sys
from contextlib import contextmanager
from functools import reduce
try: # Python < 3
from StringIO import StringIO
except ImportError:
from io import StringIO
root_path = os.path.dirname(os.path.realpath(__file__))
def t_path(filename="."):
    """Return *filename* resolved against the test directory."""
    full_path = os.path.join(root_path, filename)
    return full_path
def list_files(start_path):
    """Render the directory tree rooted at *start_path*, like ``tree``."""
    rendered = u'\n'
    for current_root, _dirs, filenames in os.walk(start_path):
        depth = current_root.replace(start_path, '').count(os.sep)
        pad = ' ' * 4 * depth
        rendered += u'{}{}/\n'.format(pad, os.path.basename(current_root))
        child_pad = ' ' * 4 * (depth + 1)
        for filename in filenames:
            rendered += u'{}{}\n'.format(child_pad, filename)
    return rendered
def file_tree(start_path):
    """Build a nested dict mirroring the folder structure of *start_path*.

    Directories map to sub-dicts, files map to ``None``.
    Liberally adapted from
    http://code.activestate.com/recipes/577879-create-a-nested-dictionary-from-oswalk/
    """
    tree = {}
    root_dir = start_path.rstrip(os.sep)
    prefix_len = root_dir.rfind(os.sep) + 1
    for path, _dirs, files in os.walk(root_dir):
        parts = path[prefix_len:].split(os.sep)
        # Descend to the parent node of this path (same walk as
        # reduce(dict.get, parts[:-1], tree)).
        node = tree
        for part in parts[:-1]:
            node = node.get(part)
        node[parts[-1]] = dict.fromkeys(files)
    return tree
@contextmanager
def capture_sys_output():
    """Redirect stdout/stderr into ``StringIO`` buffers for the block."""
    out_buf, err_buf = StringIO(), StringIO()
    saved_out, saved_err = sys.stdout, sys.stderr
    try:
        sys.stdout, sys.stderr = out_buf, err_buf
        yield out_buf, err_buf
    finally:
        sys.stdout, sys.stderr = saved_out, saved_err
@contextmanager
def suppress_logging(log_level=logging.CRITICAL):
    """Temporarily disable all log records at or below *log_level*.

    Robustness fix: re-enable logging in ``finally`` so an exception in
    the body cannot leave logging globally disabled.
    """
    logging.disable(log_level)
    try:
        yield
    finally:
        logging.disable(logging.NOTSET)
@contextmanager
def override_env_variables():
    """Temporarily set common user-name env variables to ``"test"``.

    Fix: the truthiness restore (``if old[i]:``) never restored
    empty-string values and leaked injected variables that were absent
    beforehand.  Restoration now runs in ``finally``.
    """
    env_vars = ("LOGNAME", "USER", "LNAME", "USERNAME")
    old = [os.environ.get(v) for v in env_vars]
    for v in env_vars:
        os.environ[v] = "test"
    try:
        yield
    finally:
        for prev, v in zip(old, env_vars):
            if prev is not None:
                os.environ[v] = prev
            else:
                os.environ.pop(v, None)
@contextmanager
|
vmirly/pyclust | pyclust/_kmedoids.py | _update_centers | python | def _update_centers(X, membs, n_clusters, distance):
centers = np.empty(shape=(n_clusters, X.shape[1]), dtype=float)
sse = np.empty(shape=n_clusters, dtype=float)
for clust_id in range(n_clusters):
memb_ids = np.where(membs == clust_id)[0]
X_clust = X[memb_ids,:]
dist = np.empty(shape=memb_ids.shape[0], dtype=float)
for i,x in enumerate(X_clust):
dist[i] = np.sum(scipy.spatial.distance.cdist(X_clust, np.array([x]), distance))
inx_min = np.argmin(dist)
centers[clust_id,:] = X_clust[inx_min,:]
sse[clust_id] = dist[inx_min]
return(centers, sse) | Update Cluster Centers:
calculate the mean of feature vectors for each cluster.
distance can be a string or callable. | train | https://github.com/vmirly/pyclust/blob/bdb12be4649e70c6c90da2605bc5f4b314e2d07e/pyclust/_kmedoids.py#L8-L27 | null | import numpy as np
import scipy.spatial
from . import _kmeans as kmeans
def _kmedoids_run(X, n_clusters, distance, max_iter, tol, rng):
    """Run a single trial of k-medoids clustering on dataset ``X``.

    Parameters
    ----------
    X : array of shape (n_samples, n_features).
    n_clusters : number of medoids.
    distance : metric name or callable forwarded to scipy ``cdist``.
    max_iter : maximum assignment/update sweeps.
    tol : stop when the total SSE changes by less than this.
    rng : numpy RandomState used for the initial medoid draw.

    Returns
    -------
    (centers, membs, sse_total, sse_arr, n_iter)
    """
    membs = np.empty(shape=X.shape[0], dtype=int)
    # Initial centers come from the shared k-means initializer (random draw).
    centers = kmeans._kmeans_init(X, n_clusters, method='', rng=rng)
    # Large sentinel so the convergence test cannot fire on the first sweep.
    sse_last = 9999.9
    n_iter = 0
    for it in range(1,max_iter):
        membs = kmeans._assign_clusters(X, centers)
        centers,sse_arr = _update_centers(X, membs, n_clusters, distance)
        sse_total = np.sum(sse_arr)
        if np.abs(sse_total - sse_last) < tol:
            n_iter = it
            break
        sse_last = sse_total
    # NOTE(review): if the loop exhausts max_iter without converging,
    # n_iter is returned as 0 -- confirm that is intended.
    return(centers, membs, sse_total, sse_arr, n_iter)
def _kmedoids(X, n_clusters, distance, max_iter, n_trials, tol, rng):
    """Run ``n_trials`` independent k-medoids trials and keep the best.

    The trial with the lowest total SSE wins; returns
    (centers, labels, per-cluster SSE array, iteration count).
    """
    n_samples, n_features = X.shape[0], X.shape[1]

    centers_best = np.empty(shape=(n_clusters, n_features), dtype=float)
    labels_best = np.empty(shape=n_samples, dtype=int)
    for trial in range(n_trials):
        centers, labels, sse_tot, sse_arr, n_iter = \
            _kmedoids_run(X, n_clusters, distance, max_iter, tol, rng)
        # The first trial always seeds the incumbent; later ones must beat it.
        if trial == 0 or sse_tot < sse_tot_best:
            sse_tot_best = sse_tot
            sse_arr_best = sse_arr
            n_iter_best = n_iter
            centers_best = centers.copy()
            labels_best = labels.copy()

    return (centers_best, labels_best, sse_arr_best, n_iter_best)
class KMedoids(object):
    """K-medoids clustering.

    Unlike k-means, the representative of each cluster (the medoid) is an
    actual member of the cluster: the point minimizing the total
    within-cluster distance.  This makes the method robust to outliers at
    the price of computing all pairwise distances inside each cluster.

    Parameters
    ----------
    n_clusters : int, number of clusters (default 2).
    distance : metric name or callable understood by scipy ``cdist``.
    n_trials : int, number of random restarts (default 10).
    max_iter : int, maximum iterations per trial (default 100).
    tol : float, SSE convergence tolerance (default 0.001).
    random_state : optional seed for the internal RandomState.

    Attributes (after ``fit``)
    --------------------------
    labels_, centers_, sse_arr_, n_iter_
    """

    def __init__(self, n_clusters=2, distance='euclidean',
                 n_trials=10, max_iter=100, tol=0.001, random_state=None):
        self.n_clusters = n_clusters
        self.n_trials = n_trials
        self.max_iter = max_iter
        self.tol = tol
        self.distance = distance
        self.random_state = random_state
        self.rng = np.random.RandomState(random_state)

    def fit(self, X):
        """Cluster ``X`` and store centers/labels on the instance."""
        (self.centers_, self.labels_,
         self.sse_arr_, self.n_iter_) = _kmedoids(
            X, self.n_clusters, self.distance,
            self.max_iter, self.n_trials, self.tol, self.rng)

    def fit_predict(self, X):
        """Cluster ``X`` and return the cluster label of each sample."""
        self.fit(X)
        return self.labels_
|
vmirly/pyclust | pyclust/_kmedoids.py | _kmedoids_run | python | def _kmedoids_run(X, n_clusters, distance, max_iter, tol, rng):
membs = np.empty(shape=X.shape[0], dtype=int)
centers = kmeans._kmeans_init(X, n_clusters, method='', rng=rng)
sse_last = 9999.9
n_iter = 0
for it in range(1,max_iter):
membs = kmeans._assign_clusters(X, centers)
centers,sse_arr = _update_centers(X, membs, n_clusters, distance)
sse_total = np.sum(sse_arr)
if np.abs(sse_total - sse_last) < tol:
n_iter = it
break
sse_last = sse_total
return(centers, membs, sse_total, sse_arr, n_iter) | Run a single trial of k-medoids clustering
on dataset X, and given number of clusters | train | https://github.com/vmirly/pyclust/blob/bdb12be4649e70c6c90da2605bc5f4b314e2d07e/pyclust/_kmedoids.py#L31-L49 | [
"def _kmeans_init(X, n_clusters, method='balanced', rng=None):\n \"\"\" Initialize k=n_clusters centroids randomly\n \"\"\"\n n_samples = X.shape[0]\n if rng is None:\n cent_idx = np.random.choice(n_samples, replace=False, size=n_clusters)\n else:\n #print('Generate random centers using RNG')\n cent_idx = rng.choice(n_samples, replace=False, size=n_clusters)\n\n centers = X[cent_idx,:]\n mean_X = np.mean(X, axis=0)\n\n if method == 'balanced':\n centers[n_clusters-1] = n_clusters*mean_X - np.sum(centers[:(n_clusters-1)], axis=0)\n\n return (centers)\n",
"def _assign_clusters(X, centers):\n \"\"\" Assignment Step:\n assign each point to the closet cluster center\n \"\"\"\n dist2cents = scipy.spatial.distance.cdist(X, centers, metric='seuclidean')\n membs = np.argmin(dist2cents, axis=1)\n\n return(membs)\n",
"def _update_centers(X, membs, n_clusters, distance):\n \"\"\" Update Cluster Centers:\n calculate the mean of feature vectors for each cluster.\n\n distance can be a string or callable.\n \"\"\"\n centers = np.empty(shape=(n_clusters, X.shape[1]), dtype=float)\n sse = np.empty(shape=n_clusters, dtype=float)\n for clust_id in range(n_clusters):\n memb_ids = np.where(membs == clust_id)[0]\n X_clust = X[memb_ids,:]\n\n dist = np.empty(shape=memb_ids.shape[0], dtype=float)\n for i,x in enumerate(X_clust):\n dist[i] = np.sum(scipy.spatial.distance.cdist(X_clust, np.array([x]), distance))\n\n inx_min = np.argmin(dist)\n centers[clust_id,:] = X_clust[inx_min,:]\n sse[clust_id] = dist[inx_min]\n return(centers, sse)\n"
] | import numpy as np
import scipy.spatial
from . import _kmeans as kmeans
def _update_centers(X, membs, n_clusters, distance):
    """Recompute each cluster's medoid.

    For every cluster, the member minimizing the sum of distances to all
    other members becomes the new center; that minimal sum is the
    cluster's SSE.  ``distance`` may be any metric name or callable
    accepted by scipy ``cdist``.
    """
    n_features = X.shape[1]
    centers = np.empty(shape=(n_clusters, n_features), dtype=float)
    sse = np.empty(shape=n_clusters, dtype=float)
    for clust_id in range(n_clusters):
        members = np.where(membs == clust_id)[0]
        X_clust = X[members, :]

        totals = np.empty(shape=members.shape[0], dtype=float)
        for idx, point in enumerate(X_clust):
            totals[idx] = np.sum(
                scipy.spatial.distance.cdist(X_clust, np.array([point]), distance))

        best = np.argmin(totals)
        centers[clust_id, :] = X_clust[best, :]
        sse[clust_id] = totals[best]
    return (centers, sse)
def _kmedoids(X, n_clusters, distance, max_iter, n_trials, tol, rng):
    """Run ``n_trials`` independent k-medoids trials and keep the best.

    The trial with the lowest total SSE wins; returns
    (centers, labels, per-cluster SSE array, iteration count).
    """
    n_samples, n_features = X.shape[0], X.shape[1]

    centers_best = np.empty(shape=(n_clusters, n_features), dtype=float)
    labels_best = np.empty(shape=n_samples, dtype=int)
    for trial in range(n_trials):
        centers, labels, sse_tot, sse_arr, n_iter = \
            _kmedoids_run(X, n_clusters, distance, max_iter, tol, rng)
        # The first trial always seeds the incumbent; later ones must beat it.
        if trial == 0 or sse_tot < sse_tot_best:
            sse_tot_best = sse_tot
            sse_arr_best = sse_arr
            n_iter_best = n_iter
            centers_best = centers.copy()
            labels_best = labels.copy()

    return (centers_best, labels_best, sse_arr_best, n_iter_best)
class KMedoids(object):
    """K-medoids clustering.

    Unlike k-means, the representative of each cluster (the medoid) is an
    actual member of the cluster: the point minimizing the total
    within-cluster distance.  This makes the method robust to outliers at
    the price of computing all pairwise distances inside each cluster.

    Parameters
    ----------
    n_clusters : int, number of clusters (default 2).
    distance : metric name or callable understood by scipy ``cdist``.
    n_trials : int, number of random restarts (default 10).
    max_iter : int, maximum iterations per trial (default 100).
    tol : float, SSE convergence tolerance (default 0.001).
    random_state : optional seed for the internal RandomState.

    Attributes (after ``fit``)
    --------------------------
    labels_, centers_, sse_arr_, n_iter_
    """

    def __init__(self, n_clusters=2, distance='euclidean',
                 n_trials=10, max_iter=100, tol=0.001, random_state=None):
        self.n_clusters = n_clusters
        self.n_trials = n_trials
        self.max_iter = max_iter
        self.tol = tol
        self.distance = distance
        self.random_state = random_state
        self.rng = np.random.RandomState(random_state)

    def fit(self, X):
        """Cluster ``X`` and store centers/labels on the instance."""
        (self.centers_, self.labels_,
         self.sse_arr_, self.n_iter_) = _kmedoids(
            X, self.n_clusters, self.distance,
            self.max_iter, self.n_trials, self.tol, self.rng)

    def fit_predict(self, X):
        """Cluster ``X`` and return the cluster label of each sample."""
        self.fit(X)
        return self.labels_
|
vmirly/pyclust | pyclust/_kmedoids.py | KMedoids.fit | python | def fit(self, X):
self.centers_, self.labels_, self.sse_arr_, self.n_iter_ = \
_kmedoids(X, self.n_clusters, self.distance, self.max_iter, self.n_trials, self.tol, self.rng) | Apply KMeans Clustering
X: dataset with feature vectors | train | https://github.com/vmirly/pyclust/blob/bdb12be4649e70c6c90da2605bc5f4b314e2d07e/pyclust/_kmedoids.py#L129-L134 | [
"def _kmedoids(X, n_clusters, distance, max_iter, n_trials, tol, rng):\n \"\"\" Run multiple trials of k-medoids clustering,\n and output he best centers, and cluster labels\n \"\"\"\n n_samples, n_features = X.shape[0], X.shape[1]\n\n centers_best = np.empty(shape=(n_clusters,n_features), dtype=float)\n labels_best = np.empty(shape=n_samples, dtype=int)\n for i in range(n_trials):\n centers, labels, sse_tot, sse_arr, n_iter = \\\n _kmedoids_run(X, n_clusters, distance, max_iter, tol, rng)\n if i==0:\n sse_tot_best = sse_tot\n sse_arr_best = sse_arr\n n_iter_best = n_iter\n centers_best = centers.copy()\n labels_best = labels.copy()\n if sse_tot < sse_tot_best:\n sse_tot_best = sse_tot\n sse_arr_best = sse_arr\n n_iter_best = n_iter\n centers_best = centers.copy()\n labels_best = labels.copy()\n\n return(centers_best, labels_best, sse_arr_best, n_iter_best)\n"
class KMedoids(object):
    """K-medoids clustering (this dump fragment omits ``fit``).

    The cluster representative (medoid) is an actual member of the
    cluster -- the point minimizing the total within-cluster distance --
    which makes the method robust to outliers at the cost of computing
    all pairwise distances inside each cluster.

    Parameters
    ----------
    n_clusters : int, number of clusters (default 2).
    distance : metric name or callable understood by scipy ``cdist``.
    n_trials : int, number of random restarts (default 10).
    max_iter : int, maximum iterations per trial (default 100).
    tol : float, SSE convergence tolerance (default 0.001).
    random_state : optional seed for the internal RandomState.
    """

    def __init__(self, n_clusters=2, distance='euclidean',
                 n_trials=10, max_iter=100, tol=0.001, random_state=None):
        self.n_clusters = n_clusters
        self.n_trials = n_trials
        self.max_iter = max_iter
        self.tol = tol
        self.distance = distance
        self.random_state = random_state
        self.rng = np.random.RandomState(random_state)

    def fit_predict(self, X):
        """Cluster ``X`` (delegates to ``fit``) and return the labels."""
        self.fit(X)
        return self.labels_
|
vmirly/pyclust | pyclust/_kernel_kmeans.py | _kernelized_dist2centers | python | def _kernelized_dist2centers(K, n_clusters, wmemb, kernel_dist):
n_samples = K.shape[0]
for j in range(n_clusters):
memb_j = np.where(wmemb == j)[0]
size_j = memb_j.shape[0]
K_sub_j = K[memb_j][:, memb_j]
kernel_dist[:,j] = 1 + np.sum(K_sub_j) /(size_j*size_j)
kernel_dist[:,j] -= 2 * np.sum(K[:, memb_j], axis=1) / size_j
return | Computin the distance in transformed feature space to
cluster centers.
K is the kernel gram matrix.
wmemb contains cluster assignment. {0,1}
Assume j is the cluster id:
||phi(x_i) - Phi_center_j|| = K_ii - 2 sum w_jh K_ih +
sum_r sum_s w_jr w_js K_rs | train | https://github.com/vmirly/pyclust/blob/bdb12be4649e70c6c90da2605bc5f4b314e2d07e/pyclust/_kernel_kmeans.py#L29-L51 | null | ### Theoretical & Algorithmic Discussion
### with Emad Zahedi
import numpy as np
import scipy, scipy.spatial
def _compute_gram_matrix(X, kernel_type, params):
    """Compute the kernel Gram matrix of ``X``.

    Supported kernels:
      * 'rbf'    -- exp(-gamma * ||xi - xj||^2); ``params['gamma']``
                    defaults to 1 / n_features.
      * 'linear' -- plain inner products, X @ X.T.

    Fix: the original left ``gram_matrix`` undefined (NameError) for any
    kernel other than 'rbf', although the clustering classes advertise
    'linear'.  Unknown kernels now raise ValueError explicitly.
    """
    if kernel_type == 'rbf':
        gamma = params.get('gamma', 1.0 / X.shape[1])
        pairwise_dist = scipy.spatial.distance.pdist(X, metric='sqeuclidean')
        pairwise_dist = scipy.spatial.distance.squareform(pairwise_dist)
        gram_matrix = np.exp(-gamma * pairwise_dist)
        # The exact diagonal is 1; overwrite to avoid round-off drift.
        np.fill_diagonal(gram_matrix, 1)
    elif kernel_type == 'linear':
        gram_matrix = np.dot(X, X.T)
    else:
        raise ValueError("Unsupported kernel type: %r" % (kernel_type,))

    return (gram_matrix)
def _kernelized_dist2centers(K, n_clusters, wmemb, kernel_dist):
    """Fill ``kernel_dist`` with feature-space distances to cluster centers.

    K : precomputed kernel Gram matrix, shape (n_samples, n_samples).
    wmemb : current hard cluster assignment, values in {0..n_clusters-1}.
    kernel_dist : output array of shape (n_samples, n_clusters),
        overwritten in place; the function returns None.

    For cluster j with member set h:
        ||phi(x_i) - mu_j||^2 = K_ii - (2/|j|) * sum_h K_ih
                                + (1/|j|^2) * sum_{r,s in j} K_rs

    NOTE(review): the K_ii term is hard-coded as 1, which is exact only
    for kernels with unit diagonal (e.g. RBF) -- confirm for 'linear'.
    NOTE(review): an empty cluster gives size_j == 0 and divides by zero.
    """
    n_samples = K.shape[0]
    for j in range(n_clusters):
        memb_j = np.where(wmemb == j)[0]
        size_j = memb_j.shape[0]
        # Per-cluster constant part: 1 + mean of the within-cluster block.
        K_sub_j = K[memb_j][:, memb_j]
        kernel_dist[:,j] = 1 + np.sum(K_sub_j) /(size_j*size_j)
        # Cross term: average kernel between every sample and cluster j.
        kernel_dist[:,j] -= 2 * np.sum(K[:, memb_j], axis=1) / size_j
    return
def _fit_kernelkmeans(K, n_clusters, n_trials, max_iter, converge_tol=0.001):
    """Kernel k-means on a precomputed Gram matrix ``K``.

    Runs ``n_trials`` random initializations; each trial iterates
    assignment steps until fewer than ``converge_tol`` of the labels
    change.  The labelling with the smallest total within-cluster
    distance is kept; returns ``(last_iteration_index, best_labels)``.

    Fix: ``np.infty`` was removed in NumPy 2.0 -- use ``np.inf``.
    """
    n_samples = K.shape[0]
    kdist = np.empty(shape=(n_samples, n_clusters), dtype=float)
    within_distances = np.empty(shape=n_clusters, dtype=float)

    best_within_distances = np.inf
    for i in range(n_trials):
        membs_prev = np.random.randint(n_clusters, size=n_samples)
        for it in range(max_iter):
            kdist.fill(0)
            _kernelized_dist2centers(K, n_clusters, membs_prev, kdist)
            membs_curr = np.argmin(kdist, axis=1)
            membs_changed_ratio = float(np.sum((membs_curr - membs_prev) != 0)) / n_samples
            if membs_changed_ratio < converge_tol:
                break
            membs_prev = membs_curr

        for j in range(n_clusters):
            within_distances[j] = np.sum(kdist[np.where(membs_curr == j)[0], j])

        if best_within_distances > within_distances.sum():
            best_within_distances = within_distances.sum()
            best_labels = membs_curr

    return (it, best_labels)
class KernelKMeans(object):
    """Kernel k-means clustering on a (possibly precomputed) Gram matrix.

    Parameters
    ----------
    n_clusters : int, number of clusters (default 2).
    kernel : 'linear' or 'rbf'.
    params : dict of kernel parameters (e.g. {'gamma': ...} for 'rbf');
        stored as-is, never mutated.
    n_trials : int, number of random restarts.
    max_iter : int, maximum assignment iterations per trial.
    """

    def __init__(self, n_clusters=2, kernel='linear', params={}, n_trials=10, max_iter=100):
        assert (kernel in ['linear', 'rbf'])
        self.n_clusters = n_clusters
        self.kernel_type = kernel
        self.n_trials = n_trials
        self.max_iter = max_iter
        self.kernel_params = params
        self.kernel_matrix_ = None

    def _set_kernel_matrix(self, X=None, kernel_matrix=None):
        """Cache the Gram matrix, computing it from ``X`` if needed.

        Fix: the original executed ``raise("...")``; raising a plain
        string is itself a TypeError in Python 3, masking the real
        problem.  Raise ValueError with the same message instead.
        """
        if self.kernel_matrix_ is None:
            if kernel_matrix is None:
                if X is None:
                    raise ValueError("Either X or kernel_matrix is needed!")
                self.kernel_matrix_ = _compute_gram_matrix(X, self.kernel_type, self.kernel_params)
            else:
                self.kernel_matrix_ = kernel_matrix

    def fit(self, X, kernel_matrix=None):
        """Cluster ``X`` (or the given Gram matrix); stores ``labels_``."""
        self._set_kernel_matrix(X, kernel_matrix)
        self.n_iter_, self.labels_ = _fit_kernelkmeans(
            self.kernel_matrix_, self.n_clusters, self.n_trials, self.max_iter)

    def fit_predict(self, X):
        """Fit and return the cluster labels."""
        self.fit(X)
        return (self.labels_)
############### Global Kernel K-Means ####################
def _fit_global_kernelkmeans(K, n_clusters, max_iter, converge_tol=0.001):
    """Global variant of kernel k-means on Gram matrix ``K``.

    Runs one random-initialization trial per sample (``n_samples``
    restarts) and keeps the labelling with the smallest total
    within-cluster distance; returns ``(last_iteration_index, labels)``.

    Fix: ``np.infty`` was removed in NumPy 2.0 -- use ``np.inf``.
    """
    n_samples = K.shape[0]
    kdist = np.empty(shape=(n_samples, n_clusters), dtype=float)
    within_distances = np.empty(shape=n_clusters, dtype=float)

    best_within_distances = np.inf
    for i in range(n_samples):
        membs_prev = np.random.randint(n_clusters, size=n_samples)
        for it in range(max_iter):
            kdist.fill(0)
            _kernelized_dist2centers(K, n_clusters, membs_prev, kdist)
            membs_curr = np.argmin(kdist, axis=1)
            membs_changed_ratio = float(np.sum((membs_curr - membs_prev) != 0)) / n_samples
            if membs_changed_ratio < converge_tol:
                break
            membs_prev = membs_curr

        for j in range(n_clusters):
            within_distances[j] = np.sum(kdist[np.where(membs_curr == j)[0], j])

        if best_within_distances > within_distances.sum():
            best_within_distances = within_distances.sum()
            best_labels = membs_curr

    return (it, best_labels)
class GlobalKernelKMeans(object):
    """Global kernel k-means (incremental-cluster variant; incomplete).

    In the original code base ``fit`` only caches the Gram matrix and
    ``refit`` is an unimplemented stub.
    """

    def __init__(self, n_clusters=3, kernel='linear', params={}, n_trials=10, max_iter=100):
        self.n_clusters = n_clusters
        self.kernel_type = kernel
        self.n_trials = n_trials
        self.max_iter = max_iter
        self.kernel_params = params
        self.kernel_matrix_ = None

    def fit(self, X):
        """Compute and cache the kernel Gram matrix of ``X``."""
        if self.kernel_matrix_ is None:
            self.kernel_matrix_ = _compute_gram_matrix(X, self.kernel_type, self.kernel_params)

    def refit(self, n_clusters):
        """Extend the clustering to more clusters (not implemented)."""
        if n_clusters <= self.n_clusters:
            pass
        else:
            pass
|
vmirly/pyclust | pyclust/_gaussian_mixture_model.py | _init_mixture_params | python | def _init_mixture_params(X, n_mixtures, init_method):
init_priors = np.ones(shape=n_mixtures, dtype=float) / n_mixtures
if init_method == 'kmeans':
km = _kmeans.KMeans(n_clusters = n_mixtures, n_trials=20)
km.fit(X)
init_means = km.centers_
else:
inx_rand = np.random.choice(X.shape[0], size=n_mixtures)
init_means = X[inx_rand,:]
if np.any(np.isnan(init_means)):
raise ValueError("Init means are NaN! ")
n_features = X.shape[1]
init_covars = np.empty(shape=(n_mixtures, n_features, n_features), dtype=float)
for i in range(n_mixtures):
init_covars[i] = np.eye(n_features)
return(init_priors, init_means, init_covars) | Initialize mixture density parameters with
equal priors
random means
identity covariance matrices | train | https://github.com/vmirly/pyclust/blob/bdb12be4649e70c6c90da2605bc5f4b314e2d07e/pyclust/_gaussian_mixture_model.py#L8-L35 | null | import numpy as np
import scipy, scipy.linalg
from . import _kmeans
Epsilon = 100 * np.finfo(float).eps
Lambda = 0.1
def _init_mixture_params(X, n_mixtures, init_method):
    """Initial mixture parameters.

    Priors are uniform, covariances are identity matrices, and the means
    come either from k-means centers (``init_method == 'kmeans'``) or
    from randomly chosen rows of ``X``.
    """
    priors = np.ones(shape=n_mixtures, dtype=float) / n_mixtures

    if init_method == 'kmeans':
        km = _kmeans.KMeans(n_clusters=n_mixtures, n_trials=20)
        km.fit(X)
        means = km.centers_
    else:
        chosen = np.random.choice(X.shape[0], size=n_mixtures)
        means = X[chosen, :]

    if np.any(np.isnan(means)):
        raise ValueError("Init means are NaN! ")

    n_features = X.shape[1]
    covars = np.empty(shape=(n_mixtures, n_features, n_features), dtype=float)
    for component in range(n_mixtures):
        covars[component] = np.eye(n_features)

    return (priors, means, covars)
def __log_density_single(x, mean, covar):
    """Log of the multivariate normal density at ``x``.

    Reference/test implementation only; the vectorized
    _log_multivariate_density is preferred for real use.
    """
    n_dim = mean.shape[0]
    deviation = x - mean
    precision = scipy.linalg.inv(covar)
    quad_form = np.dot(np.dot(deviation.T, precision), deviation)
    log_det = np.log(scipy.linalg.det(covar))
    return -1/2 * (quad_form + n_dim*np.log(2*np.pi) + log_det)
def _log_multivariate_density(X, means, covars):
    """Log Gaussian class-conditional density for every component.

    log P(x | mu, Sigma) =
        -1/2 * (d*log(2*pi) + log|Sigma| + (x-mu)^T Sigma^-1 (x-mu))

    Returns an array of shape (n_samples, n_components).
    Raises ValueError when a covariance cannot be factorized or the
    triangular solve fails.
    """
    n_samples, n_dim = X.shape
    n_components = means.shape[0]
    assert(means.shape[0] == covars.shape[0])

    log_proba = np.empty(shape=(n_samples, n_components), dtype=float)
    for i, (mu, cov) in enumerate(zip(means, covars)):
        # The Cholesky factor yields log|Sigma| and solves the quadratic
        # form without explicitly inverting Sigma.
        try:
            cov_chol = scipy.linalg.cholesky(cov, lower=True)
        except scipy.linalg.LinAlgError:
            # Not positive definite: retry with a ridge (Lambda*I) added.
            try:
                cov_chol = scipy.linalg.cholesky(cov + Lambda*np.eye(n_dim), lower=True)
            except:
                raise ValueError("Triangular Matrix Decomposition not performed!\n")

        # log|Sigma| = 2 * sum(log(diag(L))) for Sigma = L L^T.
        cov_log_det = 2 * np.sum(np.log(np.diagonal(cov_chol)))
        try:
            cov_solve = scipy.linalg.solve_triangular(cov_chol, (X - mu).T, lower=True).T
        except:
            raise ValueError("Solve_triangular not perormed!\n")

        log_proba[:, i] = -0.5 * (np.sum(cov_solve ** 2, axis=1) + \
                n_dim * np.log(2 * np.pi) + cov_log_det)

    return(log_proba)
def _log_likelihood_per_sample(X, means, covars):
    """Per-sample log-likelihood and posterior responsibilities.

    Computes log sum_j P(x_i | theta_j) with the log-sum-exp trick for
    numerical stability, then the posterior P(w_j | x_i) by dividing the
    component densities by the total (in log space).

    Returns (log_likelihood of shape (n_samples,),
             post_proba of shape (n_samples, n_components)).

    NOTE(review): the mixture priors are not applied here, so components
    are implicitly weighted equally -- confirm intended.
    NOTE(review): Epsilon is added inside the per-component sum (before
    the log), slightly biasing very small likelihoods.
    """
    logden = _log_multivariate_density(X, means, covars)

    # log-sum-exp: subtract the per-row maximum before exponentiating.
    logden_max = logden.max(axis=1)
    log_likelihood = np.log(np.sum(np.exp(logden.T - logden_max) + Epsilon, axis=0))
    log_likelihood += logden_max

    post_proba = np.exp(logden - log_likelihood[:, np.newaxis])

    return (log_likelihood, post_proba)
def _validate_params(priors, means, covars):
    """Sanity-check maximum-likelihood estimates.

    Raises ValueError if any prior/mean/covariance contains NaN or inf,
    or if a covariance matrix is not symmetric positive-definite.
    """
    for i,(p,m,cv) in enumerate(zip(priors, means, covars)):
        if np.any(np.isinf(p)) or np.any(np.isnan(p)):
            raise ValueError("Component %d of priors is not valid " % i)
        if np.any(np.isinf(m)) or np.any(np.isnan(m)):
            raise ValueError("Component %d of means is not valid " % i)
        if np.any(np.isinf(cv)) or np.any(np.isnan(cv)):
            raise ValueError("Component %d of covars is not valid " % i)
        # Positive-definiteness: symmetric and all eigenvalues > 0.
        if (not np.allclose(cv, cv.T) or np.any(scipy.linalg.eigvalsh(cv) <= 0)):
            raise ValueError("Component %d of covars must be positive-definite" % i)
def _maximization_step(X, posteriors):
    """M-step: re-estimate priors, means and covariances from posteriors.

    priors : P(w_i) proportional to sum_x P(w_i | x), normalized.
    means  : posterior-weighted average of the rows of X.
    covars : posterior-weighted scatter around each mean, plus a
             Lambda*I ridge to keep the matrices positive definite.
    Epsilon guards every division against empty/zero-responsibility
    components.  Raises ValueError (via _validate_params) if any
    estimate comes out invalid.
    """
    ### Prior probabilities or class weights
    sum_post_proba = np.sum(posteriors, axis=0)
    prior_proba = sum_post_proba / (sum_post_proba.sum() + Epsilon)

    ### means
    means = np.dot(posteriors.T, X) / (sum_post_proba[:, np.newaxis] + Epsilon)

    ### covariance matrices
    n_components = posteriors.shape[1]
    n_features = X.shape[1]
    covars = np.empty(shape=(n_components, n_features, n_features), dtype=float)
    for i in range(n_components):
        post_i = posteriors[:, i]
        mean_i = means[i]
        diff_i = X - mean_i
        # Underflow here is harmless (tiny responsibilities); silence it.
        with np.errstate(under='ignore'):
            covar_i = np.dot(post_i * diff_i.T, diff_i) / (post_i.sum() + Epsilon)
        covars[i] = covar_i + Lambda * np.eye(n_features)

    _validate_params(prior_proba, means, covars)
    return(prior_proba, means, covars)
def _fit_gmm_params(X, n_mixtures, n_init, init_method, n_iter, tol):
    """Fit GMM parameters with EM, keeping the best of ``n_init`` runs.

    Each run alternates an E-step (posterior responsibilities) with an
    M-step (parameter re-estimation) until the mean log-likelihood
    changes by less than ``tol``.  The run with the highest mean
    log-likelihood wins.

    Fix: ``np.infty`` was removed in NumPy 2.0 -- use ``np.inf``.

    Returns a dict with keys 'priors', 'means', 'covars',
    'mean_log_likelihood' and 'n_iter'.
    """
    best_mean_loglikelihood = -np.inf
    for _ in range(n_init):
        priors, means, covars = _init_mixture_params(X, n_mixtures, init_method)

        prev_mean_loglikelihood = None
        for i in range(n_iter):
            ## E-step
            log_likelihoods, posteriors = _log_likelihood_per_sample(X, means, covars)

            ## M-step
            priors, means, covars = _maximization_step(X, posteriors)

            ## convergence check
            curr_mean_loglikelihood = log_likelihoods.mean()
            if prev_mean_loglikelihood is not None:
                if np.abs(curr_mean_loglikelihood - prev_mean_loglikelihood) < tol:
                    break
            prev_mean_loglikelihood = curr_mean_loglikelihood

        if curr_mean_loglikelihood > best_mean_loglikelihood:
            best_mean_loglikelihood = curr_mean_loglikelihood
            best_params = {
                'priors' : priors,
                'means' : means,
                'covars' : covars,
                'mean_log_likelihood' : curr_mean_loglikelihood,
                'n_iter' : i
            }

    return(best_params)
class GMM(object):
    """Gaussian Mixture Model fitted with expectation-maximization.

    Parameters
    ----------
    n_clusters : int >= 2, number of mixture components.
    n_trials : int, number of EM restarts; the best run is kept.
    init_method : '' for random initialization or 'kmeans'.
    max_iter : int, EM iterations per restart.
    tol : float, convergence tolerance on the mean log-likelihood.

    Attributes (after ``fit``)
    --------------------------
    priors_, means_, covars_, labels_
    """

    def __init__(self, n_clusters=2, n_trials=10, init_method='', max_iter=100, tol=0.0001):
        assert n_clusters >= 2, 'n_clusters should be >= 2'
        self.n_clusters = n_clusters
        self.n_trials = n_trials
        self.init_method = init_method
        self.max_iter = max_iter
        self.tol = tol
        self.converged = False

    def fit(self, X):
        """Estimate mixture-density parameters for ``X`` via EM."""
        params_dict = _fit_gmm_params(X=X, n_mixtures=self.n_clusters,
                                      n_init=self.n_trials,
                                      init_method=self.init_method,
                                      n_iter=self.max_iter, tol=self.tol)
        self.priors_ = params_dict['priors']
        self.means_ = params_dict['means']
        self.covars_ = params_dict['covars']
        self.converged = True
        self.labels_ = self.predict(X)

    def predict_proba(self, X):
        """Posterior probability of each component for every sample."""
        if not self.converged:
            raise Exception('Mixture model is not fit yet!! Try GMM.fit(X)')
        _, post_proba = _log_likelihood_per_sample(X=X, means=self.means_, covars=self.covars_)
        return post_proba

    def predict(self, X):
        """Hard cluster assignment: argmax of the posterior per sample."""
        return self.predict_proba(X).argmax(axis=1)
|
vmirly/pyclust | pyclust/_gaussian_mixture_model.py | __log_density_single | python | def __log_density_single(x, mean, covar):
n_dim = mean.shape[0]
dx = x - mean
covar_inv = scipy.linalg.inv(covar)
covar_det = scipy.linalg.det(covar)
den = np.dot(np.dot(dx.T, covar_inv), dx) + n_dim*np.log(2*np.pi) + np.log(covar_det)
return(-1/2 * den) | This is just a test function to calculate
the normal density at x given mean and covariance matrix.
Note: this function is not efficient, so
_log_multivariate_density is recommended for use. | train | https://github.com/vmirly/pyclust/blob/bdb12be4649e70c6c90da2605bc5f4b314e2d07e/pyclust/_gaussian_mixture_model.py#L39-L54 | null | import numpy as np
import scipy, scipy.linalg
from . import _kmeans
Epsilon = 100 * np.finfo(float).eps
Lambda = 0.1
def _init_mixture_params(X, n_mixtures, init_method):
    """Produce starting parameters for the EM loop.

    Priors are uniform, means come either from a k-means run
    (``init_method == 'kmeans'``) or from randomly chosen data rows,
    and every covariance matrix starts as the identity.

    Returns a ``(priors, means, covars)`` tuple with shapes
    ``(n_mixtures,)``, ``(n_mixtures, n_features)`` and
    ``(n_mixtures, n_features, n_features)``.
    """
    priors = np.ones(shape=n_mixtures, dtype=float) / n_mixtures

    if init_method == 'kmeans':
        clusterer = _kmeans.KMeans(n_clusters=n_mixtures, n_trials=20)
        clusterer.fit(X)
        centers = clusterer.centers_
    else:
        # Fall back to picking random data rows as the initial centers.
        picked = np.random.choice(X.shape[0], size=n_mixtures)
        centers = X[picked, :]

    if np.any(np.isnan(centers)):
        raise ValueError("Init means are NaN! ")

    n_features = X.shape[1]
    cov_mats = np.empty(shape=(n_mixtures, n_features, n_features), dtype=float)
    for k in range(n_mixtures):
        cov_mats[k] = np.eye(n_features)

    return (priors, centers, cov_mats)
def _log_multivariate_density(X, means, covars):
    """
    Log of the Gaussian class-conditional density for every sample/component pair.

    Class conditional density:
        P(x | mu, Sigma) = 1/((2pi)^d/2 * |Sigma|^1/2) * exp(-1/2 * (x-mu)^T * Sigma^-1 * (x-mu))
    log of class conditional density:
        log P(x | mu, Sigma) = -1/2*(d*log(2pi) + log(|Sigma|) + (x-mu)^T * Sigma^-1 * (x-mu))

    Parameters: X (n_samples, n_dim); means (n_components, n_dim);
    covars (n_components, n_dim, n_dim).
    Returns: log_proba (n_samples, n_components).
    Raises ValueError when a covariance cannot be factorized or solved,
    even after diagonal regularization with Lambda.
    """
    n_samples, n_dim = X.shape
    n_components = means.shape[0]

    assert(means.shape[0] == covars.shape[0])

    log_proba = np.empty(shape=(n_samples, n_components), dtype=float)
    for i, (mu, cov) in enumerate(zip(means, covars)):
        try:
            cov_chol = scipy.linalg.cholesky(cov, lower=True)
        except scipy.linalg.LinAlgError:
            # Covariance not positive-definite: regularize the diagonal and retry once.
            # Was a bare `except:` (swallowed KeyboardInterrupt etc.); narrowed and chained.
            try:
                cov_chol = scipy.linalg.cholesky(cov + Lambda*np.eye(n_dim), lower=True)
            except scipy.linalg.LinAlgError as err:
                raise ValueError("Triangular Matrix Decomposition not performed!\n") from err

        # log|Sigma| from the Cholesky factor: 2 * sum(log(diag(L)))
        cov_log_det = 2 * np.sum(np.log(np.diagonal(cov_chol)))

        try:
            # Solve L*y = (x - mu)^T so that sum(y^2) is the Mahalanobis term.
            cov_solve = scipy.linalg.solve_triangular(cov_chol, (X - mu).T, lower=True).T
        except (ValueError, scipy.linalg.LinAlgError) as err:
            raise ValueError("Solve_triangular not performed!\n") from err

        log_proba[:, i] = -0.5 * (np.sum(cov_solve ** 2, axis=1) + \
                         n_dim * np.log(2 * np.pi) + cov_log_det)

    return(log_proba)
def _log_likelihood_per_sample(X, means, covars):
    """Per-sample mixture log-likelihood and posterior responsibilities.

    Uses the log-sum-exp trick (shift by each row's max log-density) for
    numerical stability; Epsilon keeps the inner sum strictly positive.

    Returns ``(log_likelihood, post_proba)`` where ``log_likelihood`` has
    shape ``(n_samples,)`` and ``post_proba`` is ``(n_samples, n_components)``
    with ``post_proba[s, i] = P(component i | x_s)``.
    """
    log_density = _log_multivariate_density(X, means, covars)
    shift = log_density.max(axis=1)
    loglik = np.log(np.sum(np.exp(log_density.T - shift) + Epsilon, axis=0))
    loglik += shift
    responsibilities = np.exp(log_density - loglik[:, np.newaxis])
    return (loglik, responsibilities)
def _validate_params(priors, means, covars):
    """Sanity-check fitted mixture parameters.

    Raises ValueError if any prior/mean/covariance component contains
    inf/NaN, or if a covariance matrix is not symmetric positive-definite.
    """
    for idx, (weight, center, cov) in enumerate(zip(priors, means, covars)):
        # Check each parameter group in the same order: priors, means, covars.
        for value, label in ((weight, 'priors'), (center, 'means'), (cov, 'covars')):
            if np.any(np.isinf(value)) or np.any(np.isnan(value)):
                raise ValueError("Component %d of %s is not valid " % (idx, label))
        if not np.allclose(cov, cov.T) or np.any(scipy.linalg.eigvalsh(cov) <= 0):
            raise ValueError("Component %d of covars must be positive-definite" % idx)
def _maximization_step(X, posteriors):
    """M-step: re-estimate priors, means and covariances from responsibilities.

    priors: P(w_i) = sum_x P(w_i | x), normalized to [0, 1];
    means:  weighted average of the data under each component's posteriors;
    covars: weighted scatter of the centered data, plus Lambda*I on the
            diagonal to keep each matrix positive-definite.
    Epsilon in every denominator guards against division by zero.
    """
    # Component weights (prior probabilities).
    resp_totals = np.sum(posteriors, axis=0)
    weights = resp_totals / (resp_totals.sum() + Epsilon)

    # Component means.
    centers = np.dot(posteriors.T, X) / (resp_totals[:, np.newaxis] + Epsilon)

    # Component covariance matrices.
    n_comp = posteriors.shape[1]
    n_feat = X.shape[1]
    cov_mats = np.empty(shape=(n_comp, n_feat, n_feat), dtype=float)
    for k in range(n_comp):
        resp_k = posteriors[:, k]
        centered = X - centers[k]
        with np.errstate(under='ignore'):
            raw_cov = np.dot(resp_k * centered.T, centered) / (resp_k.sum() + Epsilon)
        cov_mats[k] = raw_cov + Lambda * np.eye(n_feat)

    _validate_params(weights, centers, cov_mats)
    return (weights, centers, cov_mats)
def _fit_gmm_params(X, n_mixtures, n_init, init_method, n_iter, tol):
    """Run EM `n_init` times from fresh initializations and keep the best fit.

    Each restart alternates E-step (posteriors) and M-step (parameter
    re-estimation) for up to `n_iter` iterations (must be >= 1), stopping
    early once the mean log-likelihood improves by less than `tol`.

    Returns a dict with keys 'priors', 'means', 'covars',
    'mean_log_likelihood' and 'n_iter' (last iteration index of the
    winning restart).
    """
    # np.infty was removed in NumPy 2.0; np.inf is the portable spelling.
    best_mean_loglikelihood = -np.inf
    best_params = None
    for _ in range(n_init):
        priors, means, covars = _init_mixture_params(X, n_mixtures, init_method)
        prev_mean_loglikelihood = None
        for i in range(n_iter):
            ## E-step
            log_likelihoods, posteriors = _log_likelihood_per_sample(X, means, covars)
            ## M-step
            priors, means, covars = _maximization_step(X, posteriors)
            ## convergence Check
            curr_mean_loglikelihood = log_likelihoods.mean()
            if prev_mean_loglikelihood is not None:
                if np.abs(curr_mean_loglikelihood - prev_mean_loglikelihood) < tol:
                    break
            prev_mean_loglikelihood = curr_mean_loglikelihood
        if curr_mean_loglikelihood > best_mean_loglikelihood:
            best_mean_loglikelihood = curr_mean_loglikelihood
            best_params = {
                'priors' : priors,
                'means' : means,
                'covars' : covars,
                'mean_log_likelihood' : curr_mean_loglikelihood,
                'n_iter' : i
            }
    if best_params is None:
        # Previously fell through to an UnboundLocalError; fail explicitly.
        raise ValueError("EM produced no valid fit (all restarts yielded "
                         "non-improving log-likelihood)")
    return(best_params)
class GMM(object):
    """
    Gaussian Mixture Model (GMM) fit by Expectation-Maximization.

    Parameters
    -------
    n_clusters : int, number of mixture components (>= 2)
    n_trials : int, number of EM restarts; the best fit is kept
    init_method : '' for random-row init, 'kmeans' for k-means init
    max_iter : int, maximum EM iterations per restart
    tol : float, convergence threshold on the mean log-likelihood

    Attributes
    -------
    labels_ : cluster labels for each data item (after fit)
    priors_, means_, covars_ : fitted mixture parameters (after fit)
    converged : True once fit(X) has completed

    Methods
    -------
    fit(X): fit the model (returns self)
    fit_predict(X): fit the model and return the cluster labels
    predict(X): hard cluster assignment
    predict_proba(X): posterior component probabilities per sample
    """

    def __init__(self, n_clusters=2, n_trials=10, init_method='', max_iter=100, tol=0.0001):
        assert n_clusters >= 2, 'n_clusters should be >= 2'
        self.n_clusters = n_clusters
        self.n_trials = n_trials
        self.init_method = init_method
        self.max_iter = max_iter
        self.tol = tol
        self.converged = False

    def fit(self, X):
        """ Fit mixture-density parameters with EM algorithm; returns self.
        """
        params_dict = _fit_gmm_params(X=X, n_mixtures=self.n_clusters, \
                n_init=self.n_trials, init_method=self.init_method, \
                n_iter=self.max_iter, tol=self.tol)
        self.priors_ = params_dict['priors']
        self.means_ = params_dict['means']
        self.covars_ = params_dict['covars']
        self.converged = True
        self.labels_ = self.predict(X)
        # Returning self (previously None) enables sklearn-style chaining
        # and is backward compatible with callers that ignore the result.
        return self

    def fit_predict(self, X):
        """ Fit the model and return the cluster labels.

        Advertised in the class docstring but previously not implemented.
        """
        self.fit(X)
        return self.labels_

    def predict_proba(self, X):
        """ Posterior probability of each component for every sample.

        Raises if called before fit(X).
        """
        if not self.converged:
            raise Exception('Mixture model is not fit yet!! Try GMM.fit(X)')
        _, post_proba = _log_likelihood_per_sample(X=X, means=self.means_, covars=self.covars_)
        return(post_proba)

    def predict(self, X):
        """ Hard cluster assignment: argmax over predict_proba(X).
        """
        post_proba = self.predict_proba(X)
        return(post_proba.argmax(axis=1))
|
vmirly/pyclust | pyclust/_gaussian_mixture_model.py | _log_multivariate_density | python | def _log_multivariate_density(X, means, covars):
n_samples, n_dim = X.shape
n_components = means.shape[0]
assert(means.shape[0] == covars.shape[0])
log_proba = np.empty(shape=(n_samples, n_components), dtype=float)
for i, (mu, cov) in enumerate(zip(means, covars)):
try:
cov_chol = scipy.linalg.cholesky(cov, lower=True)
except scipy.linalg.LinAlgError:
try:
cov_chol = scipy.linalg.cholesky(cov + Lambda*np.eye(n_dim), lower=True)
except:
raise ValueError("Triangular Matrix Decomposition not performed!\n")
cov_log_det = 2 * np.sum(np.log(np.diagonal(cov_chol)))
try:
cov_solve = scipy.linalg.solve_triangular(cov_chol, (X - mu).T, lower=True).T
except:
raise ValueError("Solve_triangular not perormed!\n")
log_proba[:, i] = -0.5 * (np.sum(cov_solve ** 2, axis=1) + \
n_dim * np.log(2 * np.pi) + cov_log_det)
return(log_proba) | Class conditional density:
P(x | mu, Sigma) = 1/((2pi)^d/2 * |Sigma|^1/2) * exp(-1/2 * (x-mu)^T * Sigma^-1 * (x-mu))
log of class conditional density:
log P(x | mu, Sigma) = -1/2*(d*log(2pi) + log(|Sigma|) + (x-mu)^T * Sigma^-1 * (x-mu)) | train | https://github.com/vmirly/pyclust/blob/bdb12be4649e70c6c90da2605bc5f4b314e2d07e/pyclust/_gaussian_mixture_model.py#L57-L90 | null | import numpy as np
import scipy, scipy.linalg
from . import _kmeans
Epsilon = 100 * np.finfo(float).eps
Lambda = 0.1
def _init_mixture_params(X, n_mixtures, init_method):
"""
Initialize mixture density parameters with
equal priors
random means
identity covariance matrices
"""
init_priors = np.ones(shape=n_mixtures, dtype=float) / n_mixtures
if init_method == 'kmeans':
km = _kmeans.KMeans(n_clusters = n_mixtures, n_trials=20)
km.fit(X)
init_means = km.centers_
else:
inx_rand = np.random.choice(X.shape[0], size=n_mixtures)
init_means = X[inx_rand,:]
if np.any(np.isnan(init_means)):
raise ValueError("Init means are NaN! ")
n_features = X.shape[1]
init_covars = np.empty(shape=(n_mixtures, n_features, n_features), dtype=float)
for i in range(n_mixtures):
init_covars[i] = np.eye(n_features)
return(init_priors, init_means, init_covars)
def __log_density_single(x, mean, covar):
    """ Reference (slow) implementation of the multivariate normal
    log-density at a single point x, given a mean vector and a
    covariance matrix. Kept only for testing;
    _log_multivariate_density is the vectorized routine used in practice.
    """
    dim = mean.shape[0]
    offset = x - mean
    quad_form = np.dot(np.dot(offset.T, scipy.linalg.inv(covar)), offset)
    total = quad_form + dim * np.log(2 * np.pi) + np.log(scipy.linalg.det(covar))
    return -total / 2
def _log_likelihood_per_sample(X, means, covars):
"""
Theta = (theta_1, theta_2, ... theta_M)
Likelihood of mixture parameters given data: L(Theta | X) = product_i P(x_i | Theta)
log likelihood: log L(Theta | X) = sum_i log(P(x_i | Theta))
and note that p(x_i | Theta) = sum_j prior_j * p(x_i | theta_j)
Probability of sample x being generated from component i:
P(w_i | x) = P(x|w_i) * P(w_i) / P(X)
where P(X) = sum_i P(x|w_i) * P(w_i)
Here post_proba = P/(w_i | x)
and log_likelihood = log(P(x|w_i))
"""
logden = _log_multivariate_density(X, means, covars)
logden_max = logden.max(axis=1)
log_likelihood = np.log(np.sum(np.exp(logden.T - logden_max) + Epsilon, axis=0))
log_likelihood += logden_max
post_proba = np.exp(logden - log_likelihood[:, np.newaxis])
return (log_likelihood, post_proba)
def _validate_params(priors, means, covars):
""" Validation Check for M.L. paramateres
"""
for i,(p,m,cv) in enumerate(zip(priors, means, covars)):
if np.any(np.isinf(p)) or np.any(np.isnan(p)):
raise ValueError("Component %d of priors is not valid " % i)
if np.any(np.isinf(m)) or np.any(np.isnan(m)):
raise ValueError("Component %d of means is not valid " % i)
if np.any(np.isinf(cv)) or np.any(np.isnan(cv)):
raise ValueError("Component %d of covars is not valid " % i)
if (not np.allclose(cv, cv.T) or np.any(scipy.linalg.eigvalsh(cv) <= 0)):
raise ValueError("Component %d of covars must be positive-definite" % i)
def _maximization_step(X, posteriors):
"""
Update class parameters as below:
priors: P(w_i) = sum_x P(w_i | x) ==> Then normalize to get in [0,1]
Class means: center_w_i = sum_x P(w_i|x)*x / sum_i sum_x P(w_i|x)
"""
### Prior probabilities or class weights
sum_post_proba = np.sum(posteriors, axis=0)
prior_proba = sum_post_proba / (sum_post_proba.sum() + Epsilon)
### means
means = np.dot(posteriors.T, X) / (sum_post_proba[:, np.newaxis] + Epsilon)
### covariance matrices
n_components = posteriors.shape[1]
n_features = X.shape[1]
covars = np.empty(shape=(n_components, n_features, n_features), dtype=float)
for i in range(n_components):
post_i = posteriors[:, i]
mean_i = means[i]
diff_i = X - mean_i
with np.errstate(under='ignore'):
covar_i = np.dot(post_i * diff_i.T, diff_i) / (post_i.sum() + Epsilon)
covars[i] = covar_i + Lambda * np.eye(n_features)
_validate_params(prior_proba, means, covars)
return(prior_proba, means, covars)
def _fit_gmm_params(X, n_mixtures, n_init, init_method, n_iter, tol):
"""
"""
best_mean_loglikelihood = -np.infty
for _ in range(n_init):
priors, means, covars = _init_mixture_params(X, n_mixtures, init_method)
prev_mean_loglikelihood = None
for i in range(n_iter):
## E-step
log_likelihoods, posteriors = _log_likelihood_per_sample(X, means, covars)
## M-step
priors, means, covars = _maximization_step(X, posteriors)
## convergence Check
curr_mean_loglikelihood = log_likelihoods.mean()
if prev_mean_loglikelihood is not None:
if np.abs(curr_mean_loglikelihood - prev_mean_loglikelihood) < tol:
break
prev_mean_loglikelihood = curr_mean_loglikelihood
if curr_mean_loglikelihood > best_mean_loglikelihood:
best_mean_loglikelihood = curr_mean_loglikelihood
best_params = {
'priors' : priors,
'means' : means,
'covars' : covars,
'mean_log_likelihood' : curr_mean_loglikelihood,
'n_iter' : i
}
return(best_params)
class GMM(object):
"""
Gaussian Mixture Model (GMM)
Parameters
-------
Attibutes
-------
labels_ : cluster labels for each data item
Methods
-------
fit(X): fit the model
fit_predict(X): fit the model and return the cluster labels
"""
def __init__(self, n_clusters=2, n_trials=10, init_method='', max_iter=100, tol=0.0001):
assert n_clusters >= 2, 'n_clusters should be >= 2'
self.n_clusters = n_clusters
self.n_trials = n_trials
self.init_method = init_method
self.max_iter = max_iter
self.tol = tol
self.converged = False
def fit(self, X):
""" Fit mixture-density parameters with EM algorithm
"""
params_dict = _fit_gmm_params(X=X, n_mixtures=self.n_clusters, \
n_init=self.n_trials, init_method=self.init_method, \
n_iter=self.max_iter, tol=self.tol)
self.priors_ = params_dict['priors']
self.means_ = params_dict['means']
self.covars_ = params_dict['covars']
self.converged = True
self.labels_ = self.predict(X)
def predict_proba(self, X):
"""
"""
if not self.converged:
raise Exception('Mixture model is not fit yet!! Try GMM.fit(X)')
_, post_proba = _log_likelihood_per_sample(X=X, means=self.means_, covars=self.covars_)
return(post_proba)
def predict(self, X):
"""
"""
post_proba = self.predict_proba(X)
return(post_proba.argmax(axis=1))
|
vmirly/pyclust | pyclust/_gaussian_mixture_model.py | _log_likelihood_per_sample | python | def _log_likelihood_per_sample(X, means, covars):
logden = _log_multivariate_density(X, means, covars)
logden_max = logden.max(axis=1)
log_likelihood = np.log(np.sum(np.exp(logden.T - logden_max) + Epsilon, axis=0))
log_likelihood += logden_max
post_proba = np.exp(logden - log_likelihood[:, np.newaxis])
return (log_likelihood, post_proba) | Theta = (theta_1, theta_2, ... theta_M)
Likelihood of mixture parameters given data: L(Theta | X) = product_i P(x_i | Theta)
log likelihood: log L(Theta | X) = sum_i log(P(x_i | Theta))
and note that p(x_i | Theta) = sum_j prior_j * p(x_i | theta_j)
Probability of sample x being generated from component i:
P(w_i | x) = P(x|w_i) * P(w_i) / P(X)
where P(X) = sum_i P(x|w_i) * P(w_i)
Here post_proba = P/(w_i | x)
and log_likelihood = log(P(x|w_i)) | train | https://github.com/vmirly/pyclust/blob/bdb12be4649e70c6c90da2605bc5f4b314e2d07e/pyclust/_gaussian_mixture_model.py#L95-L120 | [
"def _log_multivariate_density(X, means, covars):\n \"\"\"\n Class conditional density:\n P(x | mu, Sigma) = 1/((2pi)^d/2 * |Sigma|^1/2) * exp(-1/2 * (x-mu)^T * Sigma^-1 * (x-mu))\n\n log of class conditional density:\n log P(x | mu, Sigma) = -1/2*(d*log(2pi) + log(|Sigma|) + (x-mu)^T * Sigma^-1 * (x-mu))\n \"\"\"\n n_samples, n_dim = X.shape\n n_components = means.shape[0]\n\n assert(means.shape[0] == covars.shape[0])\n\n log_proba = np.empty(shape=(n_samples, n_components), dtype=float)\n for i, (mu, cov) in enumerate(zip(means, covars)):\n try:\n cov_chol = scipy.linalg.cholesky(cov, lower=True)\n except scipy.linalg.LinAlgError:\n try:\n cov_chol = scipy.linalg.cholesky(cov + Lambda*np.eye(n_dim), lower=True)\n except:\n raise ValueError(\"Triangular Matrix Decomposition not performed!\\n\")\n\n cov_log_det = 2 * np.sum(np.log(np.diagonal(cov_chol)))\n\n try:\n cov_solve = scipy.linalg.solve_triangular(cov_chol, (X - mu).T, lower=True).T\n except:\n raise ValueError(\"Solve_triangular not perormed!\\n\")\n\n log_proba[:, i] = -0.5 * (np.sum(cov_solve ** 2, axis=1) + \\\n n_dim * np.log(2 * np.pi) + cov_log_det)\n\n return(log_proba)\n"
] | import numpy as np
import scipy, scipy.linalg
from . import _kmeans
Epsilon = 100 * np.finfo(float).eps
Lambda = 0.1
def _init_mixture_params(X, n_mixtures, init_method):
"""
Initialize mixture density parameters with
equal priors
random means
identity covariance matrices
"""
init_priors = np.ones(shape=n_mixtures, dtype=float) / n_mixtures
if init_method == 'kmeans':
km = _kmeans.KMeans(n_clusters = n_mixtures, n_trials=20)
km.fit(X)
init_means = km.centers_
else:
inx_rand = np.random.choice(X.shape[0], size=n_mixtures)
init_means = X[inx_rand,:]
if np.any(np.isnan(init_means)):
raise ValueError("Init means are NaN! ")
n_features = X.shape[1]
init_covars = np.empty(shape=(n_mixtures, n_features, n_features), dtype=float)
for i in range(n_mixtures):
init_covars[i] = np.eye(n_features)
return(init_priors, init_means, init_covars)
def __log_density_single(x, mean, covar):
""" This is just a test function to calculate
the normal density at x given mean and covariance matrix.
Note: this function is not efficient, so
_log_multivariate_density is recommended for use.
"""
n_dim = mean.shape[0]
dx = x - mean
covar_inv = scipy.linalg.inv(covar)
covar_det = scipy.linalg.det(covar)
den = np.dot(np.dot(dx.T, covar_inv), dx) + n_dim*np.log(2*np.pi) + np.log(covar_det)
return(-1/2 * den)
def _log_multivariate_density(X, means, covars):
"""
Class conditional density:
P(x | mu, Sigma) = 1/((2pi)^d/2 * |Sigma|^1/2) * exp(-1/2 * (x-mu)^T * Sigma^-1 * (x-mu))
log of class conditional density:
log P(x | mu, Sigma) = -1/2*(d*log(2pi) + log(|Sigma|) + (x-mu)^T * Sigma^-1 * (x-mu))
"""
n_samples, n_dim = X.shape
n_components = means.shape[0]
assert(means.shape[0] == covars.shape[0])
log_proba = np.empty(shape=(n_samples, n_components), dtype=float)
for i, (mu, cov) in enumerate(zip(means, covars)):
try:
cov_chol = scipy.linalg.cholesky(cov, lower=True)
except scipy.linalg.LinAlgError:
try:
cov_chol = scipy.linalg.cholesky(cov + Lambda*np.eye(n_dim), lower=True)
except:
raise ValueError("Triangular Matrix Decomposition not performed!\n")
cov_log_det = 2 * np.sum(np.log(np.diagonal(cov_chol)))
try:
cov_solve = scipy.linalg.solve_triangular(cov_chol, (X - mu).T, lower=True).T
except:
raise ValueError("Solve_triangular not perormed!\n")
log_proba[:, i] = -0.5 * (np.sum(cov_solve ** 2, axis=1) + \
n_dim * np.log(2 * np.pi) + cov_log_det)
return(log_proba)
def _log_likelihood_per_sample(X, means, covars):
"""
Theta = (theta_1, theta_2, ... theta_M)
Likelihood of mixture parameters given data: L(Theta | X) = product_i P(x_i | Theta)
log likelihood: log L(Theta | X) = sum_i log(P(x_i | Theta))
and note that p(x_i | Theta) = sum_j prior_j * p(x_i | theta_j)
Probability of sample x being generated from component i:
P(w_i | x) = P(x|w_i) * P(w_i) / P(X)
where P(X) = sum_i P(x|w_i) * P(w_i)
Here post_proba = P/(w_i | x)
and log_likelihood = log(P(x|w_i))
"""
logden = _log_multivariate_density(X, means, covars)
logden_max = logden.max(axis=1)
log_likelihood = np.log(np.sum(np.exp(logden.T - logden_max) + Epsilon, axis=0))
log_likelihood += logden_max
post_proba = np.exp(logden - log_likelihood[:, np.newaxis])
return (log_likelihood, post_proba)
def _validate_params(priors, means, covars):
""" Validation Check for M.L. paramateres
"""
for i,(p,m,cv) in enumerate(zip(priors, means, covars)):
if np.any(np.isinf(p)) or np.any(np.isnan(p)):
raise ValueError("Component %d of priors is not valid " % i)
if np.any(np.isinf(m)) or np.any(np.isnan(m)):
raise ValueError("Component %d of means is not valid " % i)
if np.any(np.isinf(cv)) or np.any(np.isnan(cv)):
raise ValueError("Component %d of covars is not valid " % i)
if (not np.allclose(cv, cv.T) or np.any(scipy.linalg.eigvalsh(cv) <= 0)):
raise ValueError("Component %d of covars must be positive-definite" % i)
def _maximization_step(X, posteriors):
"""
Update class parameters as below:
priors: P(w_i) = sum_x P(w_i | x) ==> Then normalize to get in [0,1]
Class means: center_w_i = sum_x P(w_i|x)*x / sum_i sum_x P(w_i|x)
"""
### Prior probabilities or class weights
sum_post_proba = np.sum(posteriors, axis=0)
prior_proba = sum_post_proba / (sum_post_proba.sum() + Epsilon)
### means
means = np.dot(posteriors.T, X) / (sum_post_proba[:, np.newaxis] + Epsilon)
### covariance matrices
n_components = posteriors.shape[1]
n_features = X.shape[1]
covars = np.empty(shape=(n_components, n_features, n_features), dtype=float)
for i in range(n_components):
post_i = posteriors[:, i]
mean_i = means[i]
diff_i = X - mean_i
with np.errstate(under='ignore'):
covar_i = np.dot(post_i * diff_i.T, diff_i) / (post_i.sum() + Epsilon)
covars[i] = covar_i + Lambda * np.eye(n_features)
_validate_params(prior_proba, means, covars)
return(prior_proba, means, covars)
def _fit_gmm_params(X, n_mixtures, n_init, init_method, n_iter, tol):
"""
"""
best_mean_loglikelihood = -np.infty
for _ in range(n_init):
priors, means, covars = _init_mixture_params(X, n_mixtures, init_method)
prev_mean_loglikelihood = None
for i in range(n_iter):
## E-step
log_likelihoods, posteriors = _log_likelihood_per_sample(X, means, covars)
## M-step
priors, means, covars = _maximization_step(X, posteriors)
## convergence Check
curr_mean_loglikelihood = log_likelihoods.mean()
if prev_mean_loglikelihood is not None:
if np.abs(curr_mean_loglikelihood - prev_mean_loglikelihood) < tol:
break
prev_mean_loglikelihood = curr_mean_loglikelihood
if curr_mean_loglikelihood > best_mean_loglikelihood:
best_mean_loglikelihood = curr_mean_loglikelihood
best_params = {
'priors' : priors,
'means' : means,
'covars' : covars,
'mean_log_likelihood' : curr_mean_loglikelihood,
'n_iter' : i
}
return(best_params)
class GMM(object):
"""
Gaussian Mixture Model (GMM)
Parameters
-------
Attibutes
-------
labels_ : cluster labels for each data item
Methods
-------
fit(X): fit the model
fit_predict(X): fit the model and return the cluster labels
"""
def __init__(self, n_clusters=2, n_trials=10, init_method='', max_iter=100, tol=0.0001):
assert n_clusters >= 2, 'n_clusters should be >= 2'
self.n_clusters = n_clusters
self.n_trials = n_trials
self.init_method = init_method
self.max_iter = max_iter
self.tol = tol
self.converged = False
def fit(self, X):
""" Fit mixture-density parameters with EM algorithm
"""
params_dict = _fit_gmm_params(X=X, n_mixtures=self.n_clusters, \
n_init=self.n_trials, init_method=self.init_method, \
n_iter=self.max_iter, tol=self.tol)
self.priors_ = params_dict['priors']
self.means_ = params_dict['means']
self.covars_ = params_dict['covars']
self.converged = True
self.labels_ = self.predict(X)
def predict_proba(self, X):
"""
"""
if not self.converged:
raise Exception('Mixture model is not fit yet!! Try GMM.fit(X)')
_, post_proba = _log_likelihood_per_sample(X=X, means=self.means_, covars=self.covars_)
return(post_proba)
def predict(self, X):
"""
"""
post_proba = self.predict_proba(X)
return(post_proba.argmax(axis=1))
|
vmirly/pyclust | pyclust/_gaussian_mixture_model.py | _validate_params | python | def _validate_params(priors, means, covars):
for i,(p,m,cv) in enumerate(zip(priors, means, covars)):
if np.any(np.isinf(p)) or np.any(np.isnan(p)):
raise ValueError("Component %d of priors is not valid " % i)
if np.any(np.isinf(m)) or np.any(np.isnan(m)):
raise ValueError("Component %d of means is not valid " % i)
if np.any(np.isinf(cv)) or np.any(np.isnan(cv)):
raise ValueError("Component %d of covars is not valid " % i)
if (not np.allclose(cv, cv.T) or np.any(scipy.linalg.eigvalsh(cv) <= 0)):
raise ValueError("Component %d of covars must be positive-definite" % i) | Validation Check for M.L. paramateres | train | https://github.com/vmirly/pyclust/blob/bdb12be4649e70c6c90da2605bc5f4b314e2d07e/pyclust/_gaussian_mixture_model.py#L124-L139 | null | import numpy as np
import scipy, scipy.linalg
from . import _kmeans
Epsilon = 100 * np.finfo(float).eps
Lambda = 0.1
def _init_mixture_params(X, n_mixtures, init_method):
"""
Initialize mixture density parameters with
equal priors
random means
identity covariance matrices
"""
init_priors = np.ones(shape=n_mixtures, dtype=float) / n_mixtures
if init_method == 'kmeans':
km = _kmeans.KMeans(n_clusters = n_mixtures, n_trials=20)
km.fit(X)
init_means = km.centers_
else:
inx_rand = np.random.choice(X.shape[0], size=n_mixtures)
init_means = X[inx_rand,:]
if np.any(np.isnan(init_means)):
raise ValueError("Init means are NaN! ")
n_features = X.shape[1]
init_covars = np.empty(shape=(n_mixtures, n_features, n_features), dtype=float)
for i in range(n_mixtures):
init_covars[i] = np.eye(n_features)
return(init_priors, init_means, init_covars)
def __log_density_single(x, mean, covar):
""" This is just a test function to calculate
the normal density at x given mean and covariance matrix.
Note: this function is not efficient, so
_log_multivariate_density is recommended for use.
"""
n_dim = mean.shape[0]
dx = x - mean
covar_inv = scipy.linalg.inv(covar)
covar_det = scipy.linalg.det(covar)
den = np.dot(np.dot(dx.T, covar_inv), dx) + n_dim*np.log(2*np.pi) + np.log(covar_det)
return(-1/2 * den)
def _log_multivariate_density(X, means, covars):
"""
Class conditional density:
P(x | mu, Sigma) = 1/((2pi)^d/2 * |Sigma|^1/2) * exp(-1/2 * (x-mu)^T * Sigma^-1 * (x-mu))
log of class conditional density:
log P(x | mu, Sigma) = -1/2*(d*log(2pi) + log(|Sigma|) + (x-mu)^T * Sigma^-1 * (x-mu))
"""
n_samples, n_dim = X.shape
n_components = means.shape[0]
assert(means.shape[0] == covars.shape[0])
log_proba = np.empty(shape=(n_samples, n_components), dtype=float)
for i, (mu, cov) in enumerate(zip(means, covars)):
try:
cov_chol = scipy.linalg.cholesky(cov, lower=True)
except scipy.linalg.LinAlgError:
try:
cov_chol = scipy.linalg.cholesky(cov + Lambda*np.eye(n_dim), lower=True)
except:
raise ValueError("Triangular Matrix Decomposition not performed!\n")
cov_log_det = 2 * np.sum(np.log(np.diagonal(cov_chol)))
try:
cov_solve = scipy.linalg.solve_triangular(cov_chol, (X - mu).T, lower=True).T
except:
raise ValueError("Solve_triangular not perormed!\n")
log_proba[:, i] = -0.5 * (np.sum(cov_solve ** 2, axis=1) + \
n_dim * np.log(2 * np.pi) + cov_log_det)
return(log_proba)
def _log_likelihood_per_sample(X, means, covars):
"""
Theta = (theta_1, theta_2, ... theta_M)
Likelihood of mixture parameters given data: L(Theta | X) = product_i P(x_i | Theta)
log likelihood: log L(Theta | X) = sum_i log(P(x_i | Theta))
and note that p(x_i | Theta) = sum_j prior_j * p(x_i | theta_j)
Probability of sample x being generated from component i:
P(w_i | x) = P(x|w_i) * P(w_i) / P(X)
where P(X) = sum_i P(x|w_i) * P(w_i)
Here post_proba = P/(w_i | x)
and log_likelihood = log(P(x|w_i))
"""
logden = _log_multivariate_density(X, means, covars)
logden_max = logden.max(axis=1)
log_likelihood = np.log(np.sum(np.exp(logden.T - logden_max) + Epsilon, axis=0))
log_likelihood += logden_max
post_proba = np.exp(logden - log_likelihood[:, np.newaxis])
return (log_likelihood, post_proba)
def _maximization_step(X, posteriors):
"""
Update class parameters as below:
priors: P(w_i) = sum_x P(w_i | x) ==> Then normalize to get in [0,1]
Class means: center_w_i = sum_x P(w_i|x)*x / sum_i sum_x P(w_i|x)
"""
### Prior probabilities or class weights
sum_post_proba = np.sum(posteriors, axis=0)
prior_proba = sum_post_proba / (sum_post_proba.sum() + Epsilon)
### means
means = np.dot(posteriors.T, X) / (sum_post_proba[:, np.newaxis] + Epsilon)
### covariance matrices
n_components = posteriors.shape[1]
n_features = X.shape[1]
covars = np.empty(shape=(n_components, n_features, n_features), dtype=float)
for i in range(n_components):
post_i = posteriors[:, i]
mean_i = means[i]
diff_i = X - mean_i
with np.errstate(under='ignore'):
covar_i = np.dot(post_i * diff_i.T, diff_i) / (post_i.sum() + Epsilon)
covars[i] = covar_i + Lambda * np.eye(n_features)
_validate_params(prior_proba, means, covars)
return(prior_proba, means, covars)
def _fit_gmm_params(X, n_mixtures, n_init, init_method, n_iter, tol):
"""
"""
best_mean_loglikelihood = -np.infty
for _ in range(n_init):
priors, means, covars = _init_mixture_params(X, n_mixtures, init_method)
prev_mean_loglikelihood = None
for i in range(n_iter):
## E-step
log_likelihoods, posteriors = _log_likelihood_per_sample(X, means, covars)
## M-step
priors, means, covars = _maximization_step(X, posteriors)
## convergence Check
curr_mean_loglikelihood = log_likelihoods.mean()
if prev_mean_loglikelihood is not None:
if np.abs(curr_mean_loglikelihood - prev_mean_loglikelihood) < tol:
break
prev_mean_loglikelihood = curr_mean_loglikelihood
if curr_mean_loglikelihood > best_mean_loglikelihood:
best_mean_loglikelihood = curr_mean_loglikelihood
best_params = {
'priors' : priors,
'means' : means,
'covars' : covars,
'mean_log_likelihood' : curr_mean_loglikelihood,
'n_iter' : i
}
return(best_params)
class GMM(object):
"""
Gaussian Mixture Model (GMM)
Parameters
-------
Attibutes
-------
labels_ : cluster labels for each data item
Methods
-------
fit(X): fit the model
fit_predict(X): fit the model and return the cluster labels
"""
def __init__(self, n_clusters=2, n_trials=10, init_method='', max_iter=100, tol=0.0001):
assert n_clusters >= 2, 'n_clusters should be >= 2'
self.n_clusters = n_clusters
self.n_trials = n_trials
self.init_method = init_method
self.max_iter = max_iter
self.tol = tol
self.converged = False
def fit(self, X):
""" Fit mixture-density parameters with EM algorithm
"""
params_dict = _fit_gmm_params(X=X, n_mixtures=self.n_clusters, \
n_init=self.n_trials, init_method=self.init_method, \
n_iter=self.max_iter, tol=self.tol)
self.priors_ = params_dict['priors']
self.means_ = params_dict['means']
self.covars_ = params_dict['covars']
self.converged = True
self.labels_ = self.predict(X)
def predict_proba(self, X):
"""
"""
if not self.converged:
raise Exception('Mixture model is not fit yet!! Try GMM.fit(X)')
_, post_proba = _log_likelihood_per_sample(X=X, means=self.means_, covars=self.covars_)
return(post_proba)
def predict(self, X):
"""
"""
post_proba = self.predict_proba(X)
return(post_proba.argmax(axis=1))
|
vmirly/pyclust | pyclust/_gaussian_mixture_model.py | _maximization_step | python | def _maximization_step(X, posteriors):
### Prior probabilities or class weights
sum_post_proba = np.sum(posteriors, axis=0)
prior_proba = sum_post_proba / (sum_post_proba.sum() + Epsilon)
### means
means = np.dot(posteriors.T, X) / (sum_post_proba[:, np.newaxis] + Epsilon)
### covariance matrices
n_components = posteriors.shape[1]
n_features = X.shape[1]
covars = np.empty(shape=(n_components, n_features, n_features), dtype=float)
for i in range(n_components):
post_i = posteriors[:, i]
mean_i = means[i]
diff_i = X - mean_i
with np.errstate(under='ignore'):
covar_i = np.dot(post_i * diff_i.T, diff_i) / (post_i.sum() + Epsilon)
covars[i] = covar_i + Lambda * np.eye(n_features)
_validate_params(prior_proba, means, covars)
return(prior_proba, means, covars) | Update class parameters as below:
priors: P(w_i) = sum_x P(w_i | x) ==> Then normalize to get in [0,1]
Class means: center_w_i = sum_x P(w_i|x)*x / sum_i sum_x P(w_i|x) | train | https://github.com/vmirly/pyclust/blob/bdb12be4649e70c6c90da2605bc5f4b314e2d07e/pyclust/_gaussian_mixture_model.py#L143-L173 | null | import numpy as np
import scipy, scipy.linalg
from . import _kmeans
Epsilon = 100 * np.finfo(float).eps
Lambda = 0.1
def _init_mixture_params(X, n_mixtures, init_method):
"""
Initialize mixture density parameters with
equal priors
random means
identity covariance matrices
"""
init_priors = np.ones(shape=n_mixtures, dtype=float) / n_mixtures
if init_method == 'kmeans':
km = _kmeans.KMeans(n_clusters = n_mixtures, n_trials=20)
km.fit(X)
init_means = km.centers_
else:
inx_rand = np.random.choice(X.shape[0], size=n_mixtures)
init_means = X[inx_rand,:]
if np.any(np.isnan(init_means)):
raise ValueError("Init means are NaN! ")
n_features = X.shape[1]
init_covars = np.empty(shape=(n_mixtures, n_features, n_features), dtype=float)
for i in range(n_mixtures):
init_covars[i] = np.eye(n_features)
return(init_priors, init_means, init_covars)
def __log_density_single(x, mean, covar):
""" This is just a test function to calculate
the normal density at x given mean and covariance matrix.
Note: this function is not efficient, so
_log_multivariate_density is recommended for use.
"""
n_dim = mean.shape[0]
dx = x - mean
covar_inv = scipy.linalg.inv(covar)
covar_det = scipy.linalg.det(covar)
den = np.dot(np.dot(dx.T, covar_inv), dx) + n_dim*np.log(2*np.pi) + np.log(covar_det)
return(-1/2 * den)
def _log_multivariate_density(X, means, covars):
"""
Class conditional density:
P(x | mu, Sigma) = 1/((2pi)^d/2 * |Sigma|^1/2) * exp(-1/2 * (x-mu)^T * Sigma^-1 * (x-mu))
log of class conditional density:
log P(x | mu, Sigma) = -1/2*(d*log(2pi) + log(|Sigma|) + (x-mu)^T * Sigma^-1 * (x-mu))
"""
n_samples, n_dim = X.shape
n_components = means.shape[0]
assert(means.shape[0] == covars.shape[0])
log_proba = np.empty(shape=(n_samples, n_components), dtype=float)
for i, (mu, cov) in enumerate(zip(means, covars)):
try:
cov_chol = scipy.linalg.cholesky(cov, lower=True)
except scipy.linalg.LinAlgError:
try:
cov_chol = scipy.linalg.cholesky(cov + Lambda*np.eye(n_dim), lower=True)
except:
raise ValueError("Triangular Matrix Decomposition not performed!\n")
cov_log_det = 2 * np.sum(np.log(np.diagonal(cov_chol)))
try:
cov_solve = scipy.linalg.solve_triangular(cov_chol, (X - mu).T, lower=True).T
except:
raise ValueError("Solve_triangular not perormed!\n")
log_proba[:, i] = -0.5 * (np.sum(cov_solve ** 2, axis=1) + \
n_dim * np.log(2 * np.pi) + cov_log_det)
return(log_proba)
def _log_likelihood_per_sample(X, means, covars):
"""
Theta = (theta_1, theta_2, ... theta_M)
Likelihood of mixture parameters given data: L(Theta | X) = product_i P(x_i | Theta)
log likelihood: log L(Theta | X) = sum_i log(P(x_i | Theta))
and note that p(x_i | Theta) = sum_j prior_j * p(x_i | theta_j)
Probability of sample x being generated from component i:
P(w_i | x) = P(x|w_i) * P(w_i) / P(X)
where P(X) = sum_i P(x|w_i) * P(w_i)
Here post_proba = P/(w_i | x)
and log_likelihood = log(P(x|w_i))
"""
logden = _log_multivariate_density(X, means, covars)
logden_max = logden.max(axis=1)
log_likelihood = np.log(np.sum(np.exp(logden.T - logden_max) + Epsilon, axis=0))
log_likelihood += logden_max
post_proba = np.exp(logden - log_likelihood[:, np.newaxis])
return (log_likelihood, post_proba)
def _validate_params(priors, means, covars):
""" Validation Check for M.L. paramateres
"""
for i,(p,m,cv) in enumerate(zip(priors, means, covars)):
if np.any(np.isinf(p)) or np.any(np.isnan(p)):
raise ValueError("Component %d of priors is not valid " % i)
if np.any(np.isinf(m)) or np.any(np.isnan(m)):
raise ValueError("Component %d of means is not valid " % i)
if np.any(np.isinf(cv)) or np.any(np.isnan(cv)):
raise ValueError("Component %d of covars is not valid " % i)
if (not np.allclose(cv, cv.T) or np.any(scipy.linalg.eigvalsh(cv) <= 0)):
raise ValueError("Component %d of covars must be positive-definite" % i)
def _maximization_step(X, posteriors):
"""
Update class parameters as below:
priors: P(w_i) = sum_x P(w_i | x) ==> Then normalize to get in [0,1]
Class means: center_w_i = sum_x P(w_i|x)*x / sum_i sum_x P(w_i|x)
"""
### Prior probabilities or class weights
sum_post_proba = np.sum(posteriors, axis=0)
prior_proba = sum_post_proba / (sum_post_proba.sum() + Epsilon)
### means
means = np.dot(posteriors.T, X) / (sum_post_proba[:, np.newaxis] + Epsilon)
### covariance matrices
n_components = posteriors.shape[1]
n_features = X.shape[1]
covars = np.empty(shape=(n_components, n_features, n_features), dtype=float)
for i in range(n_components):
post_i = posteriors[:, i]
mean_i = means[i]
diff_i = X - mean_i
with np.errstate(under='ignore'):
covar_i = np.dot(post_i * diff_i.T, diff_i) / (post_i.sum() + Epsilon)
covars[i] = covar_i + Lambda * np.eye(n_features)
_validate_params(prior_proba, means, covars)
return(prior_proba, means, covars)
def _fit_gmm_params(X, n_mixtures, n_init, init_method, n_iter, tol):
"""
"""
best_mean_loglikelihood = -np.infty
for _ in range(n_init):
priors, means, covars = _init_mixture_params(X, n_mixtures, init_method)
prev_mean_loglikelihood = None
for i in range(n_iter):
## E-step
log_likelihoods, posteriors = _log_likelihood_per_sample(X, means, covars)
## M-step
priors, means, covars = _maximization_step(X, posteriors)
## convergence Check
curr_mean_loglikelihood = log_likelihoods.mean()
if prev_mean_loglikelihood is not None:
if np.abs(curr_mean_loglikelihood - prev_mean_loglikelihood) < tol:
break
prev_mean_loglikelihood = curr_mean_loglikelihood
if curr_mean_loglikelihood > best_mean_loglikelihood:
best_mean_loglikelihood = curr_mean_loglikelihood
best_params = {
'priors' : priors,
'means' : means,
'covars' : covars,
'mean_log_likelihood' : curr_mean_loglikelihood,
'n_iter' : i
}
return(best_params)
class GMM(object):
"""
Gaussian Mixture Model (GMM)
Parameters
-------
Attibutes
-------
labels_ : cluster labels for each data item
Methods
-------
fit(X): fit the model
fit_predict(X): fit the model and return the cluster labels
"""
def __init__(self, n_clusters=2, n_trials=10, init_method='', max_iter=100, tol=0.0001):
assert n_clusters >= 2, 'n_clusters should be >= 2'
self.n_clusters = n_clusters
self.n_trials = n_trials
self.init_method = init_method
self.max_iter = max_iter
self.tol = tol
self.converged = False
def fit(self, X):
""" Fit mixture-density parameters with EM algorithm
"""
params_dict = _fit_gmm_params(X=X, n_mixtures=self.n_clusters, \
n_init=self.n_trials, init_method=self.init_method, \
n_iter=self.max_iter, tol=self.tol)
self.priors_ = params_dict['priors']
self.means_ = params_dict['means']
self.covars_ = params_dict['covars']
self.converged = True
self.labels_ = self.predict(X)
def predict_proba(self, X):
"""
"""
if not self.converged:
raise Exception('Mixture model is not fit yet!! Try GMM.fit(X)')
_, post_proba = _log_likelihood_per_sample(X=X, means=self.means_, covars=self.covars_)
return(post_proba)
def predict(self, X):
"""
"""
post_proba = self.predict_proba(X)
return(post_proba.argmax(axis=1))
|
vmirly/pyclust | pyclust/_gaussian_mixture_model.py | GMM.fit | python | def fit(self, X):
params_dict = _fit_gmm_params(X=X, n_mixtures=self.n_clusters, \
n_init=self.n_trials, init_method=self.init_method, \
n_iter=self.max_iter, tol=self.tol)
self.priors_ = params_dict['priors']
self.means_ = params_dict['means']
self.covars_ = params_dict['covars']
self.converged = True
self.labels_ = self.predict(X) | Fit mixture-density parameters with EM algorithm | train | https://github.com/vmirly/pyclust/blob/bdb12be4649e70c6c90da2605bc5f4b314e2d07e/pyclust/_gaussian_mixture_model.py#L247-L258 | [
"def _fit_gmm_params(X, n_mixtures, n_init, init_method, n_iter, tol):\n \"\"\"\n \"\"\"\n\n best_mean_loglikelihood = -np.infty\n\n for _ in range(n_init):\n priors, means, covars = _init_mixture_params(X, n_mixtures, init_method)\n prev_mean_loglikelihood = None\n for i in range(n_iter):\n ## E-step\n log_likelihoods, posteriors = _log_likelihood_per_sample(X, means, covars)\n\n ## M-step\n priors, means, covars = _maximization_step(X, posteriors)\n\n ## convergence Check\n curr_mean_loglikelihood = log_likelihoods.mean()\n\n if prev_mean_loglikelihood is not None:\n if np.abs(curr_mean_loglikelihood - prev_mean_loglikelihood) < tol:\n break\n\n prev_mean_loglikelihood = curr_mean_loglikelihood\n\n if curr_mean_loglikelihood > best_mean_loglikelihood:\n best_mean_loglikelihood = curr_mean_loglikelihood\n best_params = {\n 'priors' : priors,\n 'means' : means,\n 'covars' : covars,\n 'mean_log_likelihood' : curr_mean_loglikelihood,\n 'n_iter' : i\n }\n\n return(best_params)\n",
"def predict(self, X):\n \"\"\"\n \"\"\"\n post_proba = self.predict_proba(X)\n\n return(post_proba.argmax(axis=1))\n"
] | class GMM(object):
"""
Gaussian Mixture Model (GMM)
Parameters
-------
Attibutes
-------
labels_ : cluster labels for each data item
Methods
-------
fit(X): fit the model
fit_predict(X): fit the model and return the cluster labels
"""
def __init__(self, n_clusters=2, n_trials=10, init_method='', max_iter=100, tol=0.0001):
assert n_clusters >= 2, 'n_clusters should be >= 2'
self.n_clusters = n_clusters
self.n_trials = n_trials
self.init_method = init_method
self.max_iter = max_iter
self.tol = tol
self.converged = False
def predict_proba(self, X):
"""
"""
if not self.converged:
raise Exception('Mixture model is not fit yet!! Try GMM.fit(X)')
_, post_proba = _log_likelihood_per_sample(X=X, means=self.means_, covars=self.covars_)
return(post_proba)
def predict(self, X):
"""
"""
post_proba = self.predict_proba(X)
return(post_proba.argmax(axis=1))
|
vmirly/pyclust | pyclust/_kmeans.py | _kmeans_init | python | def _kmeans_init(X, n_clusters, method='balanced', rng=None):
n_samples = X.shape[0]
if rng is None:
cent_idx = np.random.choice(n_samples, replace=False, size=n_clusters)
else:
#print('Generate random centers using RNG')
cent_idx = rng.choice(n_samples, replace=False, size=n_clusters)
centers = X[cent_idx,:]
mean_X = np.mean(X, axis=0)
if method == 'balanced':
centers[n_clusters-1] = n_clusters*mean_X - np.sum(centers[:(n_clusters-1)], axis=0)
return (centers) | Initialize k=n_clusters centroids randomly | train | https://github.com/vmirly/pyclust/blob/bdb12be4649e70c6c90da2605bc5f4b314e2d07e/pyclust/_kmeans.py#L4-L20 | null | import numpy as np
import scipy.spatial
def _kmeans_init(X, n_clusters, method='balanced', rng=None):
""" Initialize k=n_clusters centroids randomly
"""
n_samples = X.shape[0]
if rng is None:
cent_idx = np.random.choice(n_samples, replace=False, size=n_clusters)
else:
#print('Generate random centers using RNG')
cent_idx = rng.choice(n_samples, replace=False, size=n_clusters)
centers = X[cent_idx,:]
mean_X = np.mean(X, axis=0)
if method == 'balanced':
centers[n_clusters-1] = n_clusters*mean_X - np.sum(centers[:(n_clusters-1)], axis=0)
return (centers)
def _assign_clusters(X, centers):
""" Assignment Step:
assign each point to the closet cluster center
"""
dist2cents = scipy.spatial.distance.cdist(X, centers, metric='seuclidean')
membs = np.argmin(dist2cents, axis=1)
return(membs)
def _cal_dist2center(X, center):
""" Calculate the SSE to the cluster center
"""
dmemb2cen = scipy.spatial.distance.cdist(X, center.reshape(1,X.shape[1]), metric='seuclidean')
return(np.sum(dmemb2cen))
def _update_centers(X, membs, n_clusters):
""" Update Cluster Centers:
calculate the mean of feature vectors for each cluster
"""
centers = np.empty(shape=(n_clusters, X.shape[1]), dtype=float)
sse = np.empty(shape=n_clusters, dtype=float)
for clust_id in range(n_clusters):
memb_ids = np.where(membs == clust_id)[0]
if memb_ids.shape[0] == 0:
memb_ids = np.random.choice(X.shape[0], size=1)
#print("Empty cluster replaced with ", memb_ids)
centers[clust_id,:] = np.mean(X[memb_ids,:], axis=0)
sse[clust_id] = _cal_dist2center(X[memb_ids,:], centers[clust_id,:])
return(centers, sse)
def _kmeans_run(X, n_clusters, max_iter, tol):
""" Run a single trial of k-means clustering
on dataset X, and given number of clusters
"""
membs = np.empty(shape=X.shape[0], dtype=int)
centers = _kmeans_init(X, n_clusters)
sse_last = 9999.9
n_iter = 0
for it in range(1,max_iter):
membs = _assign_clusters(X, centers)
centers,sse_arr = _update_centers(X, membs, n_clusters)
sse_total = np.sum(sse_arr)
if np.abs(sse_total - sse_last) < tol:
n_iter = it
break
sse_last = sse_total
return(centers, membs, sse_total, sse_arr, n_iter)
def _kmeans(X, n_clusters, max_iter, n_trials, tol):
""" Run multiple trials of k-means clustering,
and outputt he best centers, and cluster labels
"""
n_samples, n_features = X.shape[0], X.shape[1]
centers_best = np.empty(shape=(n_clusters,n_features), dtype=float)
labels_best = np.empty(shape=n_samples, dtype=int)
for i in range(n_trials):
centers, labels, sse_tot, sse_arr, n_iter = _kmeans_run(X, n_clusters, max_iter, tol)
if i==0:
sse_tot_best = sse_tot
sse_arr_best = sse_arr
n_iter_best = n_iter
centers_best = centers.copy()
labels_best = labels.copy()
if sse_tot < sse_tot_best:
sse_tot_best = sse_tot
sse_arr_best = sse_arr
n_iter_best = n_iter
centers_best = centers.copy()
labels_best = labels.copy()
return(centers_best, labels_best, sse_arr_best, n_iter_best)
class KMeans(object):
"""
KMeans Clustering
Parameters
-------
n_clusters: number of clusters (default = 2)
n_trials: number of trial random centroid initialization (default = 10)
max_iter: maximum number of iterations (default = 100)
tol: tolerance (default = 0.0001)
Attibutes
-------
labels_ : cluster labels for each data item
centers_ : cluster centers
sse_arr_ : array of SSE values for each cluster
n_iter_ : number of iterations for the best trial
Methods
-------
fit(X): fit the model
fit_predict(X): fit the model and return the cluster labels
"""
def __init__(self, n_clusters=2, n_trials=10, max_iter=100, tol=0.001):
self.n_clusters = n_clusters
self.n_trials = n_trials
self.max_iter = max_iter
self.tol = tol
def fit(self, X):
""" Apply KMeans Clustering
X: dataset with feature vectors
"""
self.centers_, self.labels_, self.sse_arr_, self.n_iter_ = \
_kmeans(X, self.n_clusters, self.max_iter, self.n_trials, self.tol)
def fit_predict(self, X):
""" Apply KMeans Clustering,
and return cluster labels
"""
self.fit(X)
return(self.labels_)
|
vmirly/pyclust | pyclust/_kmeans.py | _assign_clusters | python | def _assign_clusters(X, centers):
dist2cents = scipy.spatial.distance.cdist(X, centers, metric='seuclidean')
membs = np.argmin(dist2cents, axis=1)
return(membs) | Assignment Step:
assign each point to the closet cluster center | train | https://github.com/vmirly/pyclust/blob/bdb12be4649e70c6c90da2605bc5f4b314e2d07e/pyclust/_kmeans.py#L23-L30 | null | import numpy as np
import scipy.spatial
def _kmeans_init(X, n_clusters, method='balanced', rng=None):
""" Initialize k=n_clusters centroids randomly
"""
n_samples = X.shape[0]
if rng is None:
cent_idx = np.random.choice(n_samples, replace=False, size=n_clusters)
else:
#print('Generate random centers using RNG')
cent_idx = rng.choice(n_samples, replace=False, size=n_clusters)
centers = X[cent_idx,:]
mean_X = np.mean(X, axis=0)
if method == 'balanced':
centers[n_clusters-1] = n_clusters*mean_X - np.sum(centers[:(n_clusters-1)], axis=0)
return (centers)
def _cal_dist2center(X, center):
""" Calculate the SSE to the cluster center
"""
dmemb2cen = scipy.spatial.distance.cdist(X, center.reshape(1,X.shape[1]), metric='seuclidean')
return(np.sum(dmemb2cen))
def _update_centers(X, membs, n_clusters):
""" Update Cluster Centers:
calculate the mean of feature vectors for each cluster
"""
centers = np.empty(shape=(n_clusters, X.shape[1]), dtype=float)
sse = np.empty(shape=n_clusters, dtype=float)
for clust_id in range(n_clusters):
memb_ids = np.where(membs == clust_id)[0]
if memb_ids.shape[0] == 0:
memb_ids = np.random.choice(X.shape[0], size=1)
#print("Empty cluster replaced with ", memb_ids)
centers[clust_id,:] = np.mean(X[memb_ids,:], axis=0)
sse[clust_id] = _cal_dist2center(X[memb_ids,:], centers[clust_id,:])
return(centers, sse)
def _kmeans_run(X, n_clusters, max_iter, tol):
""" Run a single trial of k-means clustering
on dataset X, and given number of clusters
"""
membs = np.empty(shape=X.shape[0], dtype=int)
centers = _kmeans_init(X, n_clusters)
sse_last = 9999.9
n_iter = 0
for it in range(1,max_iter):
membs = _assign_clusters(X, centers)
centers,sse_arr = _update_centers(X, membs, n_clusters)
sse_total = np.sum(sse_arr)
if np.abs(sse_total - sse_last) < tol:
n_iter = it
break
sse_last = sse_total
return(centers, membs, sse_total, sse_arr, n_iter)
def _kmeans(X, n_clusters, max_iter, n_trials, tol):
""" Run multiple trials of k-means clustering,
and outputt he best centers, and cluster labels
"""
n_samples, n_features = X.shape[0], X.shape[1]
centers_best = np.empty(shape=(n_clusters,n_features), dtype=float)
labels_best = np.empty(shape=n_samples, dtype=int)
for i in range(n_trials):
centers, labels, sse_tot, sse_arr, n_iter = _kmeans_run(X, n_clusters, max_iter, tol)
if i==0:
sse_tot_best = sse_tot
sse_arr_best = sse_arr
n_iter_best = n_iter
centers_best = centers.copy()
labels_best = labels.copy()
if sse_tot < sse_tot_best:
sse_tot_best = sse_tot
sse_arr_best = sse_arr
n_iter_best = n_iter
centers_best = centers.copy()
labels_best = labels.copy()
return(centers_best, labels_best, sse_arr_best, n_iter_best)
class KMeans(object):
"""
KMeans Clustering
Parameters
-------
n_clusters: number of clusters (default = 2)
n_trials: number of trial random centroid initialization (default = 10)
max_iter: maximum number of iterations (default = 100)
tol: tolerance (default = 0.0001)
Attibutes
-------
labels_ : cluster labels for each data item
centers_ : cluster centers
sse_arr_ : array of SSE values for each cluster
n_iter_ : number of iterations for the best trial
Methods
-------
fit(X): fit the model
fit_predict(X): fit the model and return the cluster labels
"""
def __init__(self, n_clusters=2, n_trials=10, max_iter=100, tol=0.001):
self.n_clusters = n_clusters
self.n_trials = n_trials
self.max_iter = max_iter
self.tol = tol
def fit(self, X):
""" Apply KMeans Clustering
X: dataset with feature vectors
"""
self.centers_, self.labels_, self.sse_arr_, self.n_iter_ = \
_kmeans(X, self.n_clusters, self.max_iter, self.n_trials, self.tol)
def fit_predict(self, X):
""" Apply KMeans Clustering,
and return cluster labels
"""
self.fit(X)
return(self.labels_)
|
vmirly/pyclust | pyclust/_kmeans.py | _cal_dist2center | python | def _cal_dist2center(X, center):
dmemb2cen = scipy.spatial.distance.cdist(X, center.reshape(1,X.shape[1]), metric='seuclidean')
return(np.sum(dmemb2cen)) | Calculate the SSE to the cluster center | train | https://github.com/vmirly/pyclust/blob/bdb12be4649e70c6c90da2605bc5f4b314e2d07e/pyclust/_kmeans.py#L32-L36 | null | import numpy as np
import scipy.spatial
def _kmeans_init(X, n_clusters, method='balanced', rng=None):
""" Initialize k=n_clusters centroids randomly
"""
n_samples = X.shape[0]
if rng is None:
cent_idx = np.random.choice(n_samples, replace=False, size=n_clusters)
else:
#print('Generate random centers using RNG')
cent_idx = rng.choice(n_samples, replace=False, size=n_clusters)
centers = X[cent_idx,:]
mean_X = np.mean(X, axis=0)
if method == 'balanced':
centers[n_clusters-1] = n_clusters*mean_X - np.sum(centers[:(n_clusters-1)], axis=0)
return (centers)
def _assign_clusters(X, centers):
""" Assignment Step:
assign each point to the closet cluster center
"""
dist2cents = scipy.spatial.distance.cdist(X, centers, metric='seuclidean')
membs = np.argmin(dist2cents, axis=1)
return(membs)
def _update_centers(X, membs, n_clusters):
""" Update Cluster Centers:
calculate the mean of feature vectors for each cluster
"""
centers = np.empty(shape=(n_clusters, X.shape[1]), dtype=float)
sse = np.empty(shape=n_clusters, dtype=float)
for clust_id in range(n_clusters):
memb_ids = np.where(membs == clust_id)[0]
if memb_ids.shape[0] == 0:
memb_ids = np.random.choice(X.shape[0], size=1)
#print("Empty cluster replaced with ", memb_ids)
centers[clust_id,:] = np.mean(X[memb_ids,:], axis=0)
sse[clust_id] = _cal_dist2center(X[memb_ids,:], centers[clust_id,:])
return(centers, sse)
def _kmeans_run(X, n_clusters, max_iter, tol):
""" Run a single trial of k-means clustering
on dataset X, and given number of clusters
"""
membs = np.empty(shape=X.shape[0], dtype=int)
centers = _kmeans_init(X, n_clusters)
sse_last = 9999.9
n_iter = 0
for it in range(1,max_iter):
membs = _assign_clusters(X, centers)
centers,sse_arr = _update_centers(X, membs, n_clusters)
sse_total = np.sum(sse_arr)
if np.abs(sse_total - sse_last) < tol:
n_iter = it
break
sse_last = sse_total
return(centers, membs, sse_total, sse_arr, n_iter)
def _kmeans(X, n_clusters, max_iter, n_trials, tol):
""" Run multiple trials of k-means clustering,
and outputt he best centers, and cluster labels
"""
n_samples, n_features = X.shape[0], X.shape[1]
centers_best = np.empty(shape=(n_clusters,n_features), dtype=float)
labels_best = np.empty(shape=n_samples, dtype=int)
for i in range(n_trials):
centers, labels, sse_tot, sse_arr, n_iter = _kmeans_run(X, n_clusters, max_iter, tol)
if i==0:
sse_tot_best = sse_tot
sse_arr_best = sse_arr
n_iter_best = n_iter
centers_best = centers.copy()
labels_best = labels.copy()
if sse_tot < sse_tot_best:
sse_tot_best = sse_tot
sse_arr_best = sse_arr
n_iter_best = n_iter
centers_best = centers.copy()
labels_best = labels.copy()
return(centers_best, labels_best, sse_arr_best, n_iter_best)
class KMeans(object):
"""
KMeans Clustering
Parameters
-------
n_clusters: number of clusters (default = 2)
n_trials: number of trial random centroid initialization (default = 10)
max_iter: maximum number of iterations (default = 100)
tol: tolerance (default = 0.0001)
Attibutes
-------
labels_ : cluster labels for each data item
centers_ : cluster centers
sse_arr_ : array of SSE values for each cluster
n_iter_ : number of iterations for the best trial
Methods
-------
fit(X): fit the model
fit_predict(X): fit the model and return the cluster labels
"""
def __init__(self, n_clusters=2, n_trials=10, max_iter=100, tol=0.001):
self.n_clusters = n_clusters
self.n_trials = n_trials
self.max_iter = max_iter
self.tol = tol
def fit(self, X):
""" Apply KMeans Clustering
X: dataset with feature vectors
"""
self.centers_, self.labels_, self.sse_arr_, self.n_iter_ = \
_kmeans(X, self.n_clusters, self.max_iter, self.n_trials, self.tol)
def fit_predict(self, X):
""" Apply KMeans Clustering,
and return cluster labels
"""
self.fit(X)
return(self.labels_)
|
vmirly/pyclust | pyclust/_kmeans.py | _update_centers | python | def _update_centers(X, membs, n_clusters):
centers = np.empty(shape=(n_clusters, X.shape[1]), dtype=float)
sse = np.empty(shape=n_clusters, dtype=float)
for clust_id in range(n_clusters):
memb_ids = np.where(membs == clust_id)[0]
if memb_ids.shape[0] == 0:
memb_ids = np.random.choice(X.shape[0], size=1)
#print("Empty cluster replaced with ", memb_ids)
centers[clust_id,:] = np.mean(X[memb_ids,:], axis=0)
sse[clust_id] = _cal_dist2center(X[memb_ids,:], centers[clust_id,:])
return(centers, sse) | Update Cluster Centers:
calculate the mean of feature vectors for each cluster | train | https://github.com/vmirly/pyclust/blob/bdb12be4649e70c6c90da2605bc5f4b314e2d07e/pyclust/_kmeans.py#L38-L53 | [
"def _cal_dist2center(X, center):\n \"\"\" Calculate the SSE to the cluster center\n \"\"\"\n dmemb2cen = scipy.spatial.distance.cdist(X, center.reshape(1,X.shape[1]), metric='seuclidean')\n return(np.sum(dmemb2cen))\n"
] | import numpy as np
import scipy.spatial
def _kmeans_init(X, n_clusters, method='balanced', rng=None):
""" Initialize k=n_clusters centroids randomly
"""
n_samples = X.shape[0]
if rng is None:
cent_idx = np.random.choice(n_samples, replace=False, size=n_clusters)
else:
#print('Generate random centers using RNG')
cent_idx = rng.choice(n_samples, replace=False, size=n_clusters)
centers = X[cent_idx,:]
mean_X = np.mean(X, axis=0)
if method == 'balanced':
centers[n_clusters-1] = n_clusters*mean_X - np.sum(centers[:(n_clusters-1)], axis=0)
return (centers)
def _assign_clusters(X, centers):
""" Assignment Step:
assign each point to the closet cluster center
"""
dist2cents = scipy.spatial.distance.cdist(X, centers, metric='seuclidean')
membs = np.argmin(dist2cents, axis=1)
return(membs)
def _cal_dist2center(X, center):
""" Calculate the SSE to the cluster center
"""
dmemb2cen = scipy.spatial.distance.cdist(X, center.reshape(1,X.shape[1]), metric='seuclidean')
return(np.sum(dmemb2cen))
def _update_centers(X, membs, n_clusters):
""" Update Cluster Centers:
calculate the mean of feature vectors for each cluster
"""
centers = np.empty(shape=(n_clusters, X.shape[1]), dtype=float)
sse = np.empty(shape=n_clusters, dtype=float)
for clust_id in range(n_clusters):
memb_ids = np.where(membs == clust_id)[0]
if memb_ids.shape[0] == 0:
memb_ids = np.random.choice(X.shape[0], size=1)
#print("Empty cluster replaced with ", memb_ids)
centers[clust_id,:] = np.mean(X[memb_ids,:], axis=0)
sse[clust_id] = _cal_dist2center(X[memb_ids,:], centers[clust_id,:])
return(centers, sse)
def _kmeans_run(X, n_clusters, max_iter, tol):
""" Run a single trial of k-means clustering
on dataset X, and given number of clusters
"""
membs = np.empty(shape=X.shape[0], dtype=int)
centers = _kmeans_init(X, n_clusters)
sse_last = 9999.9
n_iter = 0
for it in range(1,max_iter):
membs = _assign_clusters(X, centers)
centers,sse_arr = _update_centers(X, membs, n_clusters)
sse_total = np.sum(sse_arr)
if np.abs(sse_total - sse_last) < tol:
n_iter = it
break
sse_last = sse_total
return(centers, membs, sse_total, sse_arr, n_iter)
def _kmeans(X, n_clusters, max_iter, n_trials, tol):
""" Run multiple trials of k-means clustering,
and outputt he best centers, and cluster labels
"""
n_samples, n_features = X.shape[0], X.shape[1]
centers_best = np.empty(shape=(n_clusters,n_features), dtype=float)
labels_best = np.empty(shape=n_samples, dtype=int)
for i in range(n_trials):
centers, labels, sse_tot, sse_arr, n_iter = _kmeans_run(X, n_clusters, max_iter, tol)
if i==0:
sse_tot_best = sse_tot
sse_arr_best = sse_arr
n_iter_best = n_iter
centers_best = centers.copy()
labels_best = labels.copy()
if sse_tot < sse_tot_best:
sse_tot_best = sse_tot
sse_arr_best = sse_arr
n_iter_best = n_iter
centers_best = centers.copy()
labels_best = labels.copy()
return(centers_best, labels_best, sse_arr_best, n_iter_best)
class KMeans(object):
"""
KMeans Clustering
Parameters
-------
n_clusters: number of clusters (default = 2)
n_trials: number of trial random centroid initialization (default = 10)
max_iter: maximum number of iterations (default = 100)
tol: tolerance (default = 0.0001)
Attibutes
-------
labels_ : cluster labels for each data item
centers_ : cluster centers
sse_arr_ : array of SSE values for each cluster
n_iter_ : number of iterations for the best trial
Methods
-------
fit(X): fit the model
fit_predict(X): fit the model and return the cluster labels
"""
def __init__(self, n_clusters=2, n_trials=10, max_iter=100, tol=0.001):
self.n_clusters = n_clusters
self.n_trials = n_trials
self.max_iter = max_iter
self.tol = tol
def fit(self, X):
""" Apply KMeans Clustering
X: dataset with feature vectors
"""
self.centers_, self.labels_, self.sse_arr_, self.n_iter_ = \
_kmeans(X, self.n_clusters, self.max_iter, self.n_trials, self.tol)
def fit_predict(self, X):
""" Apply KMeans Clustering,
and return cluster labels
"""
self.fit(X)
return(self.labels_)
|
vmirly/pyclust | pyclust/_kmeans.py | _kmeans_run | python | def _kmeans_run(X, n_clusters, max_iter, tol):
membs = np.empty(shape=X.shape[0], dtype=int)
centers = _kmeans_init(X, n_clusters)
sse_last = 9999.9
n_iter = 0
for it in range(1,max_iter):
membs = _assign_clusters(X, centers)
centers,sse_arr = _update_centers(X, membs, n_clusters)
sse_total = np.sum(sse_arr)
if np.abs(sse_total - sse_last) < tol:
n_iter = it
break
sse_last = sse_total
return(centers, membs, sse_total, sse_arr, n_iter) | Run a single trial of k-means clustering
on dataset X, and given number of clusters | train | https://github.com/vmirly/pyclust/blob/bdb12be4649e70c6c90da2605bc5f4b314e2d07e/pyclust/_kmeans.py#L57-L75 | [
"def _kmeans_init(X, n_clusters, method='balanced', rng=None):\n \"\"\" Initialize k=n_clusters centroids randomly\n \"\"\"\n n_samples = X.shape[0]\n if rng is None:\n cent_idx = np.random.choice(n_samples, replace=False, size=n_clusters)\n else:\n #print('Generate random centers using RNG')\n cent_idx = rng.choice(n_samples, replace=False, size=n_clusters)\n\n centers = X[cent_idx,:]\n mean_X = np.mean(X, axis=0)\n\n if method == 'balanced':\n centers[n_clusters-1] = n_clusters*mean_X - np.sum(centers[:(n_clusters-1)], axis=0)\n\n return (centers)\n",
"def _assign_clusters(X, centers):\n \"\"\" Assignment Step:\n assign each point to the closet cluster center\n \"\"\"\n dist2cents = scipy.spatial.distance.cdist(X, centers, metric='seuclidean')\n membs = np.argmin(dist2cents, axis=1)\n\n return(membs)\n",
"def _update_centers(X, membs, n_clusters):\n \"\"\" Update Cluster Centers:\n calculate the mean of feature vectors for each cluster\n \"\"\"\n centers = np.empty(shape=(n_clusters, X.shape[1]), dtype=float)\n sse = np.empty(shape=n_clusters, dtype=float)\n for clust_id in range(n_clusters):\n memb_ids = np.where(membs == clust_id)[0]\n\n if memb_ids.shape[0] == 0:\n memb_ids = np.random.choice(X.shape[0], size=1)\n #print(\"Empty cluster replaced with \", memb_ids)\n centers[clust_id,:] = np.mean(X[memb_ids,:], axis=0)\n\n sse[clust_id] = _cal_dist2center(X[memb_ids,:], centers[clust_id,:]) \n return(centers, sse)\n"
] | import numpy as np
import scipy.spatial
def _kmeans_init(X, n_clusters, method='balanced', rng=None):
""" Initialize k=n_clusters centroids randomly
"""
n_samples = X.shape[0]
if rng is None:
cent_idx = np.random.choice(n_samples, replace=False, size=n_clusters)
else:
#print('Generate random centers using RNG')
cent_idx = rng.choice(n_samples, replace=False, size=n_clusters)
centers = X[cent_idx,:]
mean_X = np.mean(X, axis=0)
if method == 'balanced':
centers[n_clusters-1] = n_clusters*mean_X - np.sum(centers[:(n_clusters-1)], axis=0)
return (centers)
def _assign_clusters(X, centers):
""" Assignment Step:
assign each point to the closet cluster center
"""
dist2cents = scipy.spatial.distance.cdist(X, centers, metric='seuclidean')
membs = np.argmin(dist2cents, axis=1)
return(membs)
def _cal_dist2center(X, center):
""" Calculate the SSE to the cluster center
"""
dmemb2cen = scipy.spatial.distance.cdist(X, center.reshape(1,X.shape[1]), metric='seuclidean')
return(np.sum(dmemb2cen))
def _update_centers(X, membs, n_clusters):
""" Update Cluster Centers:
calculate the mean of feature vectors for each cluster
"""
centers = np.empty(shape=(n_clusters, X.shape[1]), dtype=float)
sse = np.empty(shape=n_clusters, dtype=float)
for clust_id in range(n_clusters):
memb_ids = np.where(membs == clust_id)[0]
if memb_ids.shape[0] == 0:
memb_ids = np.random.choice(X.shape[0], size=1)
#print("Empty cluster replaced with ", memb_ids)
centers[clust_id,:] = np.mean(X[memb_ids,:], axis=0)
sse[clust_id] = _cal_dist2center(X[memb_ids,:], centers[clust_id,:])
return(centers, sse)
def _kmeans(X, n_clusters, max_iter, n_trials, tol):
""" Run multiple trials of k-means clustering,
and outputt he best centers, and cluster labels
"""
n_samples, n_features = X.shape[0], X.shape[1]
centers_best = np.empty(shape=(n_clusters,n_features), dtype=float)
labels_best = np.empty(shape=n_samples, dtype=int)
for i in range(n_trials):
centers, labels, sse_tot, sse_arr, n_iter = _kmeans_run(X, n_clusters, max_iter, tol)
if i==0:
sse_tot_best = sse_tot
sse_arr_best = sse_arr
n_iter_best = n_iter
centers_best = centers.copy()
labels_best = labels.copy()
if sse_tot < sse_tot_best:
sse_tot_best = sse_tot
sse_arr_best = sse_arr
n_iter_best = n_iter
centers_best = centers.copy()
labels_best = labels.copy()
return(centers_best, labels_best, sse_arr_best, n_iter_best)
class KMeans(object):
    """K-means clustering with multiple random restarts.

    Parameters
    ----------
    n_clusters : number of clusters (default 2)
    n_trials   : number of random restarts (default 10)
    max_iter   : iteration cap per run (default 100)
    tol        : SSE-change convergence tolerance (default 0.001)

    Attributes (set by fit)
    -----------------------
    labels_  : cluster label per sample
    centers_ : cluster centers
    sse_arr_ : per-cluster SSE values
    n_iter_  : iterations used by the best trial
    """

    def __init__(self, n_clusters=2, n_trials=10, max_iter=100, tol=0.001):
        self.n_clusters = n_clusters
        self.n_trials = n_trials
        self.max_iter = max_iter
        self.tol = tol

    def fit(self, X):
        """Cluster X, storing the results on the instance."""
        (self.centers_, self.labels_,
         self.sse_arr_, self.n_iter_) = _kmeans(
            X, self.n_clusters, self.max_iter, self.n_trials, self.tol)

    def fit_predict(self, X):
        """Cluster X and return the per-sample labels."""
        self.fit(X)
        return self.labels_
|
vmirly/pyclust | pyclust/_kmeans.py | _kmeans | python | def _kmeans(X, n_clusters, max_iter, n_trials, tol):
n_samples, n_features = X.shape[0], X.shape[1]
centers_best = np.empty(shape=(n_clusters,n_features), dtype=float)
labels_best = np.empty(shape=n_samples, dtype=int)
for i in range(n_trials):
centers, labels, sse_tot, sse_arr, n_iter = _kmeans_run(X, n_clusters, max_iter, tol)
if i==0:
sse_tot_best = sse_tot
sse_arr_best = sse_arr
n_iter_best = n_iter
centers_best = centers.copy()
labels_best = labels.copy()
if sse_tot < sse_tot_best:
sse_tot_best = sse_tot
sse_arr_best = sse_arr
n_iter_best = n_iter
centers_best = centers.copy()
labels_best = labels.copy()
return(centers_best, labels_best, sse_arr_best, n_iter_best) | Run multiple trials of k-means clustering,
and output the best centers, and cluster labels | train | https://github.com/vmirly/pyclust/blob/bdb12be4649e70c6c90da2605bc5f4b314e2d07e/pyclust/_kmeans.py#L78-L101 | [
"def _kmeans_run(X, n_clusters, max_iter, tol):\n \"\"\" Run a single trial of k-means clustering\n on dataset X, and given number of clusters\n \"\"\"\n membs = np.empty(shape=X.shape[0], dtype=int)\n centers = _kmeans_init(X, n_clusters)\n\n sse_last = 9999.9\n n_iter = 0\n for it in range(1,max_iter):\n membs = _assign_clusters(X, centers)\n centers,sse_arr = _update_centers(X, membs, n_clusters)\n sse_total = np.sum(sse_arr)\n if np.abs(sse_total - sse_last) < tol:\n n_iter = it\n break\n sse_last = sse_total\n\n return(centers, membs, sse_total, sse_arr, n_iter)\n"
] | import numpy as np
import scipy.spatial
def _kmeans_init(X, n_clusters, method='balanced', rng=None):
""" Initialize k=n_clusters centroids randomly
"""
n_samples = X.shape[0]
if rng is None:
cent_idx = np.random.choice(n_samples, replace=False, size=n_clusters)
else:
#print('Generate random centers using RNG')
cent_idx = rng.choice(n_samples, replace=False, size=n_clusters)
centers = X[cent_idx,:]
mean_X = np.mean(X, axis=0)
if method == 'balanced':
centers[n_clusters-1] = n_clusters*mean_X - np.sum(centers[:(n_clusters-1)], axis=0)
return (centers)
def _assign_clusters(X, centers):
""" Assignment Step:
assign each point to the closet cluster center
"""
dist2cents = scipy.spatial.distance.cdist(X, centers, metric='seuclidean')
membs = np.argmin(dist2cents, axis=1)
return(membs)
def _cal_dist2center(X, center):
""" Calculate the SSE to the cluster center
"""
dmemb2cen = scipy.spatial.distance.cdist(X, center.reshape(1,X.shape[1]), metric='seuclidean')
return(np.sum(dmemb2cen))
def _update_centers(X, membs, n_clusters):
    """Update step: recompute each cluster center as its members' mean.

    An empty cluster is re-seeded with a single randomly chosen sample
    (note: uses the global numpy RNG, so this path is nondeterministic).

    Returns:
        (centers, sse): center matrix and per-cluster dispersion array,
        where sse[k] is the summed seuclidean distance of cluster k's
        members to its center (via _cal_dist2center).
    """
    n_features = X.shape[1]
    centers = np.empty((n_clusters, n_features), dtype=float)
    sse = np.empty(n_clusters, dtype=float)
    for k in range(n_clusters):
        members = np.where(membs == k)[0]
        if members.shape[0] == 0:
            # Re-seed an empty cluster from one random data point.
            members = np.random.choice(X.shape[0], size=1)
        centers[k, :] = np.mean(X[members, :], axis=0)
        sse[k] = _cal_dist2center(X[members, :], centers[k, :])
    return centers, sse
def _kmeans_run(X, n_clusters, max_iter, tol):
    """ Run a single trial of k-means clustering
        on dataset X, and given number of clusters
    """
    # Labels array is allocated up front; it is fully overwritten by the
    # first assignment step inside the loop.
    membs = np.empty(shape=X.shape[0], dtype=int)
    # Initial centroids drawn from the data (see _kmeans_init).
    centers = _kmeans_init(X, n_clusters)

    # Sentinel "previous SSE" so the first convergence test never fires.
    sse_last = 9999.9
    n_iter = 0
    # NOTE(review): range(1, max_iter) performs at most max_iter-1 update
    # rounds; if max_iter <= 1 the body never runs and sse_total below is
    # unbound -- presumably callers always pass max_iter >= 2, confirm.
    # Also n_iter stays 0 when the loop exhausts without converging.
    for it in range(1,max_iter):
        # Alternate assignment and update steps until the total SSE
        # changes by less than tol between consecutive rounds.
        membs = _assign_clusters(X, centers)
        centers,sse_arr = _update_centers(X, membs, n_clusters)
        sse_total = np.sum(sse_arr)
        if np.abs(sse_total - sse_last) < tol:
            n_iter = it
            break
        sse_last = sse_total

    return(centers, membs, sse_total, sse_arr, n_iter)
class KMeans(object):
    """K-means clustering with multiple random restarts.

    Parameters
    ----------
    n_clusters : number of clusters (default 2)
    n_trials   : number of random restarts (default 10)
    max_iter   : iteration cap per run (default 100)
    tol        : SSE-change convergence tolerance (default 0.001)

    Attributes (set by fit)
    -----------------------
    labels_  : cluster label per sample
    centers_ : cluster centers
    sse_arr_ : per-cluster SSE values
    n_iter_  : iterations used by the best trial
    """

    def __init__(self, n_clusters=2, n_trials=10, max_iter=100, tol=0.001):
        self.n_clusters = n_clusters
        self.n_trials = n_trials
        self.max_iter = max_iter
        self.tol = tol

    def fit(self, X):
        """Cluster X, storing the results on the instance."""
        (self.centers_, self.labels_,
         self.sse_arr_, self.n_iter_) = _kmeans(
            X, self.n_clusters, self.max_iter, self.n_trials, self.tol)

    def fit_predict(self, X):
        """Cluster X and return the per-sample labels."""
        self.fit(X)
        return self.labels_
|
vmirly/pyclust | pyclust/_kmeans.py | KMeans.fit | python | def fit(self, X):
self.centers_, self.labels_, self.sse_arr_, self.n_iter_ = \
_kmeans(X, self.n_clusters, self.max_iter, self.n_trials, self.tol) | Apply KMeans Clustering
X: dataset with feature vectors | train | https://github.com/vmirly/pyclust/blob/bdb12be4649e70c6c90da2605bc5f4b314e2d07e/pyclust/_kmeans.py#L137-L142 | [
"def _kmeans(X, n_clusters, max_iter, n_trials, tol):\n \"\"\" Run multiple trials of k-means clustering,\n and outputt he best centers, and cluster labels\n \"\"\"\n n_samples, n_features = X.shape[0], X.shape[1]\n\n centers_best = np.empty(shape=(n_clusters,n_features), dtype=float)\n labels_best = np.empty(shape=n_samples, dtype=int)\n for i in range(n_trials):\n centers, labels, sse_tot, sse_arr, n_iter = _kmeans_run(X, n_clusters, max_iter, tol)\n if i==0:\n sse_tot_best = sse_tot\n sse_arr_best = sse_arr\n n_iter_best = n_iter\n centers_best = centers.copy()\n labels_best = labels.copy()\n if sse_tot < sse_tot_best:\n sse_tot_best = sse_tot\n sse_arr_best = sse_arr\n n_iter_best = n_iter\n centers_best = centers.copy()\n labels_best = labels.copy()\n\n return(centers_best, labels_best, sse_arr_best, n_iter_best)\n"
] | class KMeans(object):
"""
KMeans Clustering
Parameters
-------
n_clusters: number of clusters (default = 2)
n_trials: number of trial random centroid initialization (default = 10)
max_iter: maximum number of iterations (default = 100)
tol: tolerance (default = 0.0001)
Attibutes
-------
labels_ : cluster labels for each data item
centers_ : cluster centers
sse_arr_ : array of SSE values for each cluster
n_iter_ : number of iterations for the best trial
Methods
-------
fit(X): fit the model
fit_predict(X): fit the model and return the cluster labels
"""
def __init__(self, n_clusters=2, n_trials=10, max_iter=100, tol=0.001):
self.n_clusters = n_clusters
self.n_trials = n_trials
self.max_iter = max_iter
self.tol = tol
def fit_predict(self, X):
""" Apply KMeans Clustering,
and return cluster labels
"""
self.fit(X)
return(self.labels_)
|
vmirly/pyclust | pyclust/_bisect_kmeans.py | _cut_tree | python | def _cut_tree(tree, n_clusters, membs):
## starting from root,
## a node is added to the cut_set or
## its children are added to node_set
assert(n_clusters >= 2)
assert(n_clusters <= len(tree.leaves()))
cut_centers = dict() #np.empty(shape=(n_clusters, ndim), dtype=float)
for i in range(n_clusters-1):
if i==0:
search_set = set(tree.children(0))
node_set,cut_set = set(), set()
else:
search_set = node_set.union(cut_set)
node_set,cut_set = set(), set()
if i+2 == n_clusters:
cut_set = search_set
else:
for _ in range(len(search_set)):
n = search_set.pop()
if n.data['ilev'] is None or n.data['ilev']>i+2:
cut_set.add(n)
else:
nid = n.identifier
if n.data['ilev']-2==i:
node_set = node_set.union(set(tree.children(nid)))
conv_membs = membs.copy()
for node in cut_set:
nid = node.identifier
label = node.data['label']
cut_centers[label] = node.data['center']
sub_leaves = tree.leaves(nid)
for leaf in sub_leaves:
indx = np.where(conv_membs == leaf)[0]
conv_membs[indx] = nid
return(conv_membs, cut_centers) | Cut the tree to get desired number of clusters as n_clusters
2 <= n_desired <= n_clusters | train | https://github.com/vmirly/pyclust/blob/bdb12be4649e70c6c90da2605bc5f4b314e2d07e/pyclust/_bisect_kmeans.py#L27-L70 | null | import numpy as np
import treelib
from . import _kmeans
def _select_cluster_2_split(membs, tree):
leaf_nodes = tree.leaves()
num_leaves = len(leaf_nodes)
if len(leaf_nodes)>1:
sse_arr = np.empty(shape=num_leaves, dtype=float)
labels = np.empty(shape=num_leaves, dtype=int)
for i,node in enumerate(leaf_nodes):
sse_arr[i] = node.data['sse']
labels[i] = node.data['label']
id_max = np.argmax(sse_arr)
clust_id = labels[id_max]
memb_ids = np.where(membs == clust_id)[0]
return(clust_id,memb_ids)
else:
return(0,np.arange(membs.shape[0]))
def _cut_tree(tree, n_clusters, membs):
    """ Cut the tree to get desired number of clusters as n_clusters
        2 <= n_desired <= n_clusters
    """
    ## starting from root,
    ## a node is added to the cut_set or
    ## its children are added to node_set
    assert(n_clusters >= 2)
    assert(n_clusters <= len(tree.leaves()))

    cut_centers = dict() #np.empty(shape=(n_clusters, ndim), dtype=float)
    for i in range(n_clusters-1):
        if i==0:
            # First pass: start the search from the root's two children.
            search_set = set(tree.children(0))
            node_set,cut_set = set(), set()
        else:
            # Later passes re-examine everything retained so far.
            search_set = node_set.union(cut_set)
            node_set,cut_set = set(), set()

        if i+2 == n_clusters:
            # Desired cluster count reached: everything still under
            # consideration becomes a final cluster.
            cut_set = search_set
        else:
            for _ in range(len(search_set)):
                n = search_set.pop()
                # 'ilev' is the split level stamped by _add_tree_node;
                # None means the node was never split (it is a leaf).
                if n.data['ilev'] is None or n.data['ilev']>i+2:
                    cut_set.add(n)
                else:
                    nid = n.identifier
                    if n.data['ilev']-2==i:
                        # Node split exactly at this level: descend into
                        # its two children.
                        node_set = node_set.union(set(tree.children(nid)))

    # Relabel members: each point inherits the identifier of the cut node
    # whose subtree contains its original leaf cluster.
    conv_membs = membs.copy()
    for node in cut_set:
        nid = node.identifier
        label = node.data['label']
        cut_centers[label] = node.data['center']
        sub_leaves = tree.leaves(nid)
        for leaf in sub_leaves:
            # NOTE(review): compares membership labels against tree Node
            # objects -- presumably relies on node/label equality; verify
            # against treelib's Node semantics.
            indx = np.where(conv_membs == leaf)[0]
            conv_membs[indx] = nid
    return(conv_membs, cut_centers)
def _add_tree_node(tree, label, ilev, X=None, size=None, center=None, sse=None, parent=None):
""" Add a node to the tree
if parent is not known, the node is a root
The nodes of this tree keep properties of each cluster/subcluster:
size --> cluster size as the number of points in the cluster
center --> mean of the cluster
label --> cluster label
sse --> sum-squared-error for that single cluster
ilev --> the level at which this node is split into 2 children
"""
if size is None:
size = X.shape[0]
if (center is None):
center = np.mean(X, axis=0)
if (sse is None):
sse = _kmeans._cal_dist2center(X, center)
center = list(center)
datadict = {
'size' : size,
'center': center,
'label' : label,
'sse' : sse,
'ilev' : None
}
if (parent is None):
tree.create_node(label, label, data=datadict)
else:
tree.create_node(label, label, parent=parent, data=datadict)
tree.get_node(parent).data['ilev'] = ilev
return(tree)
def _bisect_kmeans(X, n_clusters, n_trials, max_iter, tol):
    """ Apply Bisecting Kmeans clustering
        to reach n_clusters number of clusters
    """
    # membs starts uninitialized; the first selection below returns all
    # indices (single-leaf tree), after which every entry is overwritten.
    membs = np.empty(shape=X.shape[0], dtype=int)
    centers = dict() #np.empty(shape=(n_clusters,X.shape[1]), dtype=float)
    sse_arr = dict() #-1.0*np.ones(shape=n_clusters, dtype=float)

    ## data structure to store cluster hierarchies
    tree = treelib.Tree()
    tree = _add_tree_node(tree, 0, ilev=0, X=X)

    # One 2-means model is reused for every bisection step.
    km = _kmeans.KMeans(n_clusters=2, n_trials=n_trials, max_iter=max_iter, tol=tol)
    for i in range(1,n_clusters):
        # Pick the worst (largest-SSE) leaf cluster and split it in two.
        sel_clust_id,sel_memb_ids = _select_cluster_2_split(membs, tree)
        X_sub = X[sel_memb_ids,:]
        km.fit(X_sub)
        #print("Bisecting Step %d :"%i, sel_clust_id, km.sse_arr_, km.centers_)
        ## Updating the clusters & properties
        #sse_arr[[sel_clust_id,i]] = km.sse_arr_
        #centers[[sel_clust_id,i]] = km.centers_
        # The two halves become children 2*i-1 and 2*i of the split node.
        tree = _add_tree_node(tree, 2*i-1, i, \
                             size=np.sum(km.labels_ == 0), center=km.centers_[0], \
                             sse=km.sse_arr_[0], parent= sel_clust_id)
        tree = _add_tree_node(tree, 2*i, i, \
                             size=np.sum(km.labels_ == 1), center=km.centers_[1], \
                             sse=km.sse_arr_[1], parent= sel_clust_id)

        # Remap the 0/1 labels from the 2-means run onto the new global
        # labels; 1 is rewritten before 0 so the values never collide.
        pred_labels = km.labels_
        pred_labels[np.where(pred_labels == 1)[0]] = 2*i
        pred_labels[np.where(pred_labels == 0)[0]] = 2*i - 1
        #if sel_clust_id == 1:
        #    pred_labels[np.where(pred_labels == 0)[0]] = sel_clust_id
        #    pred_labels[np.where(pred_labels == 1)[0]] = i
        #else:
        #    pred_labels[np.where(pred_labels == 1)[0]] = i
        #    pred_labels[np.where(pred_labels == 0)[0]] = sel_clust_id

        membs[sel_memb_ids] = pred_labels

    # Collect the final (leaf) clusters' centers and SSEs keyed by label.
    for n in tree.leaves():
        label = n.data['label']
        centers[label] = n.data['center']
        sse_arr[label] = n.data['sse']

    return(centers, membs, sse_arr, tree)
class BisectKMeans(object):
    """Bisecting k-means: repeatedly split the worst cluster in two.

    Parameters
    ----------
    n_clusters : target number of clusters (must be >= 2; default 2)
    n_trials   : k-means restarts per bisection (default 10)
    max_iter   : k-means iteration cap (default 100)
    tol        : k-means convergence tolerance (default 0.0001)

    Attributes (set by fit)
    -----------------------
    labels_  : cluster label per sample
    centers_ : dict of cluster centers keyed by label
    sse_arr_ : dict of per-cluster SSE values keyed by label
    tree_    : treelib hierarchy of the bisections
    """

    def __init__(self, n_clusters=2, n_trials=10, max_iter=100, tol=0.0001):
        assert n_clusters >= 2, 'n_clusters should be >= 2'
        self.n_clusters = n_clusters
        self.n_trials = n_trials
        self.max_iter = max_iter
        self.tol = tol

    def fit(self, X):
        """Cluster X, storing the results on the instance."""
        (self.centers_, self.labels_,
         self.sse_arr_, self.tree_) = _bisect_kmeans(
            X, self.n_clusters, self.n_trials, self.max_iter, self.tol)

    def fit_predict(self, X):
        """Cluster X and return the per-sample labels."""
        self.fit(X)
        return self.labels_

    def cut(self, n_desired):
        """Re-cut the fitted hierarchy into n_desired clusters."""
        return _cut_tree(self.tree_, n_desired, self.labels_)
|
vmirly/pyclust | pyclust/_bisect_kmeans.py | _add_tree_node | python | def _add_tree_node(tree, label, ilev, X=None, size=None, center=None, sse=None, parent=None):
if size is None:
size = X.shape[0]
if (center is None):
center = np.mean(X, axis=0)
if (sse is None):
sse = _kmeans._cal_dist2center(X, center)
center = list(center)
datadict = {
'size' : size,
'center': center,
'label' : label,
'sse' : sse,
'ilev' : None
}
if (parent is None):
tree.create_node(label, label, data=datadict)
else:
tree.create_node(label, label, parent=parent, data=datadict)
tree.get_node(parent).data['ilev'] = ilev
return(tree) | Add a node to the tree
if parent is not known, the node is a root
The nodes of this tree keep properties of each cluster/subcluster:
size --> cluster size as the number of points in the cluster
center --> mean of the cluster
label --> cluster label
sse --> sum-squared-error for that single cluster
ilev --> the level at which this node is split into 2 children | train | https://github.com/vmirly/pyclust/blob/bdb12be4649e70c6c90da2605bc5f4b314e2d07e/pyclust/_bisect_kmeans.py#L73-L105 | null | import numpy as np
import treelib
from . import _kmeans
def _select_cluster_2_split(membs, tree):
leaf_nodes = tree.leaves()
num_leaves = len(leaf_nodes)
if len(leaf_nodes)>1:
sse_arr = np.empty(shape=num_leaves, dtype=float)
labels = np.empty(shape=num_leaves, dtype=int)
for i,node in enumerate(leaf_nodes):
sse_arr[i] = node.data['sse']
labels[i] = node.data['label']
id_max = np.argmax(sse_arr)
clust_id = labels[id_max]
memb_ids = np.where(membs == clust_id)[0]
return(clust_id,memb_ids)
else:
return(0,np.arange(membs.shape[0]))
def _cut_tree(tree, n_clusters, membs):
    """ Cut the tree to get desired number of clusters as n_clusters
        2 <= n_desired <= n_clusters
    """
    ## starting from root,
    ## a node is added to the cut_set or
    ## its children are added to node_set
    assert(n_clusters >= 2)
    assert(n_clusters <= len(tree.leaves()))

    cut_centers = dict() #np.empty(shape=(n_clusters, ndim), dtype=float)
    for i in range(n_clusters-1):
        if i==0:
            # First pass: start the search from the root's two children.
            search_set = set(tree.children(0))
            node_set,cut_set = set(), set()
        else:
            # Later passes re-examine everything retained so far.
            search_set = node_set.union(cut_set)
            node_set,cut_set = set(), set()

        if i+2 == n_clusters:
            # Desired cluster count reached: everything still under
            # consideration becomes a final cluster.
            cut_set = search_set
        else:
            for _ in range(len(search_set)):
                n = search_set.pop()
                # 'ilev' is the split level stamped by _add_tree_node;
                # None means the node was never split (it is a leaf).
                if n.data['ilev'] is None or n.data['ilev']>i+2:
                    cut_set.add(n)
                else:
                    nid = n.identifier
                    if n.data['ilev']-2==i:
                        # Node split exactly at this level: descend into
                        # its two children.
                        node_set = node_set.union(set(tree.children(nid)))

    # Relabel members: each point inherits the identifier of the cut node
    # whose subtree contains its original leaf cluster.
    conv_membs = membs.copy()
    for node in cut_set:
        nid = node.identifier
        label = node.data['label']
        cut_centers[label] = node.data['center']
        sub_leaves = tree.leaves(nid)
        for leaf in sub_leaves:
            # NOTE(review): compares membership labels against tree Node
            # objects -- presumably relies on node/label equality; verify
            # against treelib's Node semantics.
            indx = np.where(conv_membs == leaf)[0]
            conv_membs[indx] = nid
    return(conv_membs, cut_centers)
def _bisect_kmeans(X, n_clusters, n_trials, max_iter, tol):
    """ Apply Bisecting Kmeans clustering
        to reach n_clusters number of clusters
    """
    # membs starts uninitialized; the first selection below returns all
    # indices (single-leaf tree), after which every entry is overwritten.
    membs = np.empty(shape=X.shape[0], dtype=int)
    centers = dict() #np.empty(shape=(n_clusters,X.shape[1]), dtype=float)
    sse_arr = dict() #-1.0*np.ones(shape=n_clusters, dtype=float)

    ## data structure to store cluster hierarchies
    tree = treelib.Tree()
    tree = _add_tree_node(tree, 0, ilev=0, X=X)

    # One 2-means model is reused for every bisection step.
    km = _kmeans.KMeans(n_clusters=2, n_trials=n_trials, max_iter=max_iter, tol=tol)
    for i in range(1,n_clusters):
        # Pick the worst (largest-SSE) leaf cluster and split it in two.
        sel_clust_id,sel_memb_ids = _select_cluster_2_split(membs, tree)
        X_sub = X[sel_memb_ids,:]
        km.fit(X_sub)
        #print("Bisecting Step %d :"%i, sel_clust_id, km.sse_arr_, km.centers_)
        ## Updating the clusters & properties
        #sse_arr[[sel_clust_id,i]] = km.sse_arr_
        #centers[[sel_clust_id,i]] = km.centers_
        # The two halves become children 2*i-1 and 2*i of the split node.
        tree = _add_tree_node(tree, 2*i-1, i, \
                             size=np.sum(km.labels_ == 0), center=km.centers_[0], \
                             sse=km.sse_arr_[0], parent= sel_clust_id)
        tree = _add_tree_node(tree, 2*i, i, \
                             size=np.sum(km.labels_ == 1), center=km.centers_[1], \
                             sse=km.sse_arr_[1], parent= sel_clust_id)

        # Remap the 0/1 labels from the 2-means run onto the new global
        # labels; 1 is rewritten before 0 so the values never collide.
        pred_labels = km.labels_
        pred_labels[np.where(pred_labels == 1)[0]] = 2*i
        pred_labels[np.where(pred_labels == 0)[0]] = 2*i - 1
        #if sel_clust_id == 1:
        #    pred_labels[np.where(pred_labels == 0)[0]] = sel_clust_id
        #    pred_labels[np.where(pred_labels == 1)[0]] = i
        #else:
        #    pred_labels[np.where(pred_labels == 1)[0]] = i
        #    pred_labels[np.where(pred_labels == 0)[0]] = sel_clust_id

        membs[sel_memb_ids] = pred_labels

    # Collect the final (leaf) clusters' centers and SSEs keyed by label.
    for n in tree.leaves():
        label = n.data['label']
        centers[label] = n.data['center']
        sse_arr[label] = n.data['sse']

    return(centers, membs, sse_arr, tree)
class BisectKMeans(object):
    """Bisecting k-means: repeatedly split the worst cluster in two.

    Parameters
    ----------
    n_clusters : target number of clusters (must be >= 2; default 2)
    n_trials   : k-means restarts per bisection (default 10)
    max_iter   : k-means iteration cap (default 100)
    tol        : k-means convergence tolerance (default 0.0001)

    Attributes (set by fit)
    -----------------------
    labels_  : cluster label per sample
    centers_ : dict of cluster centers keyed by label
    sse_arr_ : dict of per-cluster SSE values keyed by label
    tree_    : treelib hierarchy of the bisections
    """

    def __init__(self, n_clusters=2, n_trials=10, max_iter=100, tol=0.0001):
        assert n_clusters >= 2, 'n_clusters should be >= 2'
        self.n_clusters = n_clusters
        self.n_trials = n_trials
        self.max_iter = max_iter
        self.tol = tol

    def fit(self, X):
        """Cluster X, storing the results on the instance."""
        (self.centers_, self.labels_,
         self.sse_arr_, self.tree_) = _bisect_kmeans(
            X, self.n_clusters, self.n_trials, self.max_iter, self.tol)

    def fit_predict(self, X):
        """Cluster X and return the per-sample labels."""
        self.fit(X)
        return self.labels_

    def cut(self, n_desired):
        """Re-cut the fitted hierarchy into n_desired clusters."""
        return _cut_tree(self.tree_, n_desired, self.labels_)
|
vmirly/pyclust | pyclust/_bisect_kmeans.py | _bisect_kmeans | python | def _bisect_kmeans(X, n_clusters, n_trials, max_iter, tol):
membs = np.empty(shape=X.shape[0], dtype=int)
centers = dict() #np.empty(shape=(n_clusters,X.shape[1]), dtype=float)
sse_arr = dict() #-1.0*np.ones(shape=n_clusters, dtype=float)
## data structure to store cluster hierarchies
tree = treelib.Tree()
tree = _add_tree_node(tree, 0, ilev=0, X=X)
km = _kmeans.KMeans(n_clusters=2, n_trials=n_trials, max_iter=max_iter, tol=tol)
for i in range(1,n_clusters):
sel_clust_id,sel_memb_ids = _select_cluster_2_split(membs, tree)
X_sub = X[sel_memb_ids,:]
km.fit(X_sub)
#print("Bisecting Step %d :"%i, sel_clust_id, km.sse_arr_, km.centers_)
## Updating the clusters & properties
#sse_arr[[sel_clust_id,i]] = km.sse_arr_
#centers[[sel_clust_id,i]] = km.centers_
tree = _add_tree_node(tree, 2*i-1, i, \
size=np.sum(km.labels_ == 0), center=km.centers_[0], \
sse=km.sse_arr_[0], parent= sel_clust_id)
tree = _add_tree_node(tree, 2*i, i, \
size=np.sum(km.labels_ == 1), center=km.centers_[1], \
sse=km.sse_arr_[1], parent= sel_clust_id)
pred_labels = km.labels_
pred_labels[np.where(pred_labels == 1)[0]] = 2*i
pred_labels[np.where(pred_labels == 0)[0]] = 2*i - 1
#if sel_clust_id == 1:
# pred_labels[np.where(pred_labels == 0)[0]] = sel_clust_id
# pred_labels[np.where(pred_labels == 1)[0]] = i
#else:
# pred_labels[np.where(pred_labels == 1)[0]] = i
# pred_labels[np.where(pred_labels == 0)[0]] = sel_clust_id
membs[sel_memb_ids] = pred_labels
for n in tree.leaves():
label = n.data['label']
centers[label] = n.data['center']
sse_arr[label] = n.data['sse']
return(centers, membs, sse_arr, tree) | Apply Bisecting Kmeans clustering
to reach n_clusters number of clusters | train | https://github.com/vmirly/pyclust/blob/bdb12be4649e70c6c90da2605bc5f4b314e2d07e/pyclust/_bisect_kmeans.py#L110-L157 | [
"def _select_cluster_2_split(membs, tree):\n leaf_nodes = tree.leaves()\n num_leaves = len(leaf_nodes)\n if len(leaf_nodes)>1:\n sse_arr = np.empty(shape=num_leaves, dtype=float)\n labels = np.empty(shape=num_leaves, dtype=int)\n\n for i,node in enumerate(leaf_nodes):\n sse_arr[i] = node.data['sse']\n labels[i] = node.data['label']\n id_max = np.argmax(sse_arr)\n clust_id = labels[id_max]\n memb_ids = np.where(membs == clust_id)[0]\n return(clust_id,memb_ids)\n else:\n return(0,np.arange(membs.shape[0]))\n",
"def _add_tree_node(tree, label, ilev, X=None, size=None, center=None, sse=None, parent=None):\n \"\"\" Add a node to the tree\n if parent is not known, the node is a root\n\n The nodes of this tree keep properties of each cluster/subcluster:\n size --> cluster size as the number of points in the cluster\n center --> mean of the cluster\n label --> cluster label\n sse --> sum-squared-error for that single cluster\n ilev --> the level at which this node is split into 2 children\n \"\"\"\n if size is None:\n size = X.shape[0]\n if (center is None):\n center = np.mean(X, axis=0)\n if (sse is None):\n sse = _kmeans._cal_dist2center(X, center)\n\n center = list(center)\n datadict = {\n 'size' : size,\n 'center': center, \n 'label' : label, \n 'sse' : sse,\n 'ilev' : None \n }\n if (parent is None):\n tree.create_node(label, label, data=datadict)\n else:\n tree.create_node(label, label, parent=parent, data=datadict)\n tree.get_node(parent).data['ilev'] = ilev\n\n return(tree)\n",
"def fit(self, X):\n \"\"\" Apply KMeans Clustering\n X: dataset with feature vectors\n \"\"\"\n self.centers_, self.labels_, self.sse_arr_, self.n_iter_ = \\\n _kmeans(X, self.n_clusters, self.max_iter, self.n_trials, self.tol)\n"
] | import numpy as np
import treelib
from . import _kmeans
def _select_cluster_2_split(membs, tree):
leaf_nodes = tree.leaves()
num_leaves = len(leaf_nodes)
if len(leaf_nodes)>1:
sse_arr = np.empty(shape=num_leaves, dtype=float)
labels = np.empty(shape=num_leaves, dtype=int)
for i,node in enumerate(leaf_nodes):
sse_arr[i] = node.data['sse']
labels[i] = node.data['label']
id_max = np.argmax(sse_arr)
clust_id = labels[id_max]
memb_ids = np.where(membs == clust_id)[0]
return(clust_id,memb_ids)
else:
return(0,np.arange(membs.shape[0]))
def _cut_tree(tree, n_clusters, membs):
    """ Cut the tree to get desired number of clusters as n_clusters
        2 <= n_desired <= n_clusters
    """
    ## starting from root,
    ## a node is added to the cut_set or
    ## its children are added to node_set
    assert(n_clusters >= 2)
    assert(n_clusters <= len(tree.leaves()))

    cut_centers = dict() #np.empty(shape=(n_clusters, ndim), dtype=float)
    for i in range(n_clusters-1):
        if i==0:
            # First pass: start the search from the root's two children.
            search_set = set(tree.children(0))
            node_set,cut_set = set(), set()
        else:
            # Later passes re-examine everything retained so far.
            search_set = node_set.union(cut_set)
            node_set,cut_set = set(), set()

        if i+2 == n_clusters:
            # Desired cluster count reached: everything still under
            # consideration becomes a final cluster.
            cut_set = search_set
        else:
            for _ in range(len(search_set)):
                n = search_set.pop()
                # 'ilev' is the split level stamped by _add_tree_node;
                # None means the node was never split (it is a leaf).
                if n.data['ilev'] is None or n.data['ilev']>i+2:
                    cut_set.add(n)
                else:
                    nid = n.identifier
                    if n.data['ilev']-2==i:
                        # Node split exactly at this level: descend into
                        # its two children.
                        node_set = node_set.union(set(tree.children(nid)))

    # Relabel members: each point inherits the identifier of the cut node
    # whose subtree contains its original leaf cluster.
    conv_membs = membs.copy()
    for node in cut_set:
        nid = node.identifier
        label = node.data['label']
        cut_centers[label] = node.data['center']
        sub_leaves = tree.leaves(nid)
        for leaf in sub_leaves:
            # NOTE(review): compares membership labels against tree Node
            # objects -- presumably relies on node/label equality; verify
            # against treelib's Node semantics.
            indx = np.where(conv_membs == leaf)[0]
            conv_membs[indx] = nid
    return(conv_membs, cut_centers)
def _add_tree_node(tree, label, ilev, X=None, size=None, center=None, sse=None, parent=None):
""" Add a node to the tree
if parent is not known, the node is a root
The nodes of this tree keep properties of each cluster/subcluster:
size --> cluster size as the number of points in the cluster
center --> mean of the cluster
label --> cluster label
sse --> sum-squared-error for that single cluster
ilev --> the level at which this node is split into 2 children
"""
if size is None:
size = X.shape[0]
if (center is None):
center = np.mean(X, axis=0)
if (sse is None):
sse = _kmeans._cal_dist2center(X, center)
center = list(center)
datadict = {
'size' : size,
'center': center,
'label' : label,
'sse' : sse,
'ilev' : None
}
if (parent is None):
tree.create_node(label, label, data=datadict)
else:
tree.create_node(label, label, parent=parent, data=datadict)
tree.get_node(parent).data['ilev'] = ilev
return(tree)
class BisectKMeans(object):
    """Bisecting k-means: repeatedly split the worst cluster in two.

    Parameters
    ----------
    n_clusters : target number of clusters (must be >= 2; default 2)
    n_trials   : k-means restarts per bisection (default 10)
    max_iter   : k-means iteration cap (default 100)
    tol        : k-means convergence tolerance (default 0.0001)

    Attributes (set by fit)
    -----------------------
    labels_  : cluster label per sample
    centers_ : dict of cluster centers keyed by label
    sse_arr_ : dict of per-cluster SSE values keyed by label
    tree_    : treelib hierarchy of the bisections
    """

    def __init__(self, n_clusters=2, n_trials=10, max_iter=100, tol=0.0001):
        assert n_clusters >= 2, 'n_clusters should be >= 2'
        self.n_clusters = n_clusters
        self.n_trials = n_trials
        self.max_iter = max_iter
        self.tol = tol

    def fit(self, X):
        """Cluster X, storing the results on the instance."""
        (self.centers_, self.labels_,
         self.sse_arr_, self.tree_) = _bisect_kmeans(
            X, self.n_clusters, self.n_trials, self.max_iter, self.tol)

    def fit_predict(self, X):
        """Cluster X and return the per-sample labels."""
        self.fit(X)
        return self.labels_

    def cut(self, n_desired):
        """Re-cut the fitted hierarchy into n_desired clusters."""
        return _cut_tree(self.tree_, n_desired, self.labels_)
|
shoeffner/cvloop | cvloop/functions.py | DrawHat.load_hat | python | def load_hat(self, path): # pylint: disable=no-self-use
hat = cv2.imread(path, cv2.IMREAD_UNCHANGED)
if hat is None:
raise ValueError('No hat image found at `{}`'.format(path))
b, g, r, a = cv2.split(hat)
return cv2.merge((r, g, b, a)) | Loads the hat from a picture at path.
Args:
path: The path to load from
Returns:
The hat data. | train | https://github.com/shoeffner/cvloop/blob/3ddd311e9b679d16c8fd36779931380374de343c/cvloop/functions.py#L173-L186 | null | class DrawHat:
"""Draws hats above detected faces.
Uses a Haar cascade for face detection and draws provided hats above the
detected faces.
The default hat (examples/hat.png) is taken from
https://pixabay.com/en/hat-trilby-black-brim-crease-felt-157581/
and was released unter CC0 Public Domain.
"""
    def __init__(self, hat_path=os.path.join(os.curdir, 'hat.png'),
                 cascade_path=os.path.join(
                     OPENCV_CASCADE_PATH, 'haarcascades',
                     'haarcascade_frontalface_default.xml'),
                 w_offset=1.3, x_offset=-20, y_offset=80, draw_box=False):
        # pragma pylint: disable=line-too-long
        """Initializes a `DrawHat` instance.
        Args:
            hat_path: The path to the hat file. Defaults to ./hat.png .
            cascade_path: The path to the face cascade file.
                          Defaults to
                          `cvloop.OPENCV_CASCADE_PATH/haarcascades/haarcascade_frontalface_default.xml`
            w_offset: Hat width additional scaling.
            x_offset: Number of pixels right to move hat.
            y_offset: Number of pixels down to move hat.
            draw_box: If True, draws boxes around detected faces.
        """
        # pragma pylint: enable=line-too-long
        # Placement tuning parameters used by __call__ when compositing.
        self.w_offset = w_offset
        self.x_offset = x_offset
        self.y_offset = y_offset
        self.draw_box = draw_box
        # Haar cascade classifier used by find_faces for face detection.
        self.cascade = cv2.CascadeClassifier(cascade_path)
        # RGBA hat image loaded once and reused for every frame.
        self.hat = self.load_hat(hat_path)
    def find_faces(self, image, draw_box=False):
        """Uses a haarcascade to detect faces inside an image.
        Args:
            image: The image.
            draw_box: If True, the image will be marked with a rectangle.
        Return:
            The faces as returned by OpenCV's detectMultiScale method for
            cascades.
        """
        # The cascade operates on grayscale input; frames are RGB here.
        frame_gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
        faces = self.cascade.detectMultiScale(
            frame_gray,
            scaleFactor=1.3,
            minNeighbors=5,
            minSize=(50, 50),
            flags=0)
        if draw_box:
            # Mark each detection directly on the input image (mutates it).
            for x, y, w, h in faces:
                cv2.rectangle(image, (x, y),
                              (x + w, y + h), (0, 255, 0), 2)
        return faces
    def __call__(self, image):  # pylint: disable=too-many-locals
        """Draws a hat on top of detected faces inside the image.
        Args:
            image: The image.
        Returns:
            The image with a hat.
        """
        frame_height = image.shape[0]
        frame_width = image.shape[1]
        faces = self.find_faces(image, self.draw_box)
        for x, y, w, h in faces:  # pylint: disable=unused-variable
            # Work on a copy so resizing never touches the cached hat.
            hat = self.hat.copy()

            # Scale hat to fit face.
            hat_width = int(w * self.w_offset)
            # Keep the hat's original aspect ratio.
            hat_height = int(hat_width * hat.shape[0] / hat.shape[1])
            hat = cv2.resize(hat, (hat_width, hat_height))

            # Clip hat if outside frame.
            # (hat_left/top/right/bottom delimit the visible hat ROI;
            # x0/y0/x1/y1 delimit the matching frame ROI.)
            hat_left = 0
            hat_top = 0
            hat_bottom = hat_height
            hat_right = hat_width

            y0 = y - hat_height + self.y_offset
            if y0 < 0:  # If the hat starts above the frame, clip it.
                hat_top = abs(y0)  # Find beginning of hat ROI.
                y0 = 0
            y1 = y0 + hat_height - hat_top
            if y1 > frame_height:
                hat_bottom = hat_height - (y1 - frame_height)
                y1 = frame_height

            x0 = x + self.x_offset
            if x0 < 0:
                hat_left = abs(x0)
                x0 = 0
            x1 = x0 + hat_width - hat_left
            if x1 > frame_width:
                hat_right = hat_width - (x1 - frame_width)
                x1 = frame_width

            # Remove background from hat image.
            # Alpha-blend per RGB channel: hat * alpha + frame * (1 - alpha),
            # with the hat's alpha channel (index 3) normalized to [0, 1].
            for c in range(0, 3):
                hat_slice = hat[hat_top:hat_bottom, hat_left:hat_right, c] * \
                    (hat[hat_top:hat_bottom, hat_left:hat_right, 3] / 255.0)
                bg_slice = image[y0:y1, x0:x1, c] * \
                    (1.0 - hat[hat_top:hat_bottom, hat_left:hat_right, 3]
                     / 255.0)
                image[y0:y1, x0:x1, c] = hat_slice + bg_slice

        return image
|
shoeffner/cvloop | cvloop/functions.py | DrawHat.find_faces | python | def find_faces(self, image, draw_box=False):
frame_gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
faces = self.cascade.detectMultiScale(
frame_gray,
scaleFactor=1.3,
minNeighbors=5,
minSize=(50, 50),
flags=0)
if draw_box:
for x, y, w, h in faces:
cv2.rectangle(image, (x, y),
(x + w, y + h), (0, 255, 0), 2)
return faces | Uses a haarcascade to detect faces inside an image.
Args:
image: The image.
draw_box: If True, the image will be marked with a rectangle.
Return:
The faces as returned by OpenCV's detectMultiScale method for
cascades. | train | https://github.com/shoeffner/cvloop/blob/3ddd311e9b679d16c8fd36779931380374de343c/cvloop/functions.py#L188-L211 | null | class DrawHat:
"""Draws hats above detected faces.
Uses a Haar cascade for face detection and draws provided hats above the
detected faces.
The default hat (examples/hat.png) is taken from
https://pixabay.com/en/hat-trilby-black-brim-crease-felt-157581/
and was released unter CC0 Public Domain.
"""
def __init__(self, hat_path=os.path.join(os.curdir, 'hat.png'),
cascade_path=os.path.join(
OPENCV_CASCADE_PATH, 'haarcascades',
'haarcascade_frontalface_default.xml'),
w_offset=1.3, x_offset=-20, y_offset=80, draw_box=False):
# pragma pylint: disable=line-too-long
"""Initializes a `DrawHat` instance.
Args:
hat_path: The path to the hat file. Defaults to ./hat.png .
cascade_path: The path to the face cascade file.
Defaults to
`cvloop.OPENCV_CASCADE_PATH/haarcascades/haarcascade_frontalface_default.xml`
w_offset: Hat width additional scaling.
x_offset: Number of pixels right to move hat.
y_offset: Number of pixels down to move hat.
draw_box: If True, draws boxes around detected faces.
"""
# pragma pylint: enable=line-too-long
self.w_offset = w_offset
self.x_offset = x_offset
self.y_offset = y_offset
self.draw_box = draw_box
self.cascade = cv2.CascadeClassifier(cascade_path)
self.hat = self.load_hat(hat_path)
def load_hat(self, path): # pylint: disable=no-self-use
"""Loads the hat from a picture at path.
Args:
path: The path to load from
Returns:
The hat data.
"""
hat = cv2.imread(path, cv2.IMREAD_UNCHANGED)
if hat is None:
raise ValueError('No hat image found at `{}`'.format(path))
b, g, r, a = cv2.split(hat)
return cv2.merge((r, g, b, a))
def __call__(self, image): # pylint: disable=too-many-locals
"""Draws a hat on top of detected faces inside the image.
Args:
image: The image.
Returns:
The image with a hat.
"""
frame_height = image.shape[0]
frame_width = image.shape[1]
faces = self.find_faces(image, self.draw_box)
for x, y, w, h in faces: # pylint: disable=unused-variable
hat = self.hat.copy()
# Scale hat to fit face.
hat_width = int(w * self.w_offset)
hat_height = int(hat_width * hat.shape[0] / hat.shape[1])
hat = cv2.resize(hat, (hat_width, hat_height))
# Clip hat if outside frame.
hat_left = 0
hat_top = 0
hat_bottom = hat_height
hat_right = hat_width
y0 = y - hat_height + self.y_offset
if y0 < 0: # If the hat starts above the frame, clip it.
hat_top = abs(y0) # Find beginning of hat ROI.
y0 = 0
y1 = y0 + hat_height - hat_top
if y1 > frame_height:
hat_bottom = hat_height - (y1 - frame_height)
y1 = frame_height
x0 = x + self.x_offset
if x0 < 0:
hat_left = abs(x0)
x0 = 0
x1 = x0 + hat_width - hat_left
if x1 > frame_width:
hat_right = hat_width - (x1 - frame_width)
x1 = frame_width
# Remove background from hat image.
for c in range(0, 3):
hat_slice = hat[hat_top:hat_bottom, hat_left:hat_right, c] * \
(hat[hat_top:hat_bottom, hat_left:hat_right, 3] / 255.0)
bg_slice = image[y0:y1, x0:x1, c] * \
(1.0 - hat[hat_top:hat_bottom, hat_left:hat_right, 3]
/ 255.0)
image[y0:y1, x0:x1, c] = hat_slice + bg_slice
return image
|
shoeffner/cvloop | tools/create_functions_ipynb.py | is_mod_function | python | def is_mod_function(mod, fun):
return inspect.isfunction(fun) and inspect.getmodule(fun) == mod | Checks if a function in a module was declared in that module.
http://stackoverflow.com/a/1107150/3004221
Args:
mod: the module
fun: the function | train | https://github.com/shoeffner/cvloop/blob/3ddd311e9b679d16c8fd36779931380374de343c/tools/create_functions_ipynb.py#L15-L24 | null | """This modules creates the example notebook for the cvloop.functions
notebook."""
import inspect
import json
import sys
sys.path.insert(0, '../cvloop')
import cvloop.functions # noqa: E402
GENERATE_ARGS = True
def is_mod_class(mod, cls):
"""Checks if a class in a module was declared in that module.
Args:
mod: the module
cls: the class
"""
return inspect.isclass(cls) and inspect.getmodule(cls) == mod
def list_functions(mod_name):
"""Lists all functions declared in a module.
http://stackoverflow.com/a/1107150/3004221
Args:
mod_name: the module name
Returns:
A list of functions declared in that module.
"""
mod = sys.modules[mod_name]
return [func.__name__ for func in mod.__dict__.values()
if is_mod_function(mod, func)]
def list_classes(mod_name):
"""Lists all classes declared in a module.
Args:
mod_name: the module name
Returns:
A list of functions declared in that module.
"""
mod = sys.modules[mod_name]
return [cls.__name__ for cls in mod.__dict__.values()
if is_mod_class(mod, cls)]
def get_linenumbers(functions, module, searchstr='def {}(image):\n'):
"""Returns a dictionary which maps function names to line numbers.
Args:
functions: a list of function names
module: the module to look the functions up
searchstr: the string to search for
Returns:
A dictionary with functions as keys and their line numbers as values.
"""
lines = inspect.getsourcelines(module)[0]
line_numbers = {}
for function in functions:
try:
line_numbers[function] = lines.index(
searchstr.format(function)) + 1
except ValueError:
print(r'Can not find `{}`'.format(searchstr.format(function)))
line_numbers[function] = 0
return line_numbers
def format_doc(fun):
"""Formats the documentation in a nicer way and for notebook cells."""
SEPARATOR = '============================='
func = cvloop.functions.__dict__[fun]
doc_lines = ['{}'.format(l).strip() for l in func.__doc__.split('\n')]
if hasattr(func, '__init__'):
doc_lines.append(SEPARATOR)
doc_lines += ['{}'.format(l).strip() for l in
func.__init__.__doc__.split('\n')]
mod_lines = []
argblock = False
returnblock = False
for line in doc_lines:
if line == SEPARATOR:
mod_lines.append('\n#### `{}.__init__(...)`:\n\n'.format(fun))
elif 'Args:' in line:
argblock = True
if GENERATE_ARGS:
mod_lines.append('**{}**\n'.format(line))
elif 'Returns:' in line:
returnblock = True
mod_lines.append('\n**{}**'.format(line))
elif not argblock and not returnblock:
mod_lines.append('{}\n'.format(line))
elif argblock and not returnblock and ':' in line:
if GENERATE_ARGS:
mod_lines.append('- *{}:* {}\n'.format(
*line.split(':')))
elif returnblock:
mod_lines.append(line)
else:
mod_lines.append('{}\n'.format(line))
return mod_lines
def create_description_cell(fun, line_number):
"""Creates a markdown cell with a title and the help doc string of a
function."""
return {
'cell_type': 'markdown',
'metadata': {},
'source': [
'## `cvloop.functions.{}` '.format(fun),
'<small>[[Source](https://github.com/shoeffner/cvloop/blob/',
'develop/cvloop/functions.py#L{})]</small>\n\n'
.format(line_number),
*format_doc(fun),
]
}
def create_code_cell(fun, isclass=False):
"""Creates a code cell which uses a simple cvloop and embeds the function
in question.
Args:
isclass: Defaults to False. If True, an instance will be created inside
the code cell.
"""
return {
'cell_type': 'code',
'metadata': {},
'outputs': [],
'execution_count': None,
'source': [
'from cvloop import cvloop, {}\n'.format(fun),
'cvloop(function={}{}, side_by_side=True)'.format(fun, '()' if
isclass else '')
]
}
def main():
"""Main function creates the cvloop.functions example notebook."""
notebook = {
'cells': [
{
'cell_type': 'markdown',
'metadata': {},
'source': [
'# cvloop functions\n\n',
'This notebook shows an overview over all cvloop ',
'functions provided in the [`cvloop.functions` module](',
'https://github.com/shoeffner/cvloop/blob/',
'develop/cvloop/functions.py).'
]
},
],
'nbformat': 4,
'nbformat_minor': 1,
'metadata': {
'language_info': {
'codemirror_mode': {
'name': 'ipython',
'version': 3
},
'file_extension': '.py',
'mimetype': 'text/x-python',
'name': 'python',
'nbconvert_exporter': 'python',
'pygments_lexer': 'ipython3',
'version': '3.5.1+'
}
}
}
classes = list_classes('cvloop.functions')
functions = list_functions('cvloop.functions')
line_numbers_cls = get_linenumbers(classes, cvloop.functions,
'class {}:\n')
line_numbers = get_linenumbers(functions, cvloop.functions)
for cls in classes:
line_number = line_numbers_cls[cls]
notebook['cells'].append(create_description_cell(cls, line_number))
notebook['cells'].append(create_code_cell(cls, isclass=True))
for func in functions:
line_number = line_numbers[func]
notebook['cells'].append(create_description_cell(func, line_number))
notebook['cells'].append(create_code_cell(func))
with open(sys.argv[1], 'w') as nfile:
json.dump(notebook, nfile, indent=4)
if __name__ == '__main__':
main()
|
shoeffner/cvloop | tools/create_functions_ipynb.py | is_mod_class | python | def is_mod_class(mod, cls):
return inspect.isclass(cls) and inspect.getmodule(cls) == mod | Checks if a class in a module was declared in that module.
Args:
mod: the module
cls: the class | train | https://github.com/shoeffner/cvloop/blob/3ddd311e9b679d16c8fd36779931380374de343c/tools/create_functions_ipynb.py#L27-L34 | null | """This modules creates the example notebook for the cvloop.functions
notebook."""
import inspect
import json
import sys
sys.path.insert(0, '../cvloop')
import cvloop.functions # noqa: E402
GENERATE_ARGS = True
def is_mod_function(mod, fun):
"""Checks if a function in a module was declared in that module.
http://stackoverflow.com/a/1107150/3004221
Args:
mod: the module
fun: the function
"""
return inspect.isfunction(fun) and inspect.getmodule(fun) == mod
def list_functions(mod_name):
"""Lists all functions declared in a module.
http://stackoverflow.com/a/1107150/3004221
Args:
mod_name: the module name
Returns:
A list of functions declared in that module.
"""
mod = sys.modules[mod_name]
return [func.__name__ for func in mod.__dict__.values()
if is_mod_function(mod, func)]
def list_classes(mod_name):
"""Lists all classes declared in a module.
Args:
mod_name: the module name
Returns:
A list of functions declared in that module.
"""
mod = sys.modules[mod_name]
return [cls.__name__ for cls in mod.__dict__.values()
if is_mod_class(mod, cls)]
def get_linenumbers(functions, module, searchstr='def {}(image):\n'):
"""Returns a dictionary which maps function names to line numbers.
Args:
functions: a list of function names
module: the module to look the functions up
searchstr: the string to search for
Returns:
A dictionary with functions as keys and their line numbers as values.
"""
lines = inspect.getsourcelines(module)[0]
line_numbers = {}
for function in functions:
try:
line_numbers[function] = lines.index(
searchstr.format(function)) + 1
except ValueError:
print(r'Can not find `{}`'.format(searchstr.format(function)))
line_numbers[function] = 0
return line_numbers
def format_doc(fun):
"""Formats the documentation in a nicer way and for notebook cells."""
SEPARATOR = '============================='
func = cvloop.functions.__dict__[fun]
doc_lines = ['{}'.format(l).strip() for l in func.__doc__.split('\n')]
if hasattr(func, '__init__'):
doc_lines.append(SEPARATOR)
doc_lines += ['{}'.format(l).strip() for l in
func.__init__.__doc__.split('\n')]
mod_lines = []
argblock = False
returnblock = False
for line in doc_lines:
if line == SEPARATOR:
mod_lines.append('\n#### `{}.__init__(...)`:\n\n'.format(fun))
elif 'Args:' in line:
argblock = True
if GENERATE_ARGS:
mod_lines.append('**{}**\n'.format(line))
elif 'Returns:' in line:
returnblock = True
mod_lines.append('\n**{}**'.format(line))
elif not argblock and not returnblock:
mod_lines.append('{}\n'.format(line))
elif argblock and not returnblock and ':' in line:
if GENERATE_ARGS:
mod_lines.append('- *{}:* {}\n'.format(
*line.split(':')))
elif returnblock:
mod_lines.append(line)
else:
mod_lines.append('{}\n'.format(line))
return mod_lines
def create_description_cell(fun, line_number):
"""Creates a markdown cell with a title and the help doc string of a
function."""
return {
'cell_type': 'markdown',
'metadata': {},
'source': [
'## `cvloop.functions.{}` '.format(fun),
'<small>[[Source](https://github.com/shoeffner/cvloop/blob/',
'develop/cvloop/functions.py#L{})]</small>\n\n'
.format(line_number),
*format_doc(fun),
]
}
def create_code_cell(fun, isclass=False):
"""Creates a code cell which uses a simple cvloop and embeds the function
in question.
Args:
isclass: Defaults to False. If True, an instance will be created inside
the code cell.
"""
return {
'cell_type': 'code',
'metadata': {},
'outputs': [],
'execution_count': None,
'source': [
'from cvloop import cvloop, {}\n'.format(fun),
'cvloop(function={}{}, side_by_side=True)'.format(fun, '()' if
isclass else '')
]
}
def main():
"""Main function creates the cvloop.functions example notebook."""
notebook = {
'cells': [
{
'cell_type': 'markdown',
'metadata': {},
'source': [
'# cvloop functions\n\n',
'This notebook shows an overview over all cvloop ',
'functions provided in the [`cvloop.functions` module](',
'https://github.com/shoeffner/cvloop/blob/',
'develop/cvloop/functions.py).'
]
},
],
'nbformat': 4,
'nbformat_minor': 1,
'metadata': {
'language_info': {
'codemirror_mode': {
'name': 'ipython',
'version': 3
},
'file_extension': '.py',
'mimetype': 'text/x-python',
'name': 'python',
'nbconvert_exporter': 'python',
'pygments_lexer': 'ipython3',
'version': '3.5.1+'
}
}
}
classes = list_classes('cvloop.functions')
functions = list_functions('cvloop.functions')
line_numbers_cls = get_linenumbers(classes, cvloop.functions,
'class {}:\n')
line_numbers = get_linenumbers(functions, cvloop.functions)
for cls in classes:
line_number = line_numbers_cls[cls]
notebook['cells'].append(create_description_cell(cls, line_number))
notebook['cells'].append(create_code_cell(cls, isclass=True))
for func in functions:
line_number = line_numbers[func]
notebook['cells'].append(create_description_cell(func, line_number))
notebook['cells'].append(create_code_cell(func))
with open(sys.argv[1], 'w') as nfile:
json.dump(notebook, nfile, indent=4)
if __name__ == '__main__':
main()
|
shoeffner/cvloop | tools/create_functions_ipynb.py | list_functions | python | def list_functions(mod_name):
mod = sys.modules[mod_name]
return [func.__name__ for func in mod.__dict__.values()
if is_mod_function(mod, func)] | Lists all functions declared in a module.
http://stackoverflow.com/a/1107150/3004221
Args:
mod_name: the module name
Returns:
A list of functions declared in that module. | train | https://github.com/shoeffner/cvloop/blob/3ddd311e9b679d16c8fd36779931380374de343c/tools/create_functions_ipynb.py#L37-L49 | null | """This modules creates the example notebook for the cvloop.functions
notebook."""
import inspect
import json
import sys
sys.path.insert(0, '../cvloop')
import cvloop.functions # noqa: E402
GENERATE_ARGS = True
def is_mod_function(mod, fun):
"""Checks if a function in a module was declared in that module.
http://stackoverflow.com/a/1107150/3004221
Args:
mod: the module
fun: the function
"""
return inspect.isfunction(fun) and inspect.getmodule(fun) == mod
def is_mod_class(mod, cls):
"""Checks if a class in a module was declared in that module.
Args:
mod: the module
cls: the class
"""
return inspect.isclass(cls) and inspect.getmodule(cls) == mod
def list_classes(mod_name):
"""Lists all classes declared in a module.
Args:
mod_name: the module name
Returns:
A list of functions declared in that module.
"""
mod = sys.modules[mod_name]
return [cls.__name__ for cls in mod.__dict__.values()
if is_mod_class(mod, cls)]
def get_linenumbers(functions, module, searchstr='def {}(image):\n'):
"""Returns a dictionary which maps function names to line numbers.
Args:
functions: a list of function names
module: the module to look the functions up
searchstr: the string to search for
Returns:
A dictionary with functions as keys and their line numbers as values.
"""
lines = inspect.getsourcelines(module)[0]
line_numbers = {}
for function in functions:
try:
line_numbers[function] = lines.index(
searchstr.format(function)) + 1
except ValueError:
print(r'Can not find `{}`'.format(searchstr.format(function)))
line_numbers[function] = 0
return line_numbers
def format_doc(fun):
"""Formats the documentation in a nicer way and for notebook cells."""
SEPARATOR = '============================='
func = cvloop.functions.__dict__[fun]
doc_lines = ['{}'.format(l).strip() for l in func.__doc__.split('\n')]
if hasattr(func, '__init__'):
doc_lines.append(SEPARATOR)
doc_lines += ['{}'.format(l).strip() for l in
func.__init__.__doc__.split('\n')]
mod_lines = []
argblock = False
returnblock = False
for line in doc_lines:
if line == SEPARATOR:
mod_lines.append('\n#### `{}.__init__(...)`:\n\n'.format(fun))
elif 'Args:' in line:
argblock = True
if GENERATE_ARGS:
mod_lines.append('**{}**\n'.format(line))
elif 'Returns:' in line:
returnblock = True
mod_lines.append('\n**{}**'.format(line))
elif not argblock and not returnblock:
mod_lines.append('{}\n'.format(line))
elif argblock and not returnblock and ':' in line:
if GENERATE_ARGS:
mod_lines.append('- *{}:* {}\n'.format(
*line.split(':')))
elif returnblock:
mod_lines.append(line)
else:
mod_lines.append('{}\n'.format(line))
return mod_lines
def create_description_cell(fun, line_number):
"""Creates a markdown cell with a title and the help doc string of a
function."""
return {
'cell_type': 'markdown',
'metadata': {},
'source': [
'## `cvloop.functions.{}` '.format(fun),
'<small>[[Source](https://github.com/shoeffner/cvloop/blob/',
'develop/cvloop/functions.py#L{})]</small>\n\n'
.format(line_number),
*format_doc(fun),
]
}
def create_code_cell(fun, isclass=False):
"""Creates a code cell which uses a simple cvloop and embeds the function
in question.
Args:
isclass: Defaults to False. If True, an instance will be created inside
the code cell.
"""
return {
'cell_type': 'code',
'metadata': {},
'outputs': [],
'execution_count': None,
'source': [
'from cvloop import cvloop, {}\n'.format(fun),
'cvloop(function={}{}, side_by_side=True)'.format(fun, '()' if
isclass else '')
]
}
def main():
"""Main function creates the cvloop.functions example notebook."""
notebook = {
'cells': [
{
'cell_type': 'markdown',
'metadata': {},
'source': [
'# cvloop functions\n\n',
'This notebook shows an overview over all cvloop ',
'functions provided in the [`cvloop.functions` module](',
'https://github.com/shoeffner/cvloop/blob/',
'develop/cvloop/functions.py).'
]
},
],
'nbformat': 4,
'nbformat_minor': 1,
'metadata': {
'language_info': {
'codemirror_mode': {
'name': 'ipython',
'version': 3
},
'file_extension': '.py',
'mimetype': 'text/x-python',
'name': 'python',
'nbconvert_exporter': 'python',
'pygments_lexer': 'ipython3',
'version': '3.5.1+'
}
}
}
classes = list_classes('cvloop.functions')
functions = list_functions('cvloop.functions')
line_numbers_cls = get_linenumbers(classes, cvloop.functions,
'class {}:\n')
line_numbers = get_linenumbers(functions, cvloop.functions)
for cls in classes:
line_number = line_numbers_cls[cls]
notebook['cells'].append(create_description_cell(cls, line_number))
notebook['cells'].append(create_code_cell(cls, isclass=True))
for func in functions:
line_number = line_numbers[func]
notebook['cells'].append(create_description_cell(func, line_number))
notebook['cells'].append(create_code_cell(func))
with open(sys.argv[1], 'w') as nfile:
json.dump(notebook, nfile, indent=4)
if __name__ == '__main__':
main()
|
shoeffner/cvloop | tools/create_functions_ipynb.py | list_classes | python | def list_classes(mod_name):
mod = sys.modules[mod_name]
return [cls.__name__ for cls in mod.__dict__.values()
if is_mod_class(mod, cls)] | Lists all classes declared in a module.
Args:
mod_name: the module name
Returns:
A list of functions declared in that module. | train | https://github.com/shoeffner/cvloop/blob/3ddd311e9b679d16c8fd36779931380374de343c/tools/create_functions_ipynb.py#L52-L62 | null | """This modules creates the example notebook for the cvloop.functions
notebook."""
import inspect
import json
import sys
sys.path.insert(0, '../cvloop')
import cvloop.functions # noqa: E402
GENERATE_ARGS = True
def is_mod_function(mod, fun):
"""Checks if a function in a module was declared in that module.
http://stackoverflow.com/a/1107150/3004221
Args:
mod: the module
fun: the function
"""
return inspect.isfunction(fun) and inspect.getmodule(fun) == mod
def is_mod_class(mod, cls):
"""Checks if a class in a module was declared in that module.
Args:
mod: the module
cls: the class
"""
return inspect.isclass(cls) and inspect.getmodule(cls) == mod
def list_functions(mod_name):
"""Lists all functions declared in a module.
http://stackoverflow.com/a/1107150/3004221
Args:
mod_name: the module name
Returns:
A list of functions declared in that module.
"""
mod = sys.modules[mod_name]
return [func.__name__ for func in mod.__dict__.values()
if is_mod_function(mod, func)]
def get_linenumbers(functions, module, searchstr='def {}(image):\n'):
"""Returns a dictionary which maps function names to line numbers.
Args:
functions: a list of function names
module: the module to look the functions up
searchstr: the string to search for
Returns:
A dictionary with functions as keys and their line numbers as values.
"""
lines = inspect.getsourcelines(module)[0]
line_numbers = {}
for function in functions:
try:
line_numbers[function] = lines.index(
searchstr.format(function)) + 1
except ValueError:
print(r'Can not find `{}`'.format(searchstr.format(function)))
line_numbers[function] = 0
return line_numbers
def format_doc(fun):
"""Formats the documentation in a nicer way and for notebook cells."""
SEPARATOR = '============================='
func = cvloop.functions.__dict__[fun]
doc_lines = ['{}'.format(l).strip() for l in func.__doc__.split('\n')]
if hasattr(func, '__init__'):
doc_lines.append(SEPARATOR)
doc_lines += ['{}'.format(l).strip() for l in
func.__init__.__doc__.split('\n')]
mod_lines = []
argblock = False
returnblock = False
for line in doc_lines:
if line == SEPARATOR:
mod_lines.append('\n#### `{}.__init__(...)`:\n\n'.format(fun))
elif 'Args:' in line:
argblock = True
if GENERATE_ARGS:
mod_lines.append('**{}**\n'.format(line))
elif 'Returns:' in line:
returnblock = True
mod_lines.append('\n**{}**'.format(line))
elif not argblock and not returnblock:
mod_lines.append('{}\n'.format(line))
elif argblock and not returnblock and ':' in line:
if GENERATE_ARGS:
mod_lines.append('- *{}:* {}\n'.format(
*line.split(':')))
elif returnblock:
mod_lines.append(line)
else:
mod_lines.append('{}\n'.format(line))
return mod_lines
def create_description_cell(fun, line_number):
"""Creates a markdown cell with a title and the help doc string of a
function."""
return {
'cell_type': 'markdown',
'metadata': {},
'source': [
'## `cvloop.functions.{}` '.format(fun),
'<small>[[Source](https://github.com/shoeffner/cvloop/blob/',
'develop/cvloop/functions.py#L{})]</small>\n\n'
.format(line_number),
*format_doc(fun),
]
}
def create_code_cell(fun, isclass=False):
"""Creates a code cell which uses a simple cvloop and embeds the function
in question.
Args:
isclass: Defaults to False. If True, an instance will be created inside
the code cell.
"""
return {
'cell_type': 'code',
'metadata': {},
'outputs': [],
'execution_count': None,
'source': [
'from cvloop import cvloop, {}\n'.format(fun),
'cvloop(function={}{}, side_by_side=True)'.format(fun, '()' if
isclass else '')
]
}
def main():
"""Main function creates the cvloop.functions example notebook."""
notebook = {
'cells': [
{
'cell_type': 'markdown',
'metadata': {},
'source': [
'# cvloop functions\n\n',
'This notebook shows an overview over all cvloop ',
'functions provided in the [`cvloop.functions` module](',
'https://github.com/shoeffner/cvloop/blob/',
'develop/cvloop/functions.py).'
]
},
],
'nbformat': 4,
'nbformat_minor': 1,
'metadata': {
'language_info': {
'codemirror_mode': {
'name': 'ipython',
'version': 3
},
'file_extension': '.py',
'mimetype': 'text/x-python',
'name': 'python',
'nbconvert_exporter': 'python',
'pygments_lexer': 'ipython3',
'version': '3.5.1+'
}
}
}
classes = list_classes('cvloop.functions')
functions = list_functions('cvloop.functions')
line_numbers_cls = get_linenumbers(classes, cvloop.functions,
'class {}:\n')
line_numbers = get_linenumbers(functions, cvloop.functions)
for cls in classes:
line_number = line_numbers_cls[cls]
notebook['cells'].append(create_description_cell(cls, line_number))
notebook['cells'].append(create_code_cell(cls, isclass=True))
for func in functions:
line_number = line_numbers[func]
notebook['cells'].append(create_description_cell(func, line_number))
notebook['cells'].append(create_code_cell(func))
with open(sys.argv[1], 'w') as nfile:
json.dump(notebook, nfile, indent=4)
if __name__ == '__main__':
main()
|
shoeffner/cvloop | tools/create_functions_ipynb.py | get_linenumbers | python | def get_linenumbers(functions, module, searchstr='def {}(image):\n'):
lines = inspect.getsourcelines(module)[0]
line_numbers = {}
for function in functions:
try:
line_numbers[function] = lines.index(
searchstr.format(function)) + 1
except ValueError:
print(r'Can not find `{}`'.format(searchstr.format(function)))
line_numbers[function] = 0
return line_numbers | Returns a dictionary which maps function names to line numbers.
Args:
functions: a list of function names
module: the module to look the functions up
searchstr: the string to search for
Returns:
A dictionary with functions as keys and their line numbers as values. | train | https://github.com/shoeffner/cvloop/blob/3ddd311e9b679d16c8fd36779931380374de343c/tools/create_functions_ipynb.py#L65-L84 | null | """This modules creates the example notebook for the cvloop.functions
notebook."""
import inspect
import json
import sys
sys.path.insert(0, '../cvloop')
import cvloop.functions # noqa: E402
GENERATE_ARGS = True
def is_mod_function(mod, fun):
"""Checks if a function in a module was declared in that module.
http://stackoverflow.com/a/1107150/3004221
Args:
mod: the module
fun: the function
"""
return inspect.isfunction(fun) and inspect.getmodule(fun) == mod
def is_mod_class(mod, cls):
"""Checks if a class in a module was declared in that module.
Args:
mod: the module
cls: the class
"""
return inspect.isclass(cls) and inspect.getmodule(cls) == mod
def list_functions(mod_name):
"""Lists all functions declared in a module.
http://stackoverflow.com/a/1107150/3004221
Args:
mod_name: the module name
Returns:
A list of functions declared in that module.
"""
mod = sys.modules[mod_name]
return [func.__name__ for func in mod.__dict__.values()
if is_mod_function(mod, func)]
def list_classes(mod_name):
"""Lists all classes declared in a module.
Args:
mod_name: the module name
Returns:
A list of functions declared in that module.
"""
mod = sys.modules[mod_name]
return [cls.__name__ for cls in mod.__dict__.values()
if is_mod_class(mod, cls)]
def format_doc(fun):
"""Formats the documentation in a nicer way and for notebook cells."""
SEPARATOR = '============================='
func = cvloop.functions.__dict__[fun]
doc_lines = ['{}'.format(l).strip() for l in func.__doc__.split('\n')]
if hasattr(func, '__init__'):
doc_lines.append(SEPARATOR)
doc_lines += ['{}'.format(l).strip() for l in
func.__init__.__doc__.split('\n')]
mod_lines = []
argblock = False
returnblock = False
for line in doc_lines:
if line == SEPARATOR:
mod_lines.append('\n#### `{}.__init__(...)`:\n\n'.format(fun))
elif 'Args:' in line:
argblock = True
if GENERATE_ARGS:
mod_lines.append('**{}**\n'.format(line))
elif 'Returns:' in line:
returnblock = True
mod_lines.append('\n**{}**'.format(line))
elif not argblock and not returnblock:
mod_lines.append('{}\n'.format(line))
elif argblock and not returnblock and ':' in line:
if GENERATE_ARGS:
mod_lines.append('- *{}:* {}\n'.format(
*line.split(':')))
elif returnblock:
mod_lines.append(line)
else:
mod_lines.append('{}\n'.format(line))
return mod_lines
def create_description_cell(fun, line_number):
"""Creates a markdown cell with a title and the help doc string of a
function."""
return {
'cell_type': 'markdown',
'metadata': {},
'source': [
'## `cvloop.functions.{}` '.format(fun),
'<small>[[Source](https://github.com/shoeffner/cvloop/blob/',
'develop/cvloop/functions.py#L{})]</small>\n\n'
.format(line_number),
*format_doc(fun),
]
}
def create_code_cell(fun, isclass=False):
"""Creates a code cell which uses a simple cvloop and embeds the function
in question.
Args:
isclass: Defaults to False. If True, an instance will be created inside
the code cell.
"""
return {
'cell_type': 'code',
'metadata': {},
'outputs': [],
'execution_count': None,
'source': [
'from cvloop import cvloop, {}\n'.format(fun),
'cvloop(function={}{}, side_by_side=True)'.format(fun, '()' if
isclass else '')
]
}
def main():
"""Main function creates the cvloop.functions example notebook."""
notebook = {
'cells': [
{
'cell_type': 'markdown',
'metadata': {},
'source': [
'# cvloop functions\n\n',
'This notebook shows an overview over all cvloop ',
'functions provided in the [`cvloop.functions` module](',
'https://github.com/shoeffner/cvloop/blob/',
'develop/cvloop/functions.py).'
]
},
],
'nbformat': 4,
'nbformat_minor': 1,
'metadata': {
'language_info': {
'codemirror_mode': {
'name': 'ipython',
'version': 3
},
'file_extension': '.py',
'mimetype': 'text/x-python',
'name': 'python',
'nbconvert_exporter': 'python',
'pygments_lexer': 'ipython3',
'version': '3.5.1+'
}
}
}
classes = list_classes('cvloop.functions')
functions = list_functions('cvloop.functions')
line_numbers_cls = get_linenumbers(classes, cvloop.functions,
'class {}:\n')
line_numbers = get_linenumbers(functions, cvloop.functions)
for cls in classes:
line_number = line_numbers_cls[cls]
notebook['cells'].append(create_description_cell(cls, line_number))
notebook['cells'].append(create_code_cell(cls, isclass=True))
for func in functions:
line_number = line_numbers[func]
notebook['cells'].append(create_description_cell(func, line_number))
notebook['cells'].append(create_code_cell(func))
with open(sys.argv[1], 'w') as nfile:
json.dump(notebook, nfile, indent=4)
if __name__ == '__main__':
main()
|
shoeffner/cvloop | tools/create_functions_ipynb.py | format_doc | python | def format_doc(fun):
SEPARATOR = '============================='
func = cvloop.functions.__dict__[fun]
doc_lines = ['{}'.format(l).strip() for l in func.__doc__.split('\n')]
if hasattr(func, '__init__'):
doc_lines.append(SEPARATOR)
doc_lines += ['{}'.format(l).strip() for l in
func.__init__.__doc__.split('\n')]
mod_lines = []
argblock = False
returnblock = False
for line in doc_lines:
if line == SEPARATOR:
mod_lines.append('\n#### `{}.__init__(...)`:\n\n'.format(fun))
elif 'Args:' in line:
argblock = True
if GENERATE_ARGS:
mod_lines.append('**{}**\n'.format(line))
elif 'Returns:' in line:
returnblock = True
mod_lines.append('\n**{}**'.format(line))
elif not argblock and not returnblock:
mod_lines.append('{}\n'.format(line))
elif argblock and not returnblock and ':' in line:
if GENERATE_ARGS:
mod_lines.append('- *{}:* {}\n'.format(
*line.split(':')))
elif returnblock:
mod_lines.append(line)
else:
mod_lines.append('{}\n'.format(line))
return mod_lines | Formats the documentation in a nicer way and for notebook cells. | train | https://github.com/shoeffner/cvloop/blob/3ddd311e9b679d16c8fd36779931380374de343c/tools/create_functions_ipynb.py#L87-L121 | null | """This modules creates the example notebook for the cvloop.functions
notebook."""
import inspect
import json
import sys
sys.path.insert(0, '../cvloop')
import cvloop.functions # noqa: E402
GENERATE_ARGS = True
def is_mod_function(mod, fun):
"""Checks if a function in a module was declared in that module.
http://stackoverflow.com/a/1107150/3004221
Args:
mod: the module
fun: the function
"""
return inspect.isfunction(fun) and inspect.getmodule(fun) == mod
def is_mod_class(mod, cls):
"""Checks if a class in a module was declared in that module.
Args:
mod: the module
cls: the class
"""
return inspect.isclass(cls) and inspect.getmodule(cls) == mod
def list_functions(mod_name):
"""Lists all functions declared in a module.
http://stackoverflow.com/a/1107150/3004221
Args:
mod_name: the module name
Returns:
A list of functions declared in that module.
"""
mod = sys.modules[mod_name]
return [func.__name__ for func in mod.__dict__.values()
if is_mod_function(mod, func)]
def list_classes(mod_name):
"""Lists all classes declared in a module.
Args:
mod_name: the module name
Returns:
A list of functions declared in that module.
"""
mod = sys.modules[mod_name]
return [cls.__name__ for cls in mod.__dict__.values()
if is_mod_class(mod, cls)]
def get_linenumbers(functions, module, searchstr='def {}(image):\n'):
    """Map function names to their 1-based line numbers in a module's source.

    Args:
        functions: a list of function names
        module: the module whose source is searched
        searchstr: the exact source line to look for, containing a ``{}``
            placeholder for the function name
    Returns:
        A dictionary with function names as keys and their line numbers as
        values; a name whose line is not found maps to 0.
    """
    source_lines = inspect.getsourcelines(module)[0]
    numbers = {}
    for name in functions:
        needle = searchstr.format(name)
        if needle in source_lines:
            numbers[name] = source_lines.index(needle) + 1
        else:
            print(r'Can not find `{}`'.format(needle))
            numbers[name] = 0
    return numbers
def create_description_cell(fun, line_number):
    """Build a markdown notebook cell with a title, a GitHub source link,
    and the formatted documentation of the named function."""
    source_lines = [
        '## `cvloop.functions.{}` '.format(fun),
        '<small>[[Source](https://github.com/shoeffner/cvloop/blob/',
        'develop/cvloop/functions.py#L{})]</small>\n\n'
        .format(line_number),
    ]
    source_lines.extend(format_doc(fun))
    return {
        'cell_type': 'markdown',
        'metadata': {},
        'source': source_lines,
    }
def create_code_cell(fun, isclass=False):
    """Build a code notebook cell that runs *fun* inside a simple cvloop.

    Args:
        isclass: Defaults to False. If True, the generated code calls the
            constructor so that an instance is passed to cvloop.
    """
    call_suffix = '()' if isclass else ''
    return {
        'cell_type': 'code',
        'metadata': {},
        'outputs': [],
        'execution_count': None,
        'source': [
            'from cvloop import cvloop, {}\n'.format(fun),
            'cvloop(function={}{}, side_by_side=True)'.format(fun,
                                                              call_suffix),
        ],
    }
def main():
    """Main function creates the cvloop.functions example notebook.

    The output path is taken from the first command line argument
    (sys.argv[1]).
    """
    # Notebook skeleton (nbformat v4): a single introductory markdown cell;
    # a description/code cell pair is appended below for every class and
    # function found in cvloop.functions.
    notebook = {
        'cells': [
            {
                'cell_type': 'markdown',
                'metadata': {},
                'source': [
                    '# cvloop functions\n\n',
                    'This notebook shows an overview over all cvloop ',
                    'functions provided in the [`cvloop.functions` module](',
                    'https://github.com/shoeffner/cvloop/blob/',
                    'develop/cvloop/functions.py).'
                ]
            },
        ],
        'nbformat': 4,
        'nbformat_minor': 1,
        'metadata': {
            'language_info': {
                'codemirror_mode': {
                    'name': 'ipython',
                    'version': 3
                },
                'file_extension': '.py',
                'mimetype': 'text/x-python',
                'name': 'python',
                'nbconvert_exporter': 'python',
                'pygments_lexer': 'ipython3',
                'version': '3.5.1+'
            }
        }
    }
    # Names of everything declared directly in cvloop.functions.
    classes = list_classes('cvloop.functions')
    functions = list_functions('cvloop.functions')
    # Classes are declared as 'class Name:'; functions match the
    # get_linenumbers default search pattern 'def name(image):\n'.
    line_numbers_cls = get_linenumbers(classes, cvloop.functions,
                                       'class {}:\n')
    line_numbers = get_linenumbers(functions, cvloop.functions)
    for cls in classes:
        line_number = line_numbers_cls[cls]
        notebook['cells'].append(create_description_cell(cls, line_number))
        notebook['cells'].append(create_code_cell(cls, isclass=True))
    for func in functions:
        line_number = line_numbers[func]
        notebook['cells'].append(create_description_cell(func, line_number))
        notebook['cells'].append(create_code_cell(func))
    # Write the assembled notebook as JSON to the path given on the CLI.
    with open(sys.argv[1], 'w') as nfile:
        json.dump(notebook, nfile, indent=4)
if __name__ == '__main__':
main()
|
shoeffner/cvloop | tools/create_functions_ipynb.py | main | python | def main():
notebook = {
'cells': [
{
'cell_type': 'markdown',
'metadata': {},
'source': [
'# cvloop functions\n\n',
'This notebook shows an overview over all cvloop ',
'functions provided in the [`cvloop.functions` module](',
'https://github.com/shoeffner/cvloop/blob/',
'develop/cvloop/functions.py).'
]
},
],
'nbformat': 4,
'nbformat_minor': 1,
'metadata': {
'language_info': {
'codemirror_mode': {
'name': 'ipython',
'version': 3
},
'file_extension': '.py',
'mimetype': 'text/x-python',
'name': 'python',
'nbconvert_exporter': 'python',
'pygments_lexer': 'ipython3',
'version': '3.5.1+'
}
}
}
classes = list_classes('cvloop.functions')
functions = list_functions('cvloop.functions')
line_numbers_cls = get_linenumbers(classes, cvloop.functions,
'class {}:\n')
line_numbers = get_linenumbers(functions, cvloop.functions)
for cls in classes:
line_number = line_numbers_cls[cls]
notebook['cells'].append(create_description_cell(cls, line_number))
notebook['cells'].append(create_code_cell(cls, isclass=True))
for func in functions:
line_number = line_numbers[func]
notebook['cells'].append(create_description_cell(func, line_number))
notebook['cells'].append(create_code_cell(func))
with open(sys.argv[1], 'w') as nfile:
json.dump(notebook, nfile, indent=4) | Main function creates the cvloop.functions example notebook. | train | https://github.com/shoeffner/cvloop/blob/3ddd311e9b679d16c8fd36779931380374de343c/tools/create_functions_ipynb.py#L161-L212 | [
"def list_functions(mod_name):\n \"\"\"Lists all functions declared in a module.\n\n http://stackoverflow.com/a/1107150/3004221\n\n Args:\n mod_name: the module name\n Returns:\n A list of functions declared in that module.\n \"\"\"\n mod = sys.modules[mod_name]\n return [func.__name__ for func in mod.__dict__.values()\n if is_mod_function(mod, func)]\n",
"def list_classes(mod_name):\n \"\"\"Lists all classes declared in a module.\n\n Args:\n mod_name: the module name\n Returns:\n A list of functions declared in that module.\n \"\"\"\n mod = sys.modules[mod_name]\n return [cls.__name__ for cls in mod.__dict__.values()\n if is_mod_class(mod, cls)]\n",
"def get_linenumbers(functions, module, searchstr='def {}(image):\\n'):\n \"\"\"Returns a dictionary which maps function names to line numbers.\n\n Args:\n functions: a list of function names\n module: the module to look the functions up\n searchstr: the string to search for\n Returns:\n A dictionary with functions as keys and their line numbers as values.\n \"\"\"\n lines = inspect.getsourcelines(module)[0]\n line_numbers = {}\n for function in functions:\n try:\n line_numbers[function] = lines.index(\n searchstr.format(function)) + 1\n except ValueError:\n print(r'Can not find `{}`'.format(searchstr.format(function)))\n line_numbers[function] = 0\n return line_numbers\n",
"def create_description_cell(fun, line_number):\n \"\"\"Creates a markdown cell with a title and the help doc string of a\n function.\"\"\"\n return {\n 'cell_type': 'markdown',\n 'metadata': {},\n 'source': [\n '## `cvloop.functions.{}` '.format(fun),\n '<small>[[Source](https://github.com/shoeffner/cvloop/blob/',\n 'develop/cvloop/functions.py#L{})]</small>\\n\\n'\n .format(line_number),\n *format_doc(fun),\n ]\n }\n",
"def create_code_cell(fun, isclass=False):\n \"\"\"Creates a code cell which uses a simple cvloop and embeds the function\n in question.\n\n Args:\n isclass: Defaults to False. If True, an instance will be created inside\n the code cell.\n \"\"\"\n return {\n 'cell_type': 'code',\n 'metadata': {},\n 'outputs': [],\n 'execution_count': None,\n 'source': [\n 'from cvloop import cvloop, {}\\n'.format(fun),\n 'cvloop(function={}{}, side_by_side=True)'.format(fun, '()' if\n isclass else '')\n ]\n }\n"
] | """This modules creates the example notebook for the cvloop.functions
notebook."""
import inspect
import json
import sys
sys.path.insert(0, '../cvloop')
import cvloop.functions # noqa: E402
GENERATE_ARGS = True
def is_mod_function(mod, fun):
"""Checks if a function in a module was declared in that module.
http://stackoverflow.com/a/1107150/3004221
Args:
mod: the module
fun: the function
"""
return inspect.isfunction(fun) and inspect.getmodule(fun) == mod
def is_mod_class(mod, cls):
"""Checks if a class in a module was declared in that module.
Args:
mod: the module
cls: the class
"""
return inspect.isclass(cls) and inspect.getmodule(cls) == mod
def list_functions(mod_name):
"""Lists all functions declared in a module.
http://stackoverflow.com/a/1107150/3004221
Args:
mod_name: the module name
Returns:
A list of functions declared in that module.
"""
mod = sys.modules[mod_name]
return [func.__name__ for func in mod.__dict__.values()
if is_mod_function(mod, func)]
def list_classes(mod_name):
"""Lists all classes declared in a module.
Args:
mod_name: the module name
Returns:
A list of functions declared in that module.
"""
mod = sys.modules[mod_name]
return [cls.__name__ for cls in mod.__dict__.values()
if is_mod_class(mod, cls)]
def get_linenumbers(functions, module, searchstr='def {}(image):\n'):
"""Returns a dictionary which maps function names to line numbers.
Args:
functions: a list of function names
module: the module to look the functions up
searchstr: the string to search for
Returns:
A dictionary with functions as keys and their line numbers as values.
"""
lines = inspect.getsourcelines(module)[0]
line_numbers = {}
for function in functions:
try:
line_numbers[function] = lines.index(
searchstr.format(function)) + 1
except ValueError:
print(r'Can not find `{}`'.format(searchstr.format(function)))
line_numbers[function] = 0
return line_numbers
def format_doc(fun):
    """Formats the documentation of a cvloop.functions member for notebook
    cells.

    Re-flows the docstring of the function or class named *fun* into a list
    of markdown lines: ``Args:``/``Returns:`` headings become bold,
    argument lines become bullet points, and for classes the ``__init__``
    docstring is appended as its own section.

    Args:
        fun: the name of a function or class declared in cvloop.functions.
    Returns:
        A list of markdown-formatted documentation lines.
    """
    SEPARATOR = '============================='
    func = cvloop.functions.__dict__[fun]
    doc_lines = [line.strip() for line in func.__doc__.split('\n')]
    # Only classes get an extra __init__ section. The previous check,
    # hasattr(func, '__init__'), is True for every Python object, so plain
    # functions also received a bogus section built from the
    # "Initialize self..." method-wrapper docstring.
    if inspect.isclass(func):
        doc_lines.append(SEPARATOR)
        doc_lines += [line.strip() for line in
                      func.__init__.__doc__.split('\n')]
    mod_lines = []
    argblock = False
    returnblock = False
    for line in doc_lines:
        if line == SEPARATOR:
            mod_lines.append('\n#### `{}.__init__(...)`:\n\n'.format(fun))
        elif 'Args:' in line:
            argblock = True
            if GENERATE_ARGS:
                mod_lines.append('**{}**\n'.format(line))
        elif 'Returns:' in line:
            returnblock = True
            mod_lines.append('\n**{}**'.format(line))
        elif not argblock and not returnblock:
            mod_lines.append('{}\n'.format(line))
        elif argblock and not returnblock and ':' in line:
            if GENERATE_ARGS:
                # Split on the first colon only so descriptions that
                # themselves contain colons are not truncated (str.format
                # silently ignored the surplus split parts before).
                mod_lines.append('- *{}:* {}\n'.format(
                    *line.split(':', 1)))
        elif returnblock:
            mod_lines.append(line)
        else:
            mod_lines.append('{}\n'.format(line))
    return mod_lines
def create_description_cell(fun, line_number):
"""Creates a markdown cell with a title and the help doc string of a
function."""
return {
'cell_type': 'markdown',
'metadata': {},
'source': [
'## `cvloop.functions.{}` '.format(fun),
'<small>[[Source](https://github.com/shoeffner/cvloop/blob/',
'develop/cvloop/functions.py#L{})]</small>\n\n'
.format(line_number),
*format_doc(fun),
]
}
def create_code_cell(fun, isclass=False):
"""Creates a code cell which uses a simple cvloop and embeds the function
in question.
Args:
isclass: Defaults to False. If True, an instance will be created inside
the code cell.
"""
return {
'cell_type': 'code',
'metadata': {},
'outputs': [],
'execution_count': None,
'source': [
'from cvloop import cvloop, {}\n'.format(fun),
'cvloop(function={}{}, side_by_side=True)'.format(fun, '()' if
isclass else '')
]
}
if __name__ == '__main__':
main()
|
shoeffner/cvloop | cvloop/cvloop.py | prepare_axes | python | def prepare_axes(axes, title, size, cmap=None):
if axes is None:
return None
# prepare axis itself
axes.set_xlim([0, size[1]])
axes.set_ylim([size[0], 0])
axes.set_aspect('equal')
axes.axis('off')
if isinstance(cmap, str):
title = '{} (cmap: {})'.format(title, cmap)
axes.set_title(title)
# prepare image data
axes_image = image.AxesImage(axes, cmap=cmap,
extent=(0, size[1], size[0], 0))
axes_image.set_data(np.random.random((size[0], size[1], 3)))
axes.add_image(axes_image)
return axes_image | Prepares an axes object for clean plotting.
Removes x and y axes labels and ticks, sets the aspect ratio to be
equal, uses the size to determine the drawing area and fills the image
with random colors as visual feedback.
Creates an AxesImage to be shown inside the axes object and sets the
needed properties.
Args:
axes: The axes object to modify.
title: The title.
size: The size of the expected image.
cmap: The colormap if a custom color map is needed.
(Default: None)
Returns:
The AxesImage's handle. | train | https://github.com/shoeffner/cvloop/blob/3ddd311e9b679d16c8fd36779931380374de343c/cvloop/cvloop.py#L29-L67 | null | """Provides a videoloop to be used in jupyter notebooks.
It automatically selects the notebook backend for matplotlib, if the
default notebook backend (inline) is detected.
"""
import itertools
from IPython.core.getipython import get_ipython
from IPython.core.magics.pylab import PylabMagics
import numpy as np
import cv2
# Monkeypatch backend to include "pause" button and fire the pause_event.
from matplotlib.backends.backend_nbagg import NavigationIPy # noqa: E402
NavigationIPy.toolitems += [('Pause', 'Pause/Resume video',
'fa fa-pause icon-pause', 'pause')]
NavigationIPy.pause = lambda self: self.canvas.callbacks.process('pause_event')
# pragma pylint: disable=wrong-import-position
import matplotlib.pyplot as plt # noqa: E402
import matplotlib.animation as animation # noqa: E402
import matplotlib.image as image # noqa: E402
import matplotlib.patches as patches # noqa: E402
# pragma pylint: enable=wrong-import-position
def is_color_image(frame):
    """Checks if an image is a color image.

    A color image has at least three dimensions and, along the third
    dimension, at least three color channels.

    Returns:
        True if the image is a color image.
    """
    shape = frame.shape
    if len(shape) < 3:
        return False
    return shape[2] >= 3
def to_gray(frame):
    """Converts a color image to gray scale; passes others through.

    The first color channel is treated as R, the second as G, and the
    last as B; the result is the weighted sum:

        gray = .299 R + .587 G + .114 B

    Returns:
        The converted image if the input was a color image, otherwise
        the original input.
    """
    weights = [.299, .587, .114]
    return np.dot(frame[..., :3], weights) if is_color_image(frame) \
        else frame
class cvloop(animation.TimedAnimation):  # noqa: E501 pylint: disable=invalid-name, too-many-instance-attributes
    """Uses a TimedAnimation to efficiently render video sources with blit."""

    def __init__(self, source=None, function=lambda x: x, *,
                 side_by_side=False, convert_color=cv2.COLOR_BGR2RGB,
                 cmaps=None, print_info=False, annotations=None,
                 annotations_default={'shape': 'RECT',
                                      'color': '#228B22',
                                      'line': 2,
                                      'size': (20, 20)}):
        """Runs a video loop for the specified source and modifies the stream
        with the function.

        The source can either be an integer for a webcam device, a string to
        load a video file or a VideoCapture object. In the last case, the
        capture object will not be released by this function.

        The function takes in a frame and returns the modified frame. The
        default value just passes the value through, it is equivalent to the
        identity function.

        If side_by_side is True, the input as well as the modified image are
        shown side by side, otherwise only the output is shown.

        convert_color can be any value for cv2.cvtColor, e.g.
        cv2.COLOR_BGR2RGB. If it is -1, no color conversion is performed,
        otherwise a color conversion using cv2.cvtColor is performed before
        the image is passed to the function.

        Args:
            source: The video source; ints for webcams/devices, a string to
                load a video file. To fine tune a video source, it is
                possible to pass a VideoCapture object directly.
                (Default: 0)
            function: The modification function.
                (Default: identity function `lambda x: x`)
            side_by_side: If True, both images are shown, the original and
                the modified image.
                (Default: False)
            convert_color: Converts the image with the given value using
                `cv2.cvtColor`, unless value is -1.
                (Default: `cv2.COLOR_BGR2RGB`)
            cmaps: If None, the plot function makes guesses about what color
                maps to use (if at all). If a single value, that color map
                is used for all plots (e.g. cmaps='gray'). If cmaps is a
                tuple, the first value is used on the original image, the
                second value for the modified image. If cmaps is a tuple,
                None-entries are ignored and result in the normal guessing.
            print_info: If True, prints some info about the resource:
                dimensions, color channels, data type. Skips the output
                of one frame.
            annotations: A list or tuple of annotations. Each annotation is
                a list or tuple in turn of this format:
                [x, y, frame, options]
                x: the x coordinate of the center
                y: the y coordinate of the center
                frame: the frame number
                options: A dictionary. This is optional (leaving the
                    list with only three elements). Allows the
                    following keys:
                    shape: 'RECT' or 'CIRC' (rectangle, circle)
                    line: linewidth
                    color: RGB tuple, gray scalar or html hex-string
                    size: radius for CIRC, (width, height) for RECT
            annotations_default: A default format, that will be used if no
                specific format is given for an annotation. If no format is
                specified the following defaults are used:
                shape: 'RECT',
                color: '#228B22', (forestgreen)
                line: 2,
                size: (20, 20)
        """
        # NOTE(review): annotations_default is a mutable default argument;
        # it is only read by this class, but callers must not mutate it.
        # Switch Jupyter's static inline backend to the interactive
        # notebook backend so the blitting animation can actually render.
        if plt.get_backend() in (
                'module://ipykernel.pylab.backend_inline',
                'nbAgg'):
            # Calls IPython's magic variables
            for conf in get_ipython().configurables:
                if isinstance(conf, PylabMagics):
                    conf.matplotlib(line='notebook')
                    # NOTE(review): the call appears twice in the source —
                    # presumably deliberate to force the switch; confirm
                    # before removing.
                    conf.matplotlib(line='notebook')
        if source is not None:
            # Accept anything that already behaves like a VideoCapture
            # (duck-typed via a read method); otherwise open the source.
            if isinstance(source, type(cv2.VideoCapture())) \
                    or hasattr(source, 'read'):
                self.capture = source
            else:
                self.capture = cv2.VideoCapture(source)
        else:
            # Default source: first webcam device.
            self.capture = cv2.VideoCapture(0)
        self.figure = plt.figure()
        self.connect_event_handlers()
        self.function = function
        self.convert_color = convert_color
        # Sort annotations by frame number so annotate() can stop early.
        self.annotations = (None if not annotations else
                            sorted(annotations, key=lambda a: a[2]))
        self.annotations_default = annotations_default
        self.annotation_artists = []
        self.original = None
        self.processed = None
        # Number of frames consumed for info gathering before playback.
        self.frame_offset = 0
        # cmaps may be None, a single string, or a (original, processed)
        # tuple; IndexError/TypeError cover the None and too-short cases.
        try:
            self.cmap_original = cmaps if isinstance(cmaps, str) else cmaps[0]
        except (IndexError, TypeError):
            self.cmap_original = None
        try:
            self.cmap_processed = cmaps if isinstance(cmaps, str) else cmaps[1]
        except (IndexError, TypeError):
            self.cmap_processed = None
        if side_by_side:
            axes_original = self.figure.add_subplot(1, 2, 1)
            axes_processed = self.figure.add_subplot(1, 2, 2)
        else:
            axes_original = None
            axes_processed = self.figure.add_subplot(1, 1, 1)
        if print_info:
            self.print_info(self.capture)
        self.size = self.determine_size(self.capture)
        self.original = prepare_axes(axes_original, 'Original',
                                     self.size, self.cmap_original)
        self.processed = prepare_axes(axes_processed, 'Processed',
                                      self.size, self.cmap_processed)
        self.axes_processed = axes_processed
        self.update_info()
        # 50 ms between frames (about 20 fps); blitting for performance.
        super().__init__(self.figure, interval=50, blit=True)
        plt.show()

    def connect_event_handlers(self):
        """Connects event handlers to the figure."""
        # pause_event is emitted by the monkeypatched toolbar pause button.
        self.figure.canvas.mpl_connect('close_event', self.evt_release)
        self.figure.canvas.mpl_connect('pause_event', self.evt_toggle_pause)

    def evt_release(self, *args):  # pylint: disable=unused-argument
        """Tries to release the capture."""
        try:
            self.capture.release()
        except AttributeError:
            # Source has no release method (e.g. a custom duck-typed one).
            pass

    def evt_toggle_pause(self, *args):  # pylint: disable=unused-argument
        """Pauses and resumes the video source."""
        # _timer is None while the animation's timer is stopped.
        if self.event_source._timer is None:  # noqa: e501 pylint: disable=protected-access
            self.event_source.start()
        else:
            self.event_source.stop()

    def print_info(self, capture):
        """Prints information about the unprocessed image.

        Reads one frame from the source to determine image colors,
        dimensions and data types.

        Args:
            capture: the source to read from.
        """
        # The consumed frame is accounted for in the frame counter.
        self.frame_offset += 1
        ret, frame = capture.read()
        if ret:
            print('Capture Information')
            print('\tDimensions (HxW): {}x{}'.format(*frame.shape[0:2]))
            print('\tColor channels: {}'.format(frame.shape[2] if
                                                len(frame.shape) > 2 else 1))
            print('\tColor range: {}-{}'.format(np.min(frame),
                                                np.max(frame)))
            print('\tdtype: {}'.format(frame.dtype))
        else:
            print('No source found.')

    def determine_size(self, capture):
        """Determines the height and width of the image source.

        If no dimensions are available, this method defaults to a
        resolution of 640x480, thus returns (480, 640).

        If capture has a get method it is assumed to understand
        `cv2.CAP_PROP_FRAME_WIDTH` and `cv2.CAP_PROP_FRAME_HEIGHT` to get
        the information. Otherwise it reads one frame from the source to
        determine image dimensions.

        Args:
            capture: the source to read from.
        Returns:
            A tuple containing integers of height and width (simple casts).
        """
        width = 640
        height = 480
        if capture and hasattr(capture, 'get'):
            width = capture.get(cv2.CAP_PROP_FRAME_WIDTH)
            height = capture.get(cv2.CAP_PROP_FRAME_HEIGHT)
        else:
            # Fallback: consume one frame and inspect its shape.
            self.frame_offset += 1
            ret, frame = capture.read()
            if ret:
                width = frame.shape[1]
                height = frame.shape[0]
        return (int(height), int(width))

    def new_frame_seq(self):
        """Returns an endless frame counter.

        Starts at self.frame_offset, in case some methods had to read
        frames beforehand to gather information.

        This function is called by TimedAnimation.

        Returns:
            an endless frame count
        """
        return itertools.count(self.frame_offset)

    def _init_draw(self):
        """Initializes the drawing of the frames by setting the images to
        random colors.

        This function is called by TimedAnimation.
        """
        if self.original is not None:
            self.original.set_data(np.random.random((10, 10, 3)))
        self.processed.set_data(np.random.random((10, 10, 3)))

    def read_frame(self):
        """Reads a frame and converts the color if needed.

        In case no frame is available, i.e. self.capture.read() returns
        False as the first return value, the event_source of the
        TimedAnimation is stopped, and if possible the capture source
        released.

        Returns:
            None if stopped, otherwise the color converted source image.
        """
        ret, frame = self.capture.read()
        if not ret:
            self.event_source.stop()
            try:
                self.capture.release()
            except AttributeError:
                # has no release method, thus just pass
                pass
            return None
        if self.convert_color != -1 and is_color_image(frame):
            return cv2.cvtColor(frame, self.convert_color)
        return frame

    def process_frame(self, frame):
        """Processes a frame with the user specified function.

        Args:
            frame: The input frame.
        Returns:
            The processed frame.
        """
        return self.function(frame)

    def annotate(self, framedata):
        """Annotates the processed axis with given annotations for
        the provided framedata.

        Args:
            framedata: The current frame number.
        """
        # Drop last frame's annotation artists before drawing new ones.
        for artist in self.annotation_artists:
            artist.remove()
        self.annotation_artists = []
        for annotation in self.annotations:
            # annotations is sorted by frame number (see __init__), so the
            # first annotation beyond the current frame ends the scan.
            if annotation[2] > framedata:
                return
            if annotation[2] == framedata:
                pos = annotation[0:2]
                shape = self.annotations_default['shape']
                color = self.annotations_default['color']
                size = self.annotations_default['size']
                line = self.annotations_default['line']
                if len(annotation) > 3:
                    shape = annotation[3].get('shape', shape)
                    color = annotation[3].get('color', color)
                    size = annotation[3].get('size', size)
                    line = annotation[3].get('line', line)
                # Circles need a scalar radius; fall back to 30 if a
                # (width, height) tuple was supplied.
                if shape == 'CIRC' and hasattr(size, '__len__'):
                    size = 30
                # Expand a gray scalar into an RGB triple.
                if not hasattr(color, '__len__'):
                    color = (color,) * 3
                if shape == 'RECT':
                    patch = patches.Rectangle((pos[0] - size[0] // 2,
                                               pos[1] - size[1] // 2),
                                              size[0], size[1], fill=False,
                                              lw=line, fc='none', ec=color)
                elif shape == 'CIRC':
                    patch = patches.CirclePolygon(pos, radius=size, fc='none',
                                                  ec=color, lw=line)
                # NOTE(review): if shape is neither 'RECT' nor 'CIRC',
                # patch is unbound here and this raises — confirm whether
                # input validation is intended elsewhere.
                self.annotation_artists.append(patch)
                self.axes_processed.add_artist(self.annotation_artists[-1])

    def _draw_frame(self, framedata):
        """Reads, processes and draws the frames.

        If needed for color maps, conversions to gray scale are performed.
        In case the images are no color images and no custom color maps
        are defined, the colormap `gray` is applied.

        This function is called by TimedAnimation.

        Args:
            framedata: The frame data.
        """
        original = self.read_frame()
        if original is None:
            self.update_info(self.info_string(message='Finished.',
                                              frame=framedata))
            return
        if self.original is not None:
            # Side-by-side mode: work on a copy so the user function
            # cannot mutate the frame shown as "Original".
            processed = self.process_frame(original.copy())
            if self.cmap_original is not None:
                original = to_gray(original)
            elif not is_color_image(original):
                self.original.set_cmap('gray')
            self.original.set_data(original)
        else:
            processed = self.process_frame(original)
        if self.cmap_processed is not None:
            processed = to_gray(processed)
        elif not is_color_image(processed):
            self.processed.set_cmap('gray')
        if self.annotations:
            self.annotate(framedata)
        self.processed.set_data(processed)
        self.update_info(self.info_string(frame=framedata))

    def update_info(self, custom=None):
        """Updates the figure's suptitle.

        Calls self.info_string() unless custom is provided.

        Args:
            custom: Overwrite it with this string, unless None.
        """
        self.figure.suptitle(self.info_string() if custom is None else custom)

    def info_string(self, size=None, message='', frame=-1):
        """Returns information about the stream.

        Generates a string containing size, frame number, and info
        messages. Omits unnecessary information (e.g. empty messages and
        frame -1).

        This method is primarily used to update the suptitle of the plot
        figure.

        Returns:
            An info string.
        """
        info = []
        # size tuples are (height, width); display them as WxH.
        if size is not None:
            info.append('Size: {1}x{0}'.format(*size))
        elif self.size is not None:
            info.append('Size: {1}x{0}'.format(*self.size))
        if frame >= 0:
            info.append('Frame: {}'.format(frame))
        if message != '':
            info.append('{}'.format(message))
        return ' '.join(info)
|
shoeffner/cvloop | cvloop/cvloop.py | cvloop.connect_event_handlers | python | def connect_event_handlers(self):
self.figure.canvas.mpl_connect('close_event', self.evt_release)
self.figure.canvas.mpl_connect('pause_event', self.evt_toggle_pause) | Connects event handlers to the figure. | train | https://github.com/shoeffner/cvloop/blob/3ddd311e9b679d16c8fd36779931380374de343c/cvloop/cvloop.py#L237-L240 | null | class cvloop(animation.TimedAnimation): # noqa: E501 pylint: disable=invalid-name, too-many-instance-attributes
"""Uses a TimedAnimation to efficiently render video sources with blit."""
def __init__(self, source=None, function=lambda x: x, *,
side_by_side=False, convert_color=cv2.COLOR_BGR2RGB,
cmaps=None, print_info=False, annotations=None,
annotations_default={'shape': 'RECT',
'color': '#228B22',
'line': 2,
'size': (20, 20)}):
"""Runs a video loop for the specified source and modifies the stream
with the function.
The source can either be an integer for a webcam device, a string to
load a video file or a VideoCapture object. In the last case, the
capture object will not be released by this function.
The function takes in a frame and returns the modified frame. The
default value just passes the value through, it is equivalent to the
identity function.
If side_by_side is True, the input as well as the modified image are
shown side by side, otherwise only the output is shown.
If convert_color can be any value for cv2.cvtColor, e.g.
cv2.COLOR_BGR2RGB. If it is -1, no color conversion is performed,
otherwise a color conversion using cv2.cvtColor is performed before the
image is passed to the function.
Args:
source: The video source; ints for webcams/devices, a string to
load a video file. To fine tune a video source, it is
possible to pass a VideoCapture object directly.
(Default: 0)
function: The modification function.
(Default: identity function `lambda x: x`)
side_by_side: If True, both images are shown, the original and the
modified image.
(Default: False)
convert_color: Converts the image with the given value using
`cv2.cvtColor`, unless value is -1.
(Default: `cv2.COLOR_BGR2RGB`)
cmaps: If None, the plot function makes guesses about what color
maps to use (if at all). If a single value, that color map
is used for all plots (e.g. cmaps='gray'). If cmaps is a
tuple, the first value is used on the original image, the
second value for the modified image. If cmaps is a tuple,
None-entries are ignored and result in the normal guessing.
print_info: If True, prints some info about the resource:
dimensions, color channels, data type. Skips the output
of one frame.
annotations: A list or tuple of annotations. Each annotation is a
list or tuple in turn of this format:
[x, y, frame, options]
x: the x coordinate of the center
y: the y coordinate of the center
frame: the frame number
options: A dictionary. This is optional (leaving the
list with only three elements). Allows the
following keys:
shape: 'RECT' or 'CIRC' (rectangle, circle)
line: linewidth
color: RGB tuple, gray scalar or html hex-string
size: radius for CIRC, (width, height) for RECT
annotations_default: A default format, that will be used if no
specific format is given for an annotation. If no format is
specified the following defaults are used:
shape: 'RECT',
color: '#228B22', (forestgreen)
line: 2,
size: (20, 20)
"""
if plt.get_backend() in (
'module://ipykernel.pylab.backend_inline',
'nbAgg'):
# Calls IPython's magic variables
for conf in get_ipython().configurables:
if isinstance(conf, PylabMagics):
conf.matplotlib(line='notebook')
conf.matplotlib(line='notebook')
if source is not None:
if isinstance(source, type(cv2.VideoCapture())) \
or hasattr(source, 'read'):
self.capture = source
else:
self.capture = cv2.VideoCapture(source)
else:
self.capture = cv2.VideoCapture(0)
self.figure = plt.figure()
self.connect_event_handlers()
self.function = function
self.convert_color = convert_color
self.annotations = (None if not annotations else
sorted(annotations, key=lambda a: a[2]))
self.annotations_default = annotations_default
self.annotation_artists = []
self.original = None
self.processed = None
self.frame_offset = 0
try:
self.cmap_original = cmaps if isinstance(cmaps, str) else cmaps[0]
except (IndexError, TypeError):
self.cmap_original = None
try:
self.cmap_processed = cmaps if isinstance(cmaps, str) else cmaps[1]
except (IndexError, TypeError):
self.cmap_processed = None
if side_by_side:
axes_original = self.figure.add_subplot(1, 2, 1)
axes_processed = self.figure.add_subplot(1, 2, 2)
else:
axes_original = None
axes_processed = self.figure.add_subplot(1, 1, 1)
if print_info:
self.print_info(self.capture)
self.size = self.determine_size(self.capture)
self.original = prepare_axes(axes_original, 'Original',
self.size, self.cmap_original)
self.processed = prepare_axes(axes_processed, 'Processed',
self.size, self.cmap_processed)
self.axes_processed = axes_processed
self.update_info()
super().__init__(self.figure, interval=50, blit=True)
plt.show()
def evt_release(self, *args): # pylint: disable=unused-argument
"""Tries to release the capture."""
try:
self.capture.release()
except AttributeError:
pass
def evt_toggle_pause(self, *args): # pylint: disable=unused-argument
"""Pauses and resumes the video source."""
if self.event_source._timer is None: # noqa: e501 pylint: disable=protected-access
self.event_source.start()
else:
self.event_source.stop()
def print_info(self, capture):
"""Prints information about the unprocessed image.
Reads one frame from the source to determine image colors, dimensions
and data types.
Args:
capture: the source to read from.
"""
self.frame_offset += 1
ret, frame = capture.read()
if ret:
print('Capture Information')
print('\tDimensions (HxW): {}x{}'.format(*frame.shape[0:2]))
print('\tColor channels: {}'.format(frame.shape[2] if
len(frame.shape) > 2 else 1))
print('\tColor range: {}-{}'.format(np.min(frame),
np.max(frame)))
print('\tdtype: {}'.format(frame.dtype))
else:
print('No source found.')
def determine_size(self, capture):
"""Determines the height and width of the image source.
If no dimensions are available, this method defaults to a resolution of
640x480, thus returns (480, 640).
If capture has a get method it is assumed to understand
`cv2.CAP_PROP_FRAME_WIDTH` and `cv2.CAP_PROP_FRAME_HEIGHT` to get the
information. Otherwise it reads one frame from the source to determine
image dimensions.
Args:
capture: the source to read from.
Returns:
A tuple containing integers of height and width (simple casts).
"""
width = 640
height = 480
if capture and hasattr(capture, 'get'):
width = capture.get(cv2.CAP_PROP_FRAME_WIDTH)
height = capture.get(cv2.CAP_PROP_FRAME_HEIGHT)
else:
self.frame_offset += 1
ret, frame = capture.read()
if ret:
width = frame.shape[1]
height = frame.shape[0]
return (int(height), int(width))
def new_frame_seq(self):
"""Returns an endless frame counter.
Starts at self.frame_offset, in case some methods had to read frames
beforehand to gather information.
This function is called by TimedAnimation.
Returns:
an endless frame count
"""
return itertools.count(self.frame_offset)
def _init_draw(self):
"""Initializes the drawing of the frames by setting the images to
random colors.
This function is called by TimedAnimation.
"""
if self.original is not None:
self.original.set_data(np.random.random((10, 10, 3)))
self.processed.set_data(np.random.random((10, 10, 3)))
def read_frame(self):
"""Reads a frame and converts the color if needed.
In case no frame is available, i.e. self.capture.read() returns False
as the first return value, the event_source of the TimedAnimation is
stopped, and if possible the capture source released.
Returns:
None if stopped, otherwise the color converted source image.
"""
ret, frame = self.capture.read()
if not ret:
self.event_source.stop()
try:
self.capture.release()
except AttributeError:
# has no release method, thus just pass
pass
return None
if self.convert_color != -1 and is_color_image(frame):
return cv2.cvtColor(frame, self.convert_color)
return frame
def process_frame(self, frame):
"""Processes a frame with the user specified function.
Args:
frame: The input frame.
Returns:
The processed frame.
"""
return self.function(frame)
def annotate(self, framedata):
"""Annotates the processed axis with given annotations for
the provided framedata.
Args:
framedata: The current frame number.
"""
for artist in self.annotation_artists:
artist.remove()
self.annotation_artists = []
for annotation in self.annotations:
if annotation[2] > framedata:
return
if annotation[2] == framedata:
pos = annotation[0:2]
shape = self.annotations_default['shape']
color = self.annotations_default['color']
size = self.annotations_default['size']
line = self.annotations_default['line']
if len(annotation) > 3:
shape = annotation[3].get('shape', shape)
color = annotation[3].get('color', color)
size = annotation[3].get('size', size)
line = annotation[3].get('line', line)
if shape == 'CIRC' and hasattr(size, '__len__'):
size = 30
if not hasattr(color, '__len__'):
color = (color,) * 3
if shape == 'RECT':
patch = patches.Rectangle((pos[0] - size[0] // 2,
pos[1] - size[1] // 2),
size[0], size[1], fill=False,
lw=line, fc='none', ec=color)
elif shape == 'CIRC':
patch = patches.CirclePolygon(pos, radius=size, fc='none',
ec=color, lw=line)
self.annotation_artists.append(patch)
self.axes_processed.add_artist(self.annotation_artists[-1])
def _draw_frame(self, framedata):
"""Reads, processes and draws the frames.
If needed for color maps, conversions to gray scale are performed. In
case the images are no color images and no custom color maps are
defined, the colormap `gray` is applied.
This function is called by TimedAnimation.
Args:
framedata: The frame data.
"""
original = self.read_frame()
if original is None:
self.update_info(self.info_string(message='Finished.',
frame=framedata))
return
if self.original is not None:
processed = self.process_frame(original.copy())
if self.cmap_original is not None:
original = to_gray(original)
elif not is_color_image(original):
self.original.set_cmap('gray')
self.original.set_data(original)
else:
processed = self.process_frame(original)
if self.cmap_processed is not None:
processed = to_gray(processed)
elif not is_color_image(processed):
self.processed.set_cmap('gray')
if self.annotations:
self.annotate(framedata)
self.processed.set_data(processed)
self.update_info(self.info_string(frame=framedata))
def update_info(self, custom=None):
"""Updates the figure's suptitle.
Calls self.info_string() unless custom is provided.
Args:
custom: Overwrite it with this string, unless None.
"""
self.figure.suptitle(self.info_string() if custom is None else custom)
def info_string(self, size=None, message='', frame=-1):
"""Returns information about the stream.
Generates a string containing size, frame number, and info messages.
Omits unnecessary information (e.g. empty messages and frame -1).
This method is primarily used to update the suptitle of the plot
figure.
Returns:
An info string.
"""
info = []
if size is not None:
info.append('Size: {1}x{0}'.format(*size))
elif self.size is not None:
info.append('Size: {1}x{0}'.format(*self.size))
if frame >= 0:
info.append('Frame: {}'.format(frame))
if message != '':
info.append('{}'.format(message))
return ' '.join(info)
|
shoeffner/cvloop | cvloop/cvloop.py | cvloop.evt_toggle_pause | python | def evt_toggle_pause(self, *args): # pylint: disable=unused-argument
if self.event_source._timer is None: # noqa: e501 pylint: disable=protected-access
self.event_source.start()
else:
self.event_source.stop() | Pauses and resumes the video source. | train | https://github.com/shoeffner/cvloop/blob/3ddd311e9b679d16c8fd36779931380374de343c/cvloop/cvloop.py#L249-L254 | null | class cvloop(animation.TimedAnimation): # noqa: E501 pylint: disable=invalid-name, too-many-instance-attributes
"""Uses a TimedAnimation to efficiently render video sources with blit."""
def __init__(self, source=None, function=lambda x: x, *,
side_by_side=False, convert_color=cv2.COLOR_BGR2RGB,
cmaps=None, print_info=False, annotations=None,
annotations_default={'shape': 'RECT',
'color': '#228B22',
'line': 2,
'size': (20, 20)}):
"""Runs a video loop for the specified source and modifies the stream
with the function.
The source can either be an integer for a webcam device, a string to
load a video file or a VideoCapture object. In the last case, the
capture object will not be released by this function.
The function takes in a frame and returns the modified frame. The
default value just passes the value through, it is equivalent to the
identity function.
If side_by_side is True, the input as well as the modified image are
shown side by side, otherwise only the output is shown.
If convert_color can be any value for cv2.cvtColor, e.g.
cv2.COLOR_BGR2RGB. If it is -1, no color conversion is performed,
otherwise a color conversion using cv2.cvtColor is performed before the
image is passed to the function.
Args:
source: The video source; ints for webcams/devices, a string to
load a video file. To fine tune a video source, it is
possible to pass a VideoCapture object directly.
(Default: 0)
function: The modification function.
(Default: identity function `lambda x: x`)
side_by_side: If True, both images are shown, the original and the
modified image.
(Default: False)
convert_color: Converts the image with the given value using
`cv2.cvtColor`, unless value is -1.
(Default: `cv2.COLOR_BGR2RGB`)
cmaps: If None, the plot function makes guesses about what color
maps to use (if at all). If a single value, that color map
is used for all plots (e.g. cmaps='gray'). If cmaps is a
tuple, the first value is used on the original image, the
second value for the modified image. If cmaps is a tuple,
None-entries are ignored and result in the normal guessing.
print_info: If True, prints some info about the resource:
dimensions, color channels, data type. Skips the output
of one frame.
annotations: A list or tuple of annotations. Each annotation is a
list or tuple in turn of this format:
[x, y, frame, options]
x: the x coordinate of the center
y: the y coordinate of the center
frame: the frame number
options: A dictionary. This is optional (leaving the
list with only three elements). Allows the
following keys:
shape: 'RECT' or 'CIRC' (rectangle, circle)
line: linewidth
color: RGB tuple, gray scalar or html hex-string
size: radius for CIRC, (width, height) for RECT
annotations_default: A default format, that will be used if no
specific format is given for an annotation. If no format is
specified the following defaults are used:
shape: 'RECT',
color: '#228B22', (forestgreen)
line: 2,
size: (20, 20)
"""
if plt.get_backend() in (
'module://ipykernel.pylab.backend_inline',
'nbAgg'):
# Calls IPython's magic variables
for conf in get_ipython().configurables:
if isinstance(conf, PylabMagics):
conf.matplotlib(line='notebook')
conf.matplotlib(line='notebook')
if source is not None:
if isinstance(source, type(cv2.VideoCapture())) \
or hasattr(source, 'read'):
self.capture = source
else:
self.capture = cv2.VideoCapture(source)
else:
self.capture = cv2.VideoCapture(0)
self.figure = plt.figure()
self.connect_event_handlers()
self.function = function
self.convert_color = convert_color
self.annotations = (None if not annotations else
sorted(annotations, key=lambda a: a[2]))
self.annotations_default = annotations_default
self.annotation_artists = []
self.original = None
self.processed = None
self.frame_offset = 0
try:
self.cmap_original = cmaps if isinstance(cmaps, str) else cmaps[0]
except (IndexError, TypeError):
self.cmap_original = None
try:
self.cmap_processed = cmaps if isinstance(cmaps, str) else cmaps[1]
except (IndexError, TypeError):
self.cmap_processed = None
if side_by_side:
axes_original = self.figure.add_subplot(1, 2, 1)
axes_processed = self.figure.add_subplot(1, 2, 2)
else:
axes_original = None
axes_processed = self.figure.add_subplot(1, 1, 1)
if print_info:
self.print_info(self.capture)
self.size = self.determine_size(self.capture)
self.original = prepare_axes(axes_original, 'Original',
self.size, self.cmap_original)
self.processed = prepare_axes(axes_processed, 'Processed',
self.size, self.cmap_processed)
self.axes_processed = axes_processed
self.update_info()
super().__init__(self.figure, interval=50, blit=True)
plt.show()
def connect_event_handlers(self):
"""Connects event handlers to the figure."""
self.figure.canvas.mpl_connect('close_event', self.evt_release)
self.figure.canvas.mpl_connect('pause_event', self.evt_toggle_pause)
def evt_release(self, *args): # pylint: disable=unused-argument
"""Tries to release the capture."""
try:
self.capture.release()
except AttributeError:
pass
def print_info(self, capture):
"""Prints information about the unprocessed image.
Reads one frame from the source to determine image colors, dimensions
and data types.
Args:
capture: the source to read from.
"""
self.frame_offset += 1
ret, frame = capture.read()
if ret:
print('Capture Information')
print('\tDimensions (HxW): {}x{}'.format(*frame.shape[0:2]))
print('\tColor channels: {}'.format(frame.shape[2] if
len(frame.shape) > 2 else 1))
print('\tColor range: {}-{}'.format(np.min(frame),
np.max(frame)))
print('\tdtype: {}'.format(frame.dtype))
else:
print('No source found.')
def determine_size(self, capture):
"""Determines the height and width of the image source.
If no dimensions are available, this method defaults to a resolution of
640x480, thus returns (480, 640).
If capture has a get method it is assumed to understand
`cv2.CAP_PROP_FRAME_WIDTH` and `cv2.CAP_PROP_FRAME_HEIGHT` to get the
information. Otherwise it reads one frame from the source to determine
image dimensions.
Args:
capture: the source to read from.
Returns:
A tuple containing integers of height and width (simple casts).
"""
width = 640
height = 480
if capture and hasattr(capture, 'get'):
width = capture.get(cv2.CAP_PROP_FRAME_WIDTH)
height = capture.get(cv2.CAP_PROP_FRAME_HEIGHT)
else:
self.frame_offset += 1
ret, frame = capture.read()
if ret:
width = frame.shape[1]
height = frame.shape[0]
return (int(height), int(width))
def new_frame_seq(self):
"""Returns an endless frame counter.
Starts at self.frame_offset, in case some methods had to read frames
beforehand to gather information.
This function is called by TimedAnimation.
Returns:
an endless frame count
"""
return itertools.count(self.frame_offset)
def _init_draw(self):
"""Initializes the drawing of the frames by setting the images to
random colors.
This function is called by TimedAnimation.
"""
if self.original is not None:
self.original.set_data(np.random.random((10, 10, 3)))
self.processed.set_data(np.random.random((10, 10, 3)))
def read_frame(self):
"""Reads a frame and converts the color if needed.
In case no frame is available, i.e. self.capture.read() returns False
as the first return value, the event_source of the TimedAnimation is
stopped, and if possible the capture source released.
Returns:
None if stopped, otherwise the color converted source image.
"""
ret, frame = self.capture.read()
if not ret:
self.event_source.stop()
try:
self.capture.release()
except AttributeError:
# has no release method, thus just pass
pass
return None
if self.convert_color != -1 and is_color_image(frame):
return cv2.cvtColor(frame, self.convert_color)
return frame
def process_frame(self, frame):
"""Processes a frame with the user specified function.
Args:
frame: The input frame.
Returns:
The processed frame.
"""
return self.function(frame)
def annotate(self, framedata):
"""Annotates the processed axis with given annotations for
the provided framedata.
Args:
framedata: The current frame number.
"""
for artist in self.annotation_artists:
artist.remove()
self.annotation_artists = []
for annotation in self.annotations:
if annotation[2] > framedata:
return
if annotation[2] == framedata:
pos = annotation[0:2]
shape = self.annotations_default['shape']
color = self.annotations_default['color']
size = self.annotations_default['size']
line = self.annotations_default['line']
if len(annotation) > 3:
shape = annotation[3].get('shape', shape)
color = annotation[3].get('color', color)
size = annotation[3].get('size', size)
line = annotation[3].get('line', line)
if shape == 'CIRC' and hasattr(size, '__len__'):
size = 30
if not hasattr(color, '__len__'):
color = (color,) * 3
if shape == 'RECT':
patch = patches.Rectangle((pos[0] - size[0] // 2,
pos[1] - size[1] // 2),
size[0], size[1], fill=False,
lw=line, fc='none', ec=color)
elif shape == 'CIRC':
patch = patches.CirclePolygon(pos, radius=size, fc='none',
ec=color, lw=line)
self.annotation_artists.append(patch)
self.axes_processed.add_artist(self.annotation_artists[-1])
def _draw_frame(self, framedata):
"""Reads, processes and draws the frames.
If needed for color maps, conversions to gray scale are performed. In
case the images are no color images and no custom color maps are
defined, the colormap `gray` is applied.
This function is called by TimedAnimation.
Args:
framedata: The frame data.
"""
original = self.read_frame()
if original is None:
self.update_info(self.info_string(message='Finished.',
frame=framedata))
return
if self.original is not None:
processed = self.process_frame(original.copy())
if self.cmap_original is not None:
original = to_gray(original)
elif not is_color_image(original):
self.original.set_cmap('gray')
self.original.set_data(original)
else:
processed = self.process_frame(original)
if self.cmap_processed is not None:
processed = to_gray(processed)
elif not is_color_image(processed):
self.processed.set_cmap('gray')
if self.annotations:
self.annotate(framedata)
self.processed.set_data(processed)
self.update_info(self.info_string(frame=framedata))
def update_info(self, custom=None):
"""Updates the figure's suptitle.
Calls self.info_string() unless custom is provided.
Args:
custom: Overwrite it with this string, unless None.
"""
self.figure.suptitle(self.info_string() if custom is None else custom)
def info_string(self, size=None, message='', frame=-1):
"""Returns information about the stream.
Generates a string containing size, frame number, and info messages.
Omits unnecessary information (e.g. empty messages and frame -1).
This method is primarily used to update the suptitle of the plot
figure.
Returns:
An info string.
"""
info = []
if size is not None:
info.append('Size: {1}x{0}'.format(*size))
elif self.size is not None:
info.append('Size: {1}x{0}'.format(*self.size))
if frame >= 0:
info.append('Frame: {}'.format(frame))
if message != '':
info.append('{}'.format(message))
return ' '.join(info)
|
shoeffner/cvloop | cvloop/cvloop.py | cvloop.print_info | python | def print_info(self, capture):
self.frame_offset += 1
ret, frame = capture.read()
if ret:
print('Capture Information')
print('\tDimensions (HxW): {}x{}'.format(*frame.shape[0:2]))
print('\tColor channels: {}'.format(frame.shape[2] if
len(frame.shape) > 2 else 1))
print('\tColor range: {}-{}'.format(np.min(frame),
np.max(frame)))
print('\tdtype: {}'.format(frame.dtype))
else:
print('No source found.') | Prints information about the unprocessed image.
Reads one frame from the source to determine image colors, dimensions
and data types.
Args:
capture: the source to read from. | train | https://github.com/shoeffner/cvloop/blob/3ddd311e9b679d16c8fd36779931380374de343c/cvloop/cvloop.py#L256-L276 | null | class cvloop(animation.TimedAnimation): # noqa: E501 pylint: disable=invalid-name, too-many-instance-attributes
"""Uses a TimedAnimation to efficiently render video sources with blit."""
def __init__(self, source=None, function=lambda x: x, *,
side_by_side=False, convert_color=cv2.COLOR_BGR2RGB,
cmaps=None, print_info=False, annotations=None,
annotations_default={'shape': 'RECT',
'color': '#228B22',
'line': 2,
'size': (20, 20)}):
"""Runs a video loop for the specified source and modifies the stream
with the function.
The source can either be an integer for a webcam device, a string to
load a video file or a VideoCapture object. In the last case, the
capture object will not be released by this function.
The function takes in a frame and returns the modified frame. The
default value just passes the value through, it is equivalent to the
identity function.
If side_by_side is True, the input as well as the modified image are
shown side by side, otherwise only the output is shown.
If convert_color can be any value for cv2.cvtColor, e.g.
cv2.COLOR_BGR2RGB. If it is -1, no color conversion is performed,
otherwise a color conversion using cv2.cvtColor is performed before the
image is passed to the function.
Args:
source: The video source; ints for webcams/devices, a string to
load a video file. To fine tune a video source, it is
possible to pass a VideoCapture object directly.
(Default: 0)
function: The modification function.
(Default: identity function `lambda x: x`)
side_by_side: If True, both images are shown, the original and the
modified image.
(Default: False)
convert_color: Converts the image with the given value using
`cv2.cvtColor`, unless value is -1.
(Default: `cv2.COLOR_BGR2RGB`)
cmaps: If None, the plot function makes guesses about what color
maps to use (if at all). If a single value, that color map
is used for all plots (e.g. cmaps='gray'). If cmaps is a
tuple, the first value is used on the original image, the
second value for the modified image. If cmaps is a tuple,
None-entries are ignored and result in the normal guessing.
print_info: If True, prints some info about the resource:
dimensions, color channels, data type. Skips the output
of one frame.
annotations: A list or tuple of annotations. Each annotation is a
list or tuple in turn of this format:
[x, y, frame, options]
x: the x coordinate of the center
y: the y coordinate of the center
frame: the frame number
options: A dictionary. This is optional (leaving the
list with only three elements). Allows the
following keys:
shape: 'RECT' or 'CIRC' (rectangle, circle)
line: linewidth
color: RGB tuple, gray scalar or html hex-string
size: radius for CIRC, (width, height) for RECT
annotations_default: A default format, that will be used if no
specific format is given for an annotation. If no format is
specified the following defaults are used:
shape: 'RECT',
color: '#228B22', (forestgreen)
line: 2,
size: (20, 20)
"""
if plt.get_backend() in (
'module://ipykernel.pylab.backend_inline',
'nbAgg'):
# Calls IPython's magic variables
for conf in get_ipython().configurables:
if isinstance(conf, PylabMagics):
conf.matplotlib(line='notebook')
conf.matplotlib(line='notebook')
if source is not None:
if isinstance(source, type(cv2.VideoCapture())) \
or hasattr(source, 'read'):
self.capture = source
else:
self.capture = cv2.VideoCapture(source)
else:
self.capture = cv2.VideoCapture(0)
self.figure = plt.figure()
self.connect_event_handlers()
self.function = function
self.convert_color = convert_color
self.annotations = (None if not annotations else
sorted(annotations, key=lambda a: a[2]))
self.annotations_default = annotations_default
self.annotation_artists = []
self.original = None
self.processed = None
self.frame_offset = 0
try:
self.cmap_original = cmaps if isinstance(cmaps, str) else cmaps[0]
except (IndexError, TypeError):
self.cmap_original = None
try:
self.cmap_processed = cmaps if isinstance(cmaps, str) else cmaps[1]
except (IndexError, TypeError):
self.cmap_processed = None
if side_by_side:
axes_original = self.figure.add_subplot(1, 2, 1)
axes_processed = self.figure.add_subplot(1, 2, 2)
else:
axes_original = None
axes_processed = self.figure.add_subplot(1, 1, 1)
if print_info:
self.print_info(self.capture)
self.size = self.determine_size(self.capture)
self.original = prepare_axes(axes_original, 'Original',
self.size, self.cmap_original)
self.processed = prepare_axes(axes_processed, 'Processed',
self.size, self.cmap_processed)
self.axes_processed = axes_processed
self.update_info()
super().__init__(self.figure, interval=50, blit=True)
plt.show()
def connect_event_handlers(self):
"""Connects event handlers to the figure."""
self.figure.canvas.mpl_connect('close_event', self.evt_release)
self.figure.canvas.mpl_connect('pause_event', self.evt_toggle_pause)
def evt_release(self, *args): # pylint: disable=unused-argument
"""Tries to release the capture."""
try:
self.capture.release()
except AttributeError:
pass
def evt_toggle_pause(self, *args): # pylint: disable=unused-argument
"""Pauses and resumes the video source."""
if self.event_source._timer is None: # noqa: e501 pylint: disable=protected-access
self.event_source.start()
else:
self.event_source.stop()
def determine_size(self, capture):
"""Determines the height and width of the image source.
If no dimensions are available, this method defaults to a resolution of
640x480, thus returns (480, 640).
If capture has a get method it is assumed to understand
`cv2.CAP_PROP_FRAME_WIDTH` and `cv2.CAP_PROP_FRAME_HEIGHT` to get the
information. Otherwise it reads one frame from the source to determine
image dimensions.
Args:
capture: the source to read from.
Returns:
A tuple containing integers of height and width (simple casts).
"""
width = 640
height = 480
if capture and hasattr(capture, 'get'):
width = capture.get(cv2.CAP_PROP_FRAME_WIDTH)
height = capture.get(cv2.CAP_PROP_FRAME_HEIGHT)
else:
self.frame_offset += 1
ret, frame = capture.read()
if ret:
width = frame.shape[1]
height = frame.shape[0]
return (int(height), int(width))
def new_frame_seq(self):
"""Returns an endless frame counter.
Starts at self.frame_offset, in case some methods had to read frames
beforehand to gather information.
This function is called by TimedAnimation.
Returns:
an endless frame count
"""
return itertools.count(self.frame_offset)
def _init_draw(self):
"""Initializes the drawing of the frames by setting the images to
random colors.
This function is called by TimedAnimation.
"""
if self.original is not None:
self.original.set_data(np.random.random((10, 10, 3)))
self.processed.set_data(np.random.random((10, 10, 3)))
def read_frame(self):
"""Reads a frame and converts the color if needed.
In case no frame is available, i.e. self.capture.read() returns False
as the first return value, the event_source of the TimedAnimation is
stopped, and if possible the capture source released.
Returns:
None if stopped, otherwise the color converted source image.
"""
ret, frame = self.capture.read()
if not ret:
self.event_source.stop()
try:
self.capture.release()
except AttributeError:
# has no release method, thus just pass
pass
return None
if self.convert_color != -1 and is_color_image(frame):
return cv2.cvtColor(frame, self.convert_color)
return frame
def process_frame(self, frame):
"""Processes a frame with the user specified function.
Args:
frame: The input frame.
Returns:
The processed frame.
"""
return self.function(frame)
def annotate(self, framedata):
"""Annotates the processed axis with given annotations for
the provided framedata.
Args:
framedata: The current frame number.
"""
for artist in self.annotation_artists:
artist.remove()
self.annotation_artists = []
for annotation in self.annotations:
if annotation[2] > framedata:
return
if annotation[2] == framedata:
pos = annotation[0:2]
shape = self.annotations_default['shape']
color = self.annotations_default['color']
size = self.annotations_default['size']
line = self.annotations_default['line']
if len(annotation) > 3:
shape = annotation[3].get('shape', shape)
color = annotation[3].get('color', color)
size = annotation[3].get('size', size)
line = annotation[3].get('line', line)
if shape == 'CIRC' and hasattr(size, '__len__'):
size = 30
if not hasattr(color, '__len__'):
color = (color,) * 3
if shape == 'RECT':
patch = patches.Rectangle((pos[0] - size[0] // 2,
pos[1] - size[1] // 2),
size[0], size[1], fill=False,
lw=line, fc='none', ec=color)
elif shape == 'CIRC':
patch = patches.CirclePolygon(pos, radius=size, fc='none',
ec=color, lw=line)
self.annotation_artists.append(patch)
self.axes_processed.add_artist(self.annotation_artists[-1])
def _draw_frame(self, framedata):
"""Reads, processes and draws the frames.
If needed for color maps, conversions to gray scale are performed. In
case the images are no color images and no custom color maps are
defined, the colormap `gray` is applied.
This function is called by TimedAnimation.
Args:
framedata: The frame data.
"""
original = self.read_frame()
if original is None:
self.update_info(self.info_string(message='Finished.',
frame=framedata))
return
if self.original is not None:
processed = self.process_frame(original.copy())
if self.cmap_original is not None:
original = to_gray(original)
elif not is_color_image(original):
self.original.set_cmap('gray')
self.original.set_data(original)
else:
processed = self.process_frame(original)
if self.cmap_processed is not None:
processed = to_gray(processed)
elif not is_color_image(processed):
self.processed.set_cmap('gray')
if self.annotations:
self.annotate(framedata)
self.processed.set_data(processed)
self.update_info(self.info_string(frame=framedata))
def update_info(self, custom=None):
"""Updates the figure's suptitle.
Calls self.info_string() unless custom is provided.
Args:
custom: Overwrite it with this string, unless None.
"""
self.figure.suptitle(self.info_string() if custom is None else custom)
def info_string(self, size=None, message='', frame=-1):
"""Returns information about the stream.
Generates a string containing size, frame number, and info messages.
Omits unnecessary information (e.g. empty messages and frame -1).
This method is primarily used to update the suptitle of the plot
figure.
Returns:
An info string.
"""
info = []
if size is not None:
info.append('Size: {1}x{0}'.format(*size))
elif self.size is not None:
info.append('Size: {1}x{0}'.format(*self.size))
if frame >= 0:
info.append('Frame: {}'.format(frame))
if message != '':
info.append('{}'.format(message))
return ' '.join(info)
|
shoeffner/cvloop | cvloop/cvloop.py | cvloop.determine_size | python | def determine_size(self, capture):
width = 640
height = 480
if capture and hasattr(capture, 'get'):
width = capture.get(cv2.CAP_PROP_FRAME_WIDTH)
height = capture.get(cv2.CAP_PROP_FRAME_HEIGHT)
else:
self.frame_offset += 1
ret, frame = capture.read()
if ret:
width = frame.shape[1]
height = frame.shape[0]
return (int(height), int(width)) | Determines the height and width of the image source.
If no dimensions are available, this method defaults to a resolution of
640x480, thus returns (480, 640).
If capture has a get method it is assumed to understand
`cv2.CAP_PROP_FRAME_WIDTH` and `cv2.CAP_PROP_FRAME_HEIGHT` to get the
information. Otherwise it reads one frame from the source to determine
image dimensions.
Args:
capture: the source to read from.
Returns:
A tuple containing integers of height and width (simple casts). | train | https://github.com/shoeffner/cvloop/blob/3ddd311e9b679d16c8fd36779931380374de343c/cvloop/cvloop.py#L278-L305 | null | class cvloop(animation.TimedAnimation): # noqa: E501 pylint: disable=invalid-name, too-many-instance-attributes
"""Uses a TimedAnimation to efficiently render video sources with blit."""
def __init__(self, source=None, function=lambda x: x, *,
side_by_side=False, convert_color=cv2.COLOR_BGR2RGB,
cmaps=None, print_info=False, annotations=None,
annotations_default={'shape': 'RECT',
'color': '#228B22',
'line': 2,
'size': (20, 20)}):
"""Runs a video loop for the specified source and modifies the stream
with the function.
The source can either be an integer for a webcam device, a string to
load a video file or a VideoCapture object. In the last case, the
capture object will not be released by this function.
The function takes in a frame and returns the modified frame. The
default value just passes the value through, it is equivalent to the
identity function.
If side_by_side is True, the input as well as the modified image are
shown side by side, otherwise only the output is shown.
If convert_color can be any value for cv2.cvtColor, e.g.
cv2.COLOR_BGR2RGB. If it is -1, no color conversion is performed,
otherwise a color conversion using cv2.cvtColor is performed before the
image is passed to the function.
Args:
source: The video source; ints for webcams/devices, a string to
load a video file. To fine tune a video source, it is
possible to pass a VideoCapture object directly.
(Default: 0)
function: The modification function.
(Default: identity function `lambda x: x`)
side_by_side: If True, both images are shown, the original and the
modified image.
(Default: False)
convert_color: Converts the image with the given value using
`cv2.cvtColor`, unless value is -1.
(Default: `cv2.COLOR_BGR2RGB`)
cmaps: If None, the plot function makes guesses about what color
maps to use (if at all). If a single value, that color map
is used for all plots (e.g. cmaps='gray'). If cmaps is a
tuple, the first value is used on the original image, the
second value for the modified image. If cmaps is a tuple,
None-entries are ignored and result in the normal guessing.
print_info: If True, prints some info about the resource:
dimensions, color channels, data type. Skips the output
of one frame.
annotations: A list or tuple of annotations. Each annotation is a
list or tuple in turn of this format:
[x, y, frame, options]
x: the x coordinate of the center
y: the y coordinate of the center
frame: the frame number
options: A dictionary. This is optional (leaving the
list with only three elements). Allows the
following keys:
shape: 'RECT' or 'CIRC' (rectangle, circle)
line: linewidth
color: RGB tuple, gray scalar or html hex-string
size: radius for CIRC, (width, height) for RECT
annotations_default: A default format, that will be used if no
specific format is given for an annotation. If no format is
specified the following defaults are used:
shape: 'RECT',
color: '#228B22', (forestgreen)
line: 2,
size: (20, 20)
"""
if plt.get_backend() in (
'module://ipykernel.pylab.backend_inline',
'nbAgg'):
# Calls IPython's magic variables
for conf in get_ipython().configurables:
if isinstance(conf, PylabMagics):
conf.matplotlib(line='notebook')
conf.matplotlib(line='notebook')
if source is not None:
if isinstance(source, type(cv2.VideoCapture())) \
or hasattr(source, 'read'):
self.capture = source
else:
self.capture = cv2.VideoCapture(source)
else:
self.capture = cv2.VideoCapture(0)
self.figure = plt.figure()
self.connect_event_handlers()
self.function = function
self.convert_color = convert_color
self.annotations = (None if not annotations else
sorted(annotations, key=lambda a: a[2]))
self.annotations_default = annotations_default
self.annotation_artists = []
self.original = None
self.processed = None
self.frame_offset = 0
try:
self.cmap_original = cmaps if isinstance(cmaps, str) else cmaps[0]
except (IndexError, TypeError):
self.cmap_original = None
try:
self.cmap_processed = cmaps if isinstance(cmaps, str) else cmaps[1]
except (IndexError, TypeError):
self.cmap_processed = None
if side_by_side:
axes_original = self.figure.add_subplot(1, 2, 1)
axes_processed = self.figure.add_subplot(1, 2, 2)
else:
axes_original = None
axes_processed = self.figure.add_subplot(1, 1, 1)
if print_info:
self.print_info(self.capture)
self.size = self.determine_size(self.capture)
self.original = prepare_axes(axes_original, 'Original',
self.size, self.cmap_original)
self.processed = prepare_axes(axes_processed, 'Processed',
self.size, self.cmap_processed)
self.axes_processed = axes_processed
self.update_info()
super().__init__(self.figure, interval=50, blit=True)
plt.show()
def connect_event_handlers(self):
"""Connects event handlers to the figure."""
self.figure.canvas.mpl_connect('close_event', self.evt_release)
self.figure.canvas.mpl_connect('pause_event', self.evt_toggle_pause)
def evt_release(self, *args): # pylint: disable=unused-argument
"""Tries to release the capture."""
try:
self.capture.release()
except AttributeError:
pass
def evt_toggle_pause(self, *args): # pylint: disable=unused-argument
"""Pauses and resumes the video source."""
if self.event_source._timer is None: # noqa: e501 pylint: disable=protected-access
self.event_source.start()
else:
self.event_source.stop()
def print_info(self, capture):
"""Prints information about the unprocessed image.
Reads one frame from the source to determine image colors, dimensions
and data types.
Args:
capture: the source to read from.
"""
self.frame_offset += 1
ret, frame = capture.read()
if ret:
print('Capture Information')
print('\tDimensions (HxW): {}x{}'.format(*frame.shape[0:2]))
print('\tColor channels: {}'.format(frame.shape[2] if
len(frame.shape) > 2 else 1))
print('\tColor range: {}-{}'.format(np.min(frame),
np.max(frame)))
print('\tdtype: {}'.format(frame.dtype))
else:
print('No source found.')
def new_frame_seq(self):
"""Returns an endless frame counter.
Starts at self.frame_offset, in case some methods had to read frames
beforehand to gather information.
This function is called by TimedAnimation.
Returns:
an endless frame count
"""
return itertools.count(self.frame_offset)
def _init_draw(self):
"""Initializes the drawing of the frames by setting the images to
random colors.
This function is called by TimedAnimation.
"""
if self.original is not None:
self.original.set_data(np.random.random((10, 10, 3)))
self.processed.set_data(np.random.random((10, 10, 3)))
def read_frame(self):
"""Reads a frame and converts the color if needed.
In case no frame is available, i.e. self.capture.read() returns False
as the first return value, the event_source of the TimedAnimation is
stopped, and if possible the capture source released.
Returns:
None if stopped, otherwise the color converted source image.
"""
ret, frame = self.capture.read()
if not ret:
self.event_source.stop()
try:
self.capture.release()
except AttributeError:
# has no release method, thus just pass
pass
return None
if self.convert_color != -1 and is_color_image(frame):
return cv2.cvtColor(frame, self.convert_color)
return frame
def process_frame(self, frame):
"""Processes a frame with the user specified function.
Args:
frame: The input frame.
Returns:
The processed frame.
"""
return self.function(frame)
def annotate(self, framedata):
"""Annotates the processed axis with given annotations for
the provided framedata.
Args:
framedata: The current frame number.
"""
for artist in self.annotation_artists:
artist.remove()
self.annotation_artists = []
for annotation in self.annotations:
if annotation[2] > framedata:
return
if annotation[2] == framedata:
pos = annotation[0:2]
shape = self.annotations_default['shape']
color = self.annotations_default['color']
size = self.annotations_default['size']
line = self.annotations_default['line']
if len(annotation) > 3:
shape = annotation[3].get('shape', shape)
color = annotation[3].get('color', color)
size = annotation[3].get('size', size)
line = annotation[3].get('line', line)
if shape == 'CIRC' and hasattr(size, '__len__'):
size = 30
if not hasattr(color, '__len__'):
color = (color,) * 3
if shape == 'RECT':
patch = patches.Rectangle((pos[0] - size[0] // 2,
pos[1] - size[1] // 2),
size[0], size[1], fill=False,
lw=line, fc='none', ec=color)
elif shape == 'CIRC':
patch = patches.CirclePolygon(pos, radius=size, fc='none',
ec=color, lw=line)
self.annotation_artists.append(patch)
self.axes_processed.add_artist(self.annotation_artists[-1])
def _draw_frame(self, framedata):
"""Reads, processes and draws the frames.
If needed for color maps, conversions to gray scale are performed. In
case the images are no color images and no custom color maps are
defined, the colormap `gray` is applied.
This function is called by TimedAnimation.
Args:
framedata: The frame data.
"""
original = self.read_frame()
if original is None:
self.update_info(self.info_string(message='Finished.',
frame=framedata))
return
if self.original is not None:
processed = self.process_frame(original.copy())
if self.cmap_original is not None:
original = to_gray(original)
elif not is_color_image(original):
self.original.set_cmap('gray')
self.original.set_data(original)
else:
processed = self.process_frame(original)
if self.cmap_processed is not None:
processed = to_gray(processed)
elif not is_color_image(processed):
self.processed.set_cmap('gray')
if self.annotations:
self.annotate(framedata)
self.processed.set_data(processed)
self.update_info(self.info_string(frame=framedata))
def update_info(self, custom=None):
"""Updates the figure's suptitle.
Calls self.info_string() unless custom is provided.
Args:
custom: Overwrite it with this string, unless None.
"""
self.figure.suptitle(self.info_string() if custom is None else custom)
def info_string(self, size=None, message='', frame=-1):
"""Returns information about the stream.
Generates a string containing size, frame number, and info messages.
Omits unnecessary information (e.g. empty messages and frame -1).
This method is primarily used to update the suptitle of the plot
figure.
Returns:
An info string.
"""
info = []
if size is not None:
info.append('Size: {1}x{0}'.format(*size))
elif self.size is not None:
info.append('Size: {1}x{0}'.format(*self.size))
if frame >= 0:
info.append('Frame: {}'.format(frame))
if message != '':
info.append('{}'.format(message))
return ' '.join(info)
|
shoeffner/cvloop | cvloop/cvloop.py | cvloop._init_draw | python | def _init_draw(self):
if self.original is not None:
self.original.set_data(np.random.random((10, 10, 3)))
self.processed.set_data(np.random.random((10, 10, 3))) | Initializes the drawing of the frames by setting the images to
random colors.
This function is called by TimedAnimation. | train | https://github.com/shoeffner/cvloop/blob/3ddd311e9b679d16c8fd36779931380374de343c/cvloop/cvloop.py#L320-L328 | null | class cvloop(animation.TimedAnimation): # noqa: E501 pylint: disable=invalid-name, too-many-instance-attributes
"""Uses a TimedAnimation to efficiently render video sources with blit."""
def __init__(self, source=None, function=lambda x: x, *,
side_by_side=False, convert_color=cv2.COLOR_BGR2RGB,
cmaps=None, print_info=False, annotations=None,
annotations_default={'shape': 'RECT',
'color': '#228B22',
'line': 2,
'size': (20, 20)}):
"""Runs a video loop for the specified source and modifies the stream
with the function.
The source can either be an integer for a webcam device, a string to
load a video file or a VideoCapture object. In the last case, the
capture object will not be released by this function.
The function takes in a frame and returns the modified frame. The
default value just passes the value through, it is equivalent to the
identity function.
If side_by_side is True, the input as well as the modified image are
shown side by side, otherwise only the output is shown.
If convert_color can be any value for cv2.cvtColor, e.g.
cv2.COLOR_BGR2RGB. If it is -1, no color conversion is performed,
otherwise a color conversion using cv2.cvtColor is performed before the
image is passed to the function.
Args:
source: The video source; ints for webcams/devices, a string to
load a video file. To fine tune a video source, it is
possible to pass a VideoCapture object directly.
(Default: 0)
function: The modification function.
(Default: identity function `lambda x: x`)
side_by_side: If True, both images are shown, the original and the
modified image.
(Default: False)
convert_color: Converts the image with the given value using
`cv2.cvtColor`, unless value is -1.
(Default: `cv2.COLOR_BGR2RGB`)
cmaps: If None, the plot function makes guesses about what color
maps to use (if at all). If a single value, that color map
is used for all plots (e.g. cmaps='gray'). If cmaps is a
tuple, the first value is used on the original image, the
second value for the modified image. If cmaps is a tuple,
None-entries are ignored and result in the normal guessing.
print_info: If True, prints some info about the resource:
dimensions, color channels, data type. Skips the output
of one frame.
annotations: A list or tuple of annotations. Each annotation is a
list or tuple in turn of this format:
[x, y, frame, options]
x: the x coordinate of the center
y: the y coordinate of the center
frame: the frame number
options: A dictionary. This is optional (leaving the
list with only three elements). Allows the
following keys:
shape: 'RECT' or 'CIRC' (rectangle, circle)
line: linewidth
color: RGB tuple, gray scalar or html hex-string
size: radius for CIRC, (width, height) for RECT
annotations_default: A default format, that will be used if no
specific format is given for an annotation. If no format is
specified the following defaults are used:
shape: 'RECT',
color: '#228B22', (forestgreen)
line: 2,
size: (20, 20)
"""
if plt.get_backend() in (
'module://ipykernel.pylab.backend_inline',
'nbAgg'):
# Calls IPython's magic variables
for conf in get_ipython().configurables:
if isinstance(conf, PylabMagics):
conf.matplotlib(line='notebook')
conf.matplotlib(line='notebook')
if source is not None:
if isinstance(source, type(cv2.VideoCapture())) \
or hasattr(source, 'read'):
self.capture = source
else:
self.capture = cv2.VideoCapture(source)
else:
self.capture = cv2.VideoCapture(0)
self.figure = plt.figure()
self.connect_event_handlers()
self.function = function
self.convert_color = convert_color
self.annotations = (None if not annotations else
sorted(annotations, key=lambda a: a[2]))
self.annotations_default = annotations_default
self.annotation_artists = []
self.original = None
self.processed = None
self.frame_offset = 0
try:
self.cmap_original = cmaps if isinstance(cmaps, str) else cmaps[0]
except (IndexError, TypeError):
self.cmap_original = None
try:
self.cmap_processed = cmaps if isinstance(cmaps, str) else cmaps[1]
except (IndexError, TypeError):
self.cmap_processed = None
if side_by_side:
axes_original = self.figure.add_subplot(1, 2, 1)
axes_processed = self.figure.add_subplot(1, 2, 2)
else:
axes_original = None
axes_processed = self.figure.add_subplot(1, 1, 1)
if print_info:
self.print_info(self.capture)
self.size = self.determine_size(self.capture)
self.original = prepare_axes(axes_original, 'Original',
self.size, self.cmap_original)
self.processed = prepare_axes(axes_processed, 'Processed',
self.size, self.cmap_processed)
self.axes_processed = axes_processed
self.update_info()
super().__init__(self.figure, interval=50, blit=True)
plt.show()
def connect_event_handlers(self):
"""Connects event handlers to the figure."""
self.figure.canvas.mpl_connect('close_event', self.evt_release)
self.figure.canvas.mpl_connect('pause_event', self.evt_toggle_pause)
def evt_release(self, *args): # pylint: disable=unused-argument
"""Tries to release the capture."""
try:
self.capture.release()
except AttributeError:
pass
def evt_toggle_pause(self, *args): # pylint: disable=unused-argument
"""Pauses and resumes the video source."""
if self.event_source._timer is None: # noqa: e501 pylint: disable=protected-access
self.event_source.start()
else:
self.event_source.stop()
def print_info(self, capture):
"""Prints information about the unprocessed image.
Reads one frame from the source to determine image colors, dimensions
and data types.
Args:
capture: the source to read from.
"""
self.frame_offset += 1
ret, frame = capture.read()
if ret:
print('Capture Information')
print('\tDimensions (HxW): {}x{}'.format(*frame.shape[0:2]))
print('\tColor channels: {}'.format(frame.shape[2] if
len(frame.shape) > 2 else 1))
print('\tColor range: {}-{}'.format(np.min(frame),
np.max(frame)))
print('\tdtype: {}'.format(frame.dtype))
else:
print('No source found.')
def determine_size(self, capture):
"""Determines the height and width of the image source.
If no dimensions are available, this method defaults to a resolution of
640x480, thus returns (480, 640).
If capture has a get method it is assumed to understand
`cv2.CAP_PROP_FRAME_WIDTH` and `cv2.CAP_PROP_FRAME_HEIGHT` to get the
information. Otherwise it reads one frame from the source to determine
image dimensions.
Args:
capture: the source to read from.
Returns:
A tuple containing integers of height and width (simple casts).
"""
width = 640
height = 480
if capture and hasattr(capture, 'get'):
width = capture.get(cv2.CAP_PROP_FRAME_WIDTH)
height = capture.get(cv2.CAP_PROP_FRAME_HEIGHT)
else:
self.frame_offset += 1
ret, frame = capture.read()
if ret:
width = frame.shape[1]
height = frame.shape[0]
return (int(height), int(width))
def new_frame_seq(self):
"""Returns an endless frame counter.
Starts at self.frame_offset, in case some methods had to read frames
beforehand to gather information.
This function is called by TimedAnimation.
Returns:
an endless frame count
"""
return itertools.count(self.frame_offset)
def read_frame(self):
"""Reads a frame and converts the color if needed.
In case no frame is available, i.e. self.capture.read() returns False
as the first return value, the event_source of the TimedAnimation is
stopped, and if possible the capture source released.
Returns:
None if stopped, otherwise the color converted source image.
"""
ret, frame = self.capture.read()
if not ret:
self.event_source.stop()
try:
self.capture.release()
except AttributeError:
# has no release method, thus just pass
pass
return None
if self.convert_color != -1 and is_color_image(frame):
return cv2.cvtColor(frame, self.convert_color)
return frame
def process_frame(self, frame):
"""Processes a frame with the user specified function.
Args:
frame: The input frame.
Returns:
The processed frame.
"""
return self.function(frame)
def annotate(self, framedata):
"""Annotates the processed axis with given annotations for
the provided framedata.
Args:
framedata: The current frame number.
"""
for artist in self.annotation_artists:
artist.remove()
self.annotation_artists = []
for annotation in self.annotations:
if annotation[2] > framedata:
return
if annotation[2] == framedata:
pos = annotation[0:2]
shape = self.annotations_default['shape']
color = self.annotations_default['color']
size = self.annotations_default['size']
line = self.annotations_default['line']
if len(annotation) > 3:
shape = annotation[3].get('shape', shape)
color = annotation[3].get('color', color)
size = annotation[3].get('size', size)
line = annotation[3].get('line', line)
if shape == 'CIRC' and hasattr(size, '__len__'):
size = 30
if not hasattr(color, '__len__'):
color = (color,) * 3
if shape == 'RECT':
patch = patches.Rectangle((pos[0] - size[0] // 2,
pos[1] - size[1] // 2),
size[0], size[1], fill=False,
lw=line, fc='none', ec=color)
elif shape == 'CIRC':
patch = patches.CirclePolygon(pos, radius=size, fc='none',
ec=color, lw=line)
self.annotation_artists.append(patch)
self.axes_processed.add_artist(self.annotation_artists[-1])
def _draw_frame(self, framedata):
"""Reads, processes and draws the frames.
If needed for color maps, conversions to gray scale are performed. In
case the images are no color images and no custom color maps are
defined, the colormap `gray` is applied.
This function is called by TimedAnimation.
Args:
framedata: The frame data.
"""
original = self.read_frame()
if original is None:
self.update_info(self.info_string(message='Finished.',
frame=framedata))
return
if self.original is not None:
processed = self.process_frame(original.copy())
if self.cmap_original is not None:
original = to_gray(original)
elif not is_color_image(original):
self.original.set_cmap('gray')
self.original.set_data(original)
else:
processed = self.process_frame(original)
if self.cmap_processed is not None:
processed = to_gray(processed)
elif not is_color_image(processed):
self.processed.set_cmap('gray')
if self.annotations:
self.annotate(framedata)
self.processed.set_data(processed)
self.update_info(self.info_string(frame=framedata))
def update_info(self, custom=None):
"""Updates the figure's suptitle.
Calls self.info_string() unless custom is provided.
Args:
custom: Overwrite it with this string, unless None.
"""
self.figure.suptitle(self.info_string() if custom is None else custom)
def info_string(self, size=None, message='', frame=-1):
"""Returns information about the stream.
Generates a string containing size, frame number, and info messages.
Omits unnecessary information (e.g. empty messages and frame -1).
This method is primarily used to update the suptitle of the plot
figure.
Returns:
An info string.
"""
info = []
if size is not None:
info.append('Size: {1}x{0}'.format(*size))
elif self.size is not None:
info.append('Size: {1}x{0}'.format(*self.size))
if frame >= 0:
info.append('Frame: {}'.format(frame))
if message != '':
info.append('{}'.format(message))
return ' '.join(info)
|
shoeffner/cvloop | cvloop/cvloop.py | cvloop.read_frame | python | def read_frame(self):
ret, frame = self.capture.read()
if not ret:
self.event_source.stop()
try:
self.capture.release()
except AttributeError:
# has no release method, thus just pass
pass
return None
if self.convert_color != -1 and is_color_image(frame):
return cv2.cvtColor(frame, self.convert_color)
return frame | Reads a frame and converts the color if needed.
In case no frame is available, i.e. self.capture.read() returns False
as the first return value, the event_source of the TimedAnimation is
stopped, and if possible the capture source released.
Returns:
None if stopped, otherwise the color converted source image. | train | https://github.com/shoeffner/cvloop/blob/3ddd311e9b679d16c8fd36779931380374de343c/cvloop/cvloop.py#L330-L351 | [
"def is_color_image(frame):\n \"\"\"Checks if an image is a color image.\n\n A color image is an image with at least three dimensions and in the\n third dimension at least three color channels.\n\n Returns:\n True if the image is a color image.\n \"\"\"\n return len(frame.shape) >= 3 and frame.shape[2] >= 3\n"
] | class cvloop(animation.TimedAnimation): # noqa: E501 pylint: disable=invalid-name, too-many-instance-attributes
"""Uses a TimedAnimation to efficiently render video sources with blit."""
def __init__(self, source=None, function=lambda x: x, *,
side_by_side=False, convert_color=cv2.COLOR_BGR2RGB,
cmaps=None, print_info=False, annotations=None,
annotations_default={'shape': 'RECT',
'color': '#228B22',
'line': 2,
'size': (20, 20)}):
"""Runs a video loop for the specified source and modifies the stream
with the function.
The source can either be an integer for a webcam device, a string to
load a video file or a VideoCapture object. In the last case, the
capture object will not be released by this function.
The function takes in a frame and returns the modified frame. The
default value just passes the value through, it is equivalent to the
identity function.
If side_by_side is True, the input as well as the modified image are
shown side by side, otherwise only the output is shown.
If convert_color can be any value for cv2.cvtColor, e.g.
cv2.COLOR_BGR2RGB. If it is -1, no color conversion is performed,
otherwise a color conversion using cv2.cvtColor is performed before the
image is passed to the function.
Args:
source: The video source; ints for webcams/devices, a string to
load a video file. To fine tune a video source, it is
possible to pass a VideoCapture object directly.
(Default: 0)
function: The modification function.
(Default: identity function `lambda x: x`)
side_by_side: If True, both images are shown, the original and the
modified image.
(Default: False)
convert_color: Converts the image with the given value using
`cv2.cvtColor`, unless value is -1.
(Default: `cv2.COLOR_BGR2RGB`)
cmaps: If None, the plot function makes guesses about what color
maps to use (if at all). If a single value, that color map
is used for all plots (e.g. cmaps='gray'). If cmaps is a
tuple, the first value is used on the original image, the
second value for the modified image. If cmaps is a tuple,
None-entries are ignored and result in the normal guessing.
print_info: If True, prints some info about the resource:
dimensions, color channels, data type. Skips the output
of one frame.
annotations: A list or tuple of annotations. Each annotation is a
list or tuple in turn of this format:
[x, y, frame, options]
x: the x coordinate of the center
y: the y coordinate of the center
frame: the frame number
options: A dictionary. This is optional (leaving the
list with only three elements). Allows the
following keys:
shape: 'RECT' or 'CIRC' (rectangle, circle)
line: linewidth
color: RGB tuple, gray scalar or html hex-string
size: radius for CIRC, (width, height) for RECT
annotations_default: A default format, that will be used if no
specific format is given for an annotation. If no format is
specified the following defaults are used:
shape: 'RECT',
color: '#228B22', (forestgreen)
line: 2,
size: (20, 20)
"""
if plt.get_backend() in (
'module://ipykernel.pylab.backend_inline',
'nbAgg'):
# Calls IPython's magic variables
for conf in get_ipython().configurables:
if isinstance(conf, PylabMagics):
conf.matplotlib(line='notebook')
conf.matplotlib(line='notebook')
if source is not None:
if isinstance(source, type(cv2.VideoCapture())) \
or hasattr(source, 'read'):
self.capture = source
else:
self.capture = cv2.VideoCapture(source)
else:
self.capture = cv2.VideoCapture(0)
self.figure = plt.figure()
self.connect_event_handlers()
self.function = function
self.convert_color = convert_color
self.annotations = (None if not annotations else
sorted(annotations, key=lambda a: a[2]))
self.annotations_default = annotations_default
self.annotation_artists = []
self.original = None
self.processed = None
self.frame_offset = 0
try:
self.cmap_original = cmaps if isinstance(cmaps, str) else cmaps[0]
except (IndexError, TypeError):
self.cmap_original = None
try:
self.cmap_processed = cmaps if isinstance(cmaps, str) else cmaps[1]
except (IndexError, TypeError):
self.cmap_processed = None
if side_by_side:
axes_original = self.figure.add_subplot(1, 2, 1)
axes_processed = self.figure.add_subplot(1, 2, 2)
else:
axes_original = None
axes_processed = self.figure.add_subplot(1, 1, 1)
if print_info:
self.print_info(self.capture)
self.size = self.determine_size(self.capture)
self.original = prepare_axes(axes_original, 'Original',
self.size, self.cmap_original)
self.processed = prepare_axes(axes_processed, 'Processed',
self.size, self.cmap_processed)
self.axes_processed = axes_processed
self.update_info()
super().__init__(self.figure, interval=50, blit=True)
plt.show()
def connect_event_handlers(self):
"""Connects event handlers to the figure."""
self.figure.canvas.mpl_connect('close_event', self.evt_release)
self.figure.canvas.mpl_connect('pause_event', self.evt_toggle_pause)
def evt_release(self, *args): # pylint: disable=unused-argument
"""Tries to release the capture."""
try:
self.capture.release()
except AttributeError:
pass
def evt_toggle_pause(self, *args): # pylint: disable=unused-argument
"""Pauses and resumes the video source."""
if self.event_source._timer is None: # noqa: e501 pylint: disable=protected-access
self.event_source.start()
else:
self.event_source.stop()
def print_info(self, capture):
"""Prints information about the unprocessed image.
Reads one frame from the source to determine image colors, dimensions
and data types.
Args:
capture: the source to read from.
"""
self.frame_offset += 1
ret, frame = capture.read()
if ret:
print('Capture Information')
print('\tDimensions (HxW): {}x{}'.format(*frame.shape[0:2]))
print('\tColor channels: {}'.format(frame.shape[2] if
len(frame.shape) > 2 else 1))
print('\tColor range: {}-{}'.format(np.min(frame),
np.max(frame)))
print('\tdtype: {}'.format(frame.dtype))
else:
print('No source found.')
def determine_size(self, capture):
"""Determines the height and width of the image source.
If no dimensions are available, this method defaults to a resolution of
640x480, thus returns (480, 640).
If capture has a get method it is assumed to understand
`cv2.CAP_PROP_FRAME_WIDTH` and `cv2.CAP_PROP_FRAME_HEIGHT` to get the
information. Otherwise it reads one frame from the source to determine
image dimensions.
Args:
capture: the source to read from.
Returns:
A tuple containing integers of height and width (simple casts).
"""
width = 640
height = 480
if capture and hasattr(capture, 'get'):
width = capture.get(cv2.CAP_PROP_FRAME_WIDTH)
height = capture.get(cv2.CAP_PROP_FRAME_HEIGHT)
else:
self.frame_offset += 1
ret, frame = capture.read()
if ret:
width = frame.shape[1]
height = frame.shape[0]
return (int(height), int(width))
def new_frame_seq(self):
"""Returns an endless frame counter.
Starts at self.frame_offset, in case some methods had to read frames
beforehand to gather information.
This function is called by TimedAnimation.
Returns:
an endless frame count
"""
return itertools.count(self.frame_offset)
def _init_draw(self):
"""Initializes the drawing of the frames by setting the images to
random colors.
This function is called by TimedAnimation.
"""
if self.original is not None:
self.original.set_data(np.random.random((10, 10, 3)))
self.processed.set_data(np.random.random((10, 10, 3)))
def process_frame(self, frame):
"""Processes a frame with the user specified function.
Args:
frame: The input frame.
Returns:
The processed frame.
"""
return self.function(frame)
def annotate(self, framedata):
"""Annotates the processed axis with given annotations for
the provided framedata.
Args:
framedata: The current frame number.
"""
for artist in self.annotation_artists:
artist.remove()
self.annotation_artists = []
for annotation in self.annotations:
if annotation[2] > framedata:
return
if annotation[2] == framedata:
pos = annotation[0:2]
shape = self.annotations_default['shape']
color = self.annotations_default['color']
size = self.annotations_default['size']
line = self.annotations_default['line']
if len(annotation) > 3:
shape = annotation[3].get('shape', shape)
color = annotation[3].get('color', color)
size = annotation[3].get('size', size)
line = annotation[3].get('line', line)
if shape == 'CIRC' and hasattr(size, '__len__'):
size = 30
if not hasattr(color, '__len__'):
color = (color,) * 3
if shape == 'RECT':
patch = patches.Rectangle((pos[0] - size[0] // 2,
pos[1] - size[1] // 2),
size[0], size[1], fill=False,
lw=line, fc='none', ec=color)
elif shape == 'CIRC':
patch = patches.CirclePolygon(pos, radius=size, fc='none',
ec=color, lw=line)
self.annotation_artists.append(patch)
self.axes_processed.add_artist(self.annotation_artists[-1])
def _draw_frame(self, framedata):
"""Reads, processes and draws the frames.
If needed for color maps, conversions to gray scale are performed. In
case the images are no color images and no custom color maps are
defined, the colormap `gray` is applied.
This function is called by TimedAnimation.
Args:
framedata: The frame data.
"""
original = self.read_frame()
if original is None:
self.update_info(self.info_string(message='Finished.',
frame=framedata))
return
if self.original is not None:
processed = self.process_frame(original.copy())
if self.cmap_original is not None:
original = to_gray(original)
elif not is_color_image(original):
self.original.set_cmap('gray')
self.original.set_data(original)
else:
processed = self.process_frame(original)
if self.cmap_processed is not None:
processed = to_gray(processed)
elif not is_color_image(processed):
self.processed.set_cmap('gray')
if self.annotations:
self.annotate(framedata)
self.processed.set_data(processed)
self.update_info(self.info_string(frame=framedata))
def update_info(self, custom=None):
"""Updates the figure's suptitle.
Calls self.info_string() unless custom is provided.
Args:
custom: Overwrite it with this string, unless None.
"""
self.figure.suptitle(self.info_string() if custom is None else custom)
def info_string(self, size=None, message='', frame=-1):
"""Returns information about the stream.
Generates a string containing size, frame number, and info messages.
Omits unnecessary information (e.g. empty messages and frame -1).
This method is primarily used to update the suptitle of the plot
figure.
Returns:
An info string.
"""
info = []
if size is not None:
info.append('Size: {1}x{0}'.format(*size))
elif self.size is not None:
info.append('Size: {1}x{0}'.format(*self.size))
if frame >= 0:
info.append('Frame: {}'.format(frame))
if message != '':
info.append('{}'.format(message))
return ' '.join(info)
|
shoeffner/cvloop | cvloop/cvloop.py | cvloop.annotate | python | def annotate(self, framedata):
for artist in self.annotation_artists:
artist.remove()
self.annotation_artists = []
for annotation in self.annotations:
if annotation[2] > framedata:
return
if annotation[2] == framedata:
pos = annotation[0:2]
shape = self.annotations_default['shape']
color = self.annotations_default['color']
size = self.annotations_default['size']
line = self.annotations_default['line']
if len(annotation) > 3:
shape = annotation[3].get('shape', shape)
color = annotation[3].get('color', color)
size = annotation[3].get('size', size)
line = annotation[3].get('line', line)
if shape == 'CIRC' and hasattr(size, '__len__'):
size = 30
if not hasattr(color, '__len__'):
color = (color,) * 3
if shape == 'RECT':
patch = patches.Rectangle((pos[0] - size[0] // 2,
pos[1] - size[1] // 2),
size[0], size[1], fill=False,
lw=line, fc='none', ec=color)
elif shape == 'CIRC':
patch = patches.CirclePolygon(pos, radius=size, fc='none',
ec=color, lw=line)
self.annotation_artists.append(patch)
self.axes_processed.add_artist(self.annotation_artists[-1]) | Annotates the processed axis with given annotations for
the provided framedata.
Args:
framedata: The current frame number. | train | https://github.com/shoeffner/cvloop/blob/3ddd311e9b679d16c8fd36779931380374de343c/cvloop/cvloop.py#L364-L403 | null | class cvloop(animation.TimedAnimation): # noqa: E501 pylint: disable=invalid-name, too-many-instance-attributes
"""Uses a TimedAnimation to efficiently render video sources with blit."""
def __init__(self, source=None, function=lambda x: x, *,
side_by_side=False, convert_color=cv2.COLOR_BGR2RGB,
cmaps=None, print_info=False, annotations=None,
annotations_default={'shape': 'RECT',
'color': '#228B22',
'line': 2,
'size': (20, 20)}):
"""Runs a video loop for the specified source and modifies the stream
with the function.
The source can either be an integer for a webcam device, a string to
load a video file or a VideoCapture object. In the last case, the
capture object will not be released by this function.
The function takes in a frame and returns the modified frame. The
default value just passes the value through, it is equivalent to the
identity function.
If side_by_side is True, the input as well as the modified image are
shown side by side, otherwise only the output is shown.
If convert_color can be any value for cv2.cvtColor, e.g.
cv2.COLOR_BGR2RGB. If it is -1, no color conversion is performed,
otherwise a color conversion using cv2.cvtColor is performed before the
image is passed to the function.
Args:
source: The video source; ints for webcams/devices, a string to
load a video file. To fine tune a video source, it is
possible to pass a VideoCapture object directly.
(Default: 0)
function: The modification function.
(Default: identity function `lambda x: x`)
side_by_side: If True, both images are shown, the original and the
modified image.
(Default: False)
convert_color: Converts the image with the given value using
`cv2.cvtColor`, unless value is -1.
(Default: `cv2.COLOR_BGR2RGB`)
cmaps: If None, the plot function makes guesses about what color
maps to use (if at all). If a single value, that color map
is used for all plots (e.g. cmaps='gray'). If cmaps is a
tuple, the first value is used on the original image, the
second value for the modified image. If cmaps is a tuple,
None-entries are ignored and result in the normal guessing.
print_info: If True, prints some info about the resource:
dimensions, color channels, data type. Skips the output
of one frame.
annotations: A list or tuple of annotations. Each annotation is a
list or tuple in turn of this format:
[x, y, frame, options]
x: the x coordinate of the center
y: the y coordinate of the center
frame: the frame number
options: A dictionary. This is optional (leaving the
list with only three elements). Allows the
following keys:
shape: 'RECT' or 'CIRC' (rectangle, circle)
line: linewidth
color: RGB tuple, gray scalar or html hex-string
size: radius for CIRC, (width, height) for RECT
annotations_default: A default format, that will be used if no
specific format is given for an annotation. If no format is
specified the following defaults are used:
shape: 'RECT',
color: '#228B22', (forestgreen)
line: 2,
size: (20, 20)
"""
if plt.get_backend() in (
'module://ipykernel.pylab.backend_inline',
'nbAgg'):
# Calls IPython's magic variables
for conf in get_ipython().configurables:
if isinstance(conf, PylabMagics):
conf.matplotlib(line='notebook')
conf.matplotlib(line='notebook')
if source is not None:
if isinstance(source, type(cv2.VideoCapture())) \
or hasattr(source, 'read'):
self.capture = source
else:
self.capture = cv2.VideoCapture(source)
else:
self.capture = cv2.VideoCapture(0)
self.figure = plt.figure()
self.connect_event_handlers()
self.function = function
self.convert_color = convert_color
self.annotations = (None if not annotations else
sorted(annotations, key=lambda a: a[2]))
self.annotations_default = annotations_default
self.annotation_artists = []
self.original = None
self.processed = None
self.frame_offset = 0
try:
self.cmap_original = cmaps if isinstance(cmaps, str) else cmaps[0]
except (IndexError, TypeError):
self.cmap_original = None
try:
self.cmap_processed = cmaps if isinstance(cmaps, str) else cmaps[1]
except (IndexError, TypeError):
self.cmap_processed = None
if side_by_side:
axes_original = self.figure.add_subplot(1, 2, 1)
axes_processed = self.figure.add_subplot(1, 2, 2)
else:
axes_original = None
axes_processed = self.figure.add_subplot(1, 1, 1)
if print_info:
self.print_info(self.capture)
self.size = self.determine_size(self.capture)
self.original = prepare_axes(axes_original, 'Original',
self.size, self.cmap_original)
self.processed = prepare_axes(axes_processed, 'Processed',
self.size, self.cmap_processed)
self.axes_processed = axes_processed
self.update_info()
super().__init__(self.figure, interval=50, blit=True)
plt.show()
def connect_event_handlers(self):
"""Connects event handlers to the figure."""
self.figure.canvas.mpl_connect('close_event', self.evt_release)
self.figure.canvas.mpl_connect('pause_event', self.evt_toggle_pause)
def evt_release(self, *args): # pylint: disable=unused-argument
"""Tries to release the capture."""
try:
self.capture.release()
except AttributeError:
pass
def evt_toggle_pause(self, *args): # pylint: disable=unused-argument
"""Pauses and resumes the video source."""
if self.event_source._timer is None: # noqa: e501 pylint: disable=protected-access
self.event_source.start()
else:
self.event_source.stop()
def print_info(self, capture):
"""Prints information about the unprocessed image.
Reads one frame from the source to determine image colors, dimensions
and data types.
Args:
capture: the source to read from.
"""
self.frame_offset += 1
ret, frame = capture.read()
if ret:
print('Capture Information')
print('\tDimensions (HxW): {}x{}'.format(*frame.shape[0:2]))
print('\tColor channels: {}'.format(frame.shape[2] if
len(frame.shape) > 2 else 1))
print('\tColor range: {}-{}'.format(np.min(frame),
np.max(frame)))
print('\tdtype: {}'.format(frame.dtype))
else:
print('No source found.')
def determine_size(self, capture):
"""Determines the height and width of the image source.
If no dimensions are available, this method defaults to a resolution of
640x480, thus returns (480, 640).
If capture has a get method it is assumed to understand
`cv2.CAP_PROP_FRAME_WIDTH` and `cv2.CAP_PROP_FRAME_HEIGHT` to get the
information. Otherwise it reads one frame from the source to determine
image dimensions.
Args:
capture: the source to read from.
Returns:
A tuple containing integers of height and width (simple casts).
"""
width = 640
height = 480
if capture and hasattr(capture, 'get'):
width = capture.get(cv2.CAP_PROP_FRAME_WIDTH)
height = capture.get(cv2.CAP_PROP_FRAME_HEIGHT)
else:
self.frame_offset += 1
ret, frame = capture.read()
if ret:
width = frame.shape[1]
height = frame.shape[0]
return (int(height), int(width))
def new_frame_seq(self):
"""Returns an endless frame counter.
Starts at self.frame_offset, in case some methods had to read frames
beforehand to gather information.
This function is called by TimedAnimation.
Returns:
an endless frame count
"""
return itertools.count(self.frame_offset)
def _init_draw(self):
"""Initializes the drawing of the frames by setting the images to
random colors.
This function is called by TimedAnimation.
"""
if self.original is not None:
self.original.set_data(np.random.random((10, 10, 3)))
self.processed.set_data(np.random.random((10, 10, 3)))
def read_frame(self):
"""Reads a frame and converts the color if needed.
In case no frame is available, i.e. self.capture.read() returns False
as the first return value, the event_source of the TimedAnimation is
stopped, and if possible the capture source released.
Returns:
None if stopped, otherwise the color converted source image.
"""
ret, frame = self.capture.read()
if not ret:
self.event_source.stop()
try:
self.capture.release()
except AttributeError:
# has no release method, thus just pass
pass
return None
if self.convert_color != -1 and is_color_image(frame):
return cv2.cvtColor(frame, self.convert_color)
return frame
def process_frame(self, frame):
"""Processes a frame with the user specified function.
Args:
frame: The input frame.
Returns:
The processed frame.
"""
return self.function(frame)
def _draw_frame(self, framedata):
"""Reads, processes and draws the frames.
If needed for color maps, conversions to gray scale are performed. In
case the images are no color images and no custom color maps are
defined, the colormap `gray` is applied.
This function is called by TimedAnimation.
Args:
framedata: The frame data.
"""
original = self.read_frame()
if original is None:
self.update_info(self.info_string(message='Finished.',
frame=framedata))
return
if self.original is not None:
processed = self.process_frame(original.copy())
if self.cmap_original is not None:
original = to_gray(original)
elif not is_color_image(original):
self.original.set_cmap('gray')
self.original.set_data(original)
else:
processed = self.process_frame(original)
if self.cmap_processed is not None:
processed = to_gray(processed)
elif not is_color_image(processed):
self.processed.set_cmap('gray')
if self.annotations:
self.annotate(framedata)
self.processed.set_data(processed)
self.update_info(self.info_string(frame=framedata))
def update_info(self, custom=None):
"""Updates the figure's suptitle.
Calls self.info_string() unless custom is provided.
Args:
custom: Overwrite it with this string, unless None.
"""
self.figure.suptitle(self.info_string() if custom is None else custom)
def info_string(self, size=None, message='', frame=-1):
"""Returns information about the stream.
Generates a string containing size, frame number, and info messages.
Omits unnecessary information (e.g. empty messages and frame -1).
This method is primarily used to update the suptitle of the plot
figure.
Returns:
An info string.
"""
info = []
if size is not None:
info.append('Size: {1}x{0}'.format(*size))
elif self.size is not None:
info.append('Size: {1}x{0}'.format(*self.size))
if frame >= 0:
info.append('Frame: {}'.format(frame))
if message != '':
info.append('{}'.format(message))
return ' '.join(info)
|
shoeffner/cvloop | cvloop/cvloop.py | cvloop._draw_frame | python | def _draw_frame(self, framedata):
original = self.read_frame()
if original is None:
self.update_info(self.info_string(message='Finished.',
frame=framedata))
return
if self.original is not None:
processed = self.process_frame(original.copy())
if self.cmap_original is not None:
original = to_gray(original)
elif not is_color_image(original):
self.original.set_cmap('gray')
self.original.set_data(original)
else:
processed = self.process_frame(original)
if self.cmap_processed is not None:
processed = to_gray(processed)
elif not is_color_image(processed):
self.processed.set_cmap('gray')
if self.annotations:
self.annotate(framedata)
self.processed.set_data(processed)
self.update_info(self.info_string(frame=framedata)) | Reads, processes and draws the frames.
If needed for color maps, conversions to gray scale are performed. In
case the images are no color images and no custom color maps are
defined, the colormap `gray` is applied.
This function is called by TimedAnimation.
Args:
framedata: The frame data. | train | https://github.com/shoeffner/cvloop/blob/3ddd311e9b679d16c8fd36779931380374de343c/cvloop/cvloop.py#L405-L444 | null | class cvloop(animation.TimedAnimation): # noqa: E501 pylint: disable=invalid-name, too-many-instance-attributes
"""Uses a TimedAnimation to efficiently render video sources with blit."""
def __init__(self, source=None, function=lambda x: x, *,
side_by_side=False, convert_color=cv2.COLOR_BGR2RGB,
cmaps=None, print_info=False, annotations=None,
annotations_default={'shape': 'RECT',
'color': '#228B22',
'line': 2,
'size': (20, 20)}):
"""Runs a video loop for the specified source and modifies the stream
with the function.
The source can either be an integer for a webcam device, a string to
load a video file or a VideoCapture object. In the last case, the
capture object will not be released by this function.
The function takes in a frame and returns the modified frame. The
default value just passes the value through, it is equivalent to the
identity function.
If side_by_side is True, the input as well as the modified image are
shown side by side, otherwise only the output is shown.
If convert_color can be any value for cv2.cvtColor, e.g.
cv2.COLOR_BGR2RGB. If it is -1, no color conversion is performed,
otherwise a color conversion using cv2.cvtColor is performed before the
image is passed to the function.
Args:
source: The video source; ints for webcams/devices, a string to
load a video file. To fine tune a video source, it is
possible to pass a VideoCapture object directly.
(Default: 0)
function: The modification function.
(Default: identity function `lambda x: x`)
side_by_side: If True, both images are shown, the original and the
modified image.
(Default: False)
convert_color: Converts the image with the given value using
`cv2.cvtColor`, unless value is -1.
(Default: `cv2.COLOR_BGR2RGB`)
cmaps: If None, the plot function makes guesses about what color
maps to use (if at all). If a single value, that color map
is used for all plots (e.g. cmaps='gray'). If cmaps is a
tuple, the first value is used on the original image, the
second value for the modified image. If cmaps is a tuple,
None-entries are ignored and result in the normal guessing.
print_info: If True, prints some info about the resource:
dimensions, color channels, data type. Skips the output
of one frame.
annotations: A list or tuple of annotations. Each annotation is a
list or tuple in turn of this format:
[x, y, frame, options]
x: the x coordinate of the center
y: the y coordinate of the center
frame: the frame number
options: A dictionary. This is optional (leaving the
list with only three elements). Allows the
following keys:
shape: 'RECT' or 'CIRC' (rectangle, circle)
line: linewidth
color: RGB tuple, gray scalar or html hex-string
size: radius for CIRC, (width, height) for RECT
annotations_default: A default format, that will be used if no
specific format is given for an annotation. If no format is
specified the following defaults are used:
shape: 'RECT',
color: '#228B22', (forestgreen)
line: 2,
size: (20, 20)
"""
if plt.get_backend() in (
'module://ipykernel.pylab.backend_inline',
'nbAgg'):
# Calls IPython's magic variables
for conf in get_ipython().configurables:
if isinstance(conf, PylabMagics):
conf.matplotlib(line='notebook')
conf.matplotlib(line='notebook')
if source is not None:
if isinstance(source, type(cv2.VideoCapture())) \
or hasattr(source, 'read'):
self.capture = source
else:
self.capture = cv2.VideoCapture(source)
else:
self.capture = cv2.VideoCapture(0)
self.figure = plt.figure()
self.connect_event_handlers()
self.function = function
self.convert_color = convert_color
self.annotations = (None if not annotations else
sorted(annotations, key=lambda a: a[2]))
self.annotations_default = annotations_default
self.annotation_artists = []
self.original = None
self.processed = None
self.frame_offset = 0
try:
self.cmap_original = cmaps if isinstance(cmaps, str) else cmaps[0]
except (IndexError, TypeError):
self.cmap_original = None
try:
self.cmap_processed = cmaps if isinstance(cmaps, str) else cmaps[1]
except (IndexError, TypeError):
self.cmap_processed = None
if side_by_side:
axes_original = self.figure.add_subplot(1, 2, 1)
axes_processed = self.figure.add_subplot(1, 2, 2)
else:
axes_original = None
axes_processed = self.figure.add_subplot(1, 1, 1)
if print_info:
self.print_info(self.capture)
self.size = self.determine_size(self.capture)
self.original = prepare_axes(axes_original, 'Original',
self.size, self.cmap_original)
self.processed = prepare_axes(axes_processed, 'Processed',
self.size, self.cmap_processed)
self.axes_processed = axes_processed
self.update_info()
super().__init__(self.figure, interval=50, blit=True)
plt.show()
def connect_event_handlers(self):
"""Connects event handlers to the figure."""
self.figure.canvas.mpl_connect('close_event', self.evt_release)
self.figure.canvas.mpl_connect('pause_event', self.evt_toggle_pause)
def evt_release(self, *args): # pylint: disable=unused-argument
"""Tries to release the capture."""
try:
self.capture.release()
except AttributeError:
pass
def evt_toggle_pause(self, *args): # pylint: disable=unused-argument
"""Pauses and resumes the video source."""
if self.event_source._timer is None: # noqa: e501 pylint: disable=protected-access
self.event_source.start()
else:
self.event_source.stop()
def print_info(self, capture):
"""Prints information about the unprocessed image.
Reads one frame from the source to determine image colors, dimensions
and data types.
Args:
capture: the source to read from.
"""
self.frame_offset += 1
ret, frame = capture.read()
if ret:
print('Capture Information')
print('\tDimensions (HxW): {}x{}'.format(*frame.shape[0:2]))
print('\tColor channels: {}'.format(frame.shape[2] if
len(frame.shape) > 2 else 1))
print('\tColor range: {}-{}'.format(np.min(frame),
np.max(frame)))
print('\tdtype: {}'.format(frame.dtype))
else:
print('No source found.')
def determine_size(self, capture):
"""Determines the height and width of the image source.
If no dimensions are available, this method defaults to a resolution of
640x480, thus returns (480, 640).
If capture has a get method it is assumed to understand
`cv2.CAP_PROP_FRAME_WIDTH` and `cv2.CAP_PROP_FRAME_HEIGHT` to get the
information. Otherwise it reads one frame from the source to determine
image dimensions.
Args:
capture: the source to read from.
Returns:
A tuple containing integers of height and width (simple casts).
"""
width = 640
height = 480
if capture and hasattr(capture, 'get'):
width = capture.get(cv2.CAP_PROP_FRAME_WIDTH)
height = capture.get(cv2.CAP_PROP_FRAME_HEIGHT)
else:
self.frame_offset += 1
ret, frame = capture.read()
if ret:
width = frame.shape[1]
height = frame.shape[0]
return (int(height), int(width))
def new_frame_seq(self):
"""Returns an endless frame counter.
Starts at self.frame_offset, in case some methods had to read frames
beforehand to gather information.
This function is called by TimedAnimation.
Returns:
an endless frame count
"""
return itertools.count(self.frame_offset)
def _init_draw(self):
"""Initializes the drawing of the frames by setting the images to
random colors.
This function is called by TimedAnimation.
"""
if self.original is not None:
self.original.set_data(np.random.random((10, 10, 3)))
self.processed.set_data(np.random.random((10, 10, 3)))
def read_frame(self):
"""Reads a frame and converts the color if needed.
In case no frame is available, i.e. self.capture.read() returns False
as the first return value, the event_source of the TimedAnimation is
stopped, and if possible the capture source released.
Returns:
None if stopped, otherwise the color converted source image.
"""
ret, frame = self.capture.read()
if not ret:
self.event_source.stop()
try:
self.capture.release()
except AttributeError:
# has no release method, thus just pass
pass
return None
if self.convert_color != -1 and is_color_image(frame):
return cv2.cvtColor(frame, self.convert_color)
return frame
def process_frame(self, frame):
"""Processes a frame with the user specified function.
Args:
frame: The input frame.
Returns:
The processed frame.
"""
return self.function(frame)
def annotate(self, framedata):
"""Annotates the processed axis with given annotations for
the provided framedata.
Args:
framedata: The current frame number.
"""
for artist in self.annotation_artists:
artist.remove()
self.annotation_artists = []
for annotation in self.annotations:
if annotation[2] > framedata:
return
if annotation[2] == framedata:
pos = annotation[0:2]
shape = self.annotations_default['shape']
color = self.annotations_default['color']
size = self.annotations_default['size']
line = self.annotations_default['line']
if len(annotation) > 3:
shape = annotation[3].get('shape', shape)
color = annotation[3].get('color', color)
size = annotation[3].get('size', size)
line = annotation[3].get('line', line)
if shape == 'CIRC' and hasattr(size, '__len__'):
size = 30
if not hasattr(color, '__len__'):
color = (color,) * 3
if shape == 'RECT':
patch = patches.Rectangle((pos[0] - size[0] // 2,
pos[1] - size[1] // 2),
size[0], size[1], fill=False,
lw=line, fc='none', ec=color)
elif shape == 'CIRC':
patch = patches.CirclePolygon(pos, radius=size, fc='none',
ec=color, lw=line)
self.annotation_artists.append(patch)
self.axes_processed.add_artist(self.annotation_artists[-1])
def update_info(self, custom=None):
"""Updates the figure's suptitle.
Calls self.info_string() unless custom is provided.
Args:
custom: Overwrite it with this string, unless None.
"""
self.figure.suptitle(self.info_string() if custom is None else custom)
def info_string(self, size=None, message='', frame=-1):
"""Returns information about the stream.
Generates a string containing size, frame number, and info messages.
Omits unnecessary information (e.g. empty messages and frame -1).
This method is primarily used to update the suptitle of the plot
figure.
Returns:
An info string.
"""
info = []
if size is not None:
info.append('Size: {1}x{0}'.format(*size))
elif self.size is not None:
info.append('Size: {1}x{0}'.format(*self.size))
if frame >= 0:
info.append('Frame: {}'.format(frame))
if message != '':
info.append('{}'.format(message))
return ' '.join(info)
|
shoeffner/cvloop | cvloop/cvloop.py | cvloop.update_info | python | def update_info(self, custom=None):
self.figure.suptitle(self.info_string() if custom is None else custom) | Updates the figure's suptitle.
Calls self.info_string() unless custom is provided.
Args:
custom: Overwrite it with this string, unless None. | train | https://github.com/shoeffner/cvloop/blob/3ddd311e9b679d16c8fd36779931380374de343c/cvloop/cvloop.py#L446-L454 | null | class cvloop(animation.TimedAnimation): # noqa: E501 pylint: disable=invalid-name, too-many-instance-attributes
"""Uses a TimedAnimation to efficiently render video sources with blit."""
def __init__(self, source=None, function=lambda x: x, *,
side_by_side=False, convert_color=cv2.COLOR_BGR2RGB,
cmaps=None, print_info=False, annotations=None,
annotations_default={'shape': 'RECT',
'color': '#228B22',
'line': 2,
'size': (20, 20)}):
"""Runs a video loop for the specified source and modifies the stream
with the function.
The source can either be an integer for a webcam device, a string to
load a video file or a VideoCapture object. In the last case, the
capture object will not be released by this function.
The function takes in a frame and returns the modified frame. The
default value just passes the value through, it is equivalent to the
identity function.
If side_by_side is True, the input as well as the modified image are
shown side by side, otherwise only the output is shown.
If convert_color can be any value for cv2.cvtColor, e.g.
cv2.COLOR_BGR2RGB. If it is -1, no color conversion is performed,
otherwise a color conversion using cv2.cvtColor is performed before the
image is passed to the function.
Args:
source: The video source; ints for webcams/devices, a string to
load a video file. To fine tune a video source, it is
possible to pass a VideoCapture object directly.
(Default: 0)
function: The modification function.
(Default: identity function `lambda x: x`)
side_by_side: If True, both images are shown, the original and the
modified image.
(Default: False)
convert_color: Converts the image with the given value using
`cv2.cvtColor`, unless value is -1.
(Default: `cv2.COLOR_BGR2RGB`)
cmaps: If None, the plot function makes guesses about what color
maps to use (if at all). If a single value, that color map
is used for all plots (e.g. cmaps='gray'). If cmaps is a
tuple, the first value is used on the original image, the
second value for the modified image. If cmaps is a tuple,
None-entries are ignored and result in the normal guessing.
print_info: If True, prints some info about the resource:
dimensions, color channels, data type. Skips the output
of one frame.
annotations: A list or tuple of annotations. Each annotation is a
list or tuple in turn of this format:
[x, y, frame, options]
x: the x coordinate of the center
y: the y coordinate of the center
frame: the frame number
options: A dictionary. This is optional (leaving the
list with only three elements). Allows the
following keys:
shape: 'RECT' or 'CIRC' (rectangle, circle)
line: linewidth
color: RGB tuple, gray scalar or html hex-string
size: radius for CIRC, (width, height) for RECT
annotations_default: A default format, that will be used if no
specific format is given for an annotation. If no format is
specified the following defaults are used:
shape: 'RECT',
color: '#228B22', (forestgreen)
line: 2,
size: (20, 20)
"""
if plt.get_backend() in (
'module://ipykernel.pylab.backend_inline',
'nbAgg'):
# Calls IPython's magic variables
for conf in get_ipython().configurables:
if isinstance(conf, PylabMagics):
conf.matplotlib(line='notebook')
conf.matplotlib(line='notebook')
if source is not None:
if isinstance(source, type(cv2.VideoCapture())) \
or hasattr(source, 'read'):
self.capture = source
else:
self.capture = cv2.VideoCapture(source)
else:
self.capture = cv2.VideoCapture(0)
self.figure = plt.figure()
self.connect_event_handlers()
self.function = function
self.convert_color = convert_color
self.annotations = (None if not annotations else
sorted(annotations, key=lambda a: a[2]))
self.annotations_default = annotations_default
self.annotation_artists = []
self.original = None
self.processed = None
self.frame_offset = 0
try:
self.cmap_original = cmaps if isinstance(cmaps, str) else cmaps[0]
except (IndexError, TypeError):
self.cmap_original = None
try:
self.cmap_processed = cmaps if isinstance(cmaps, str) else cmaps[1]
except (IndexError, TypeError):
self.cmap_processed = None
if side_by_side:
axes_original = self.figure.add_subplot(1, 2, 1)
axes_processed = self.figure.add_subplot(1, 2, 2)
else:
axes_original = None
axes_processed = self.figure.add_subplot(1, 1, 1)
if print_info:
self.print_info(self.capture)
self.size = self.determine_size(self.capture)
self.original = prepare_axes(axes_original, 'Original',
self.size, self.cmap_original)
self.processed = prepare_axes(axes_processed, 'Processed',
self.size, self.cmap_processed)
self.axes_processed = axes_processed
self.update_info()
super().__init__(self.figure, interval=50, blit=True)
plt.show()
def connect_event_handlers(self):
"""Connects event handlers to the figure."""
self.figure.canvas.mpl_connect('close_event', self.evt_release)
self.figure.canvas.mpl_connect('pause_event', self.evt_toggle_pause)
def evt_release(self, *args): # pylint: disable=unused-argument
"""Tries to release the capture."""
try:
self.capture.release()
except AttributeError:
pass
def evt_toggle_pause(self, *args): # pylint: disable=unused-argument
"""Pauses and resumes the video source."""
if self.event_source._timer is None: # noqa: e501 pylint: disable=protected-access
self.event_source.start()
else:
self.event_source.stop()
def print_info(self, capture):
"""Prints information about the unprocessed image.
Reads one frame from the source to determine image colors, dimensions
and data types.
Args:
capture: the source to read from.
"""
self.frame_offset += 1
ret, frame = capture.read()
if ret:
print('Capture Information')
print('\tDimensions (HxW): {}x{}'.format(*frame.shape[0:2]))
print('\tColor channels: {}'.format(frame.shape[2] if
len(frame.shape) > 2 else 1))
print('\tColor range: {}-{}'.format(np.min(frame),
np.max(frame)))
print('\tdtype: {}'.format(frame.dtype))
else:
print('No source found.')
def determine_size(self, capture):
"""Determines the height and width of the image source.
If no dimensions are available, this method defaults to a resolution of
640x480, thus returns (480, 640).
If capture has a get method it is assumed to understand
`cv2.CAP_PROP_FRAME_WIDTH` and `cv2.CAP_PROP_FRAME_HEIGHT` to get the
information. Otherwise it reads one frame from the source to determine
image dimensions.
Args:
capture: the source to read from.
Returns:
A tuple containing integers of height and width (simple casts).
"""
width = 640
height = 480
if capture and hasattr(capture, 'get'):
width = capture.get(cv2.CAP_PROP_FRAME_WIDTH)
height = capture.get(cv2.CAP_PROP_FRAME_HEIGHT)
else:
self.frame_offset += 1
ret, frame = capture.read()
if ret:
width = frame.shape[1]
height = frame.shape[0]
return (int(height), int(width))
def new_frame_seq(self):
"""Returns an endless frame counter.
Starts at self.frame_offset, in case some methods had to read frames
beforehand to gather information.
This function is called by TimedAnimation.
Returns:
an endless frame count
"""
return itertools.count(self.frame_offset)
def _init_draw(self):
"""Initializes the drawing of the frames by setting the images to
random colors.
This function is called by TimedAnimation.
"""
if self.original is not None:
self.original.set_data(np.random.random((10, 10, 3)))
self.processed.set_data(np.random.random((10, 10, 3)))
def read_frame(self):
"""Reads a frame and converts the color if needed.
In case no frame is available, i.e. self.capture.read() returns False
as the first return value, the event_source of the TimedAnimation is
stopped, and if possible the capture source released.
Returns:
None if stopped, otherwise the color converted source image.
"""
ret, frame = self.capture.read()
if not ret:
self.event_source.stop()
try:
self.capture.release()
except AttributeError:
# has no release method, thus just pass
pass
return None
if self.convert_color != -1 and is_color_image(frame):
return cv2.cvtColor(frame, self.convert_color)
return frame
def process_frame(self, frame):
"""Processes a frame with the user specified function.
Args:
frame: The input frame.
Returns:
The processed frame.
"""
return self.function(frame)
def annotate(self, framedata):
"""Annotates the processed axis with given annotations for
the provided framedata.
Args:
framedata: The current frame number.
"""
for artist in self.annotation_artists:
artist.remove()
self.annotation_artists = []
for annotation in self.annotations:
if annotation[2] > framedata:
return
if annotation[2] == framedata:
pos = annotation[0:2]
shape = self.annotations_default['shape']
color = self.annotations_default['color']
size = self.annotations_default['size']
line = self.annotations_default['line']
if len(annotation) > 3:
shape = annotation[3].get('shape', shape)
color = annotation[3].get('color', color)
size = annotation[3].get('size', size)
line = annotation[3].get('line', line)
if shape == 'CIRC' and hasattr(size, '__len__'):
size = 30
if not hasattr(color, '__len__'):
color = (color,) * 3
if shape == 'RECT':
patch = patches.Rectangle((pos[0] - size[0] // 2,
pos[1] - size[1] // 2),
size[0], size[1], fill=False,
lw=line, fc='none', ec=color)
elif shape == 'CIRC':
patch = patches.CirclePolygon(pos, radius=size, fc='none',
ec=color, lw=line)
self.annotation_artists.append(patch)
self.axes_processed.add_artist(self.annotation_artists[-1])
def _draw_frame(self, framedata):
"""Reads, processes and draws the frames.
If needed for color maps, conversions to gray scale are performed. In
case the images are no color images and no custom color maps are
defined, the colormap `gray` is applied.
This function is called by TimedAnimation.
Args:
framedata: The frame data.
"""
original = self.read_frame()
if original is None:
self.update_info(self.info_string(message='Finished.',
frame=framedata))
return
if self.original is not None:
processed = self.process_frame(original.copy())
if self.cmap_original is not None:
original = to_gray(original)
elif not is_color_image(original):
self.original.set_cmap('gray')
self.original.set_data(original)
else:
processed = self.process_frame(original)
if self.cmap_processed is not None:
processed = to_gray(processed)
elif not is_color_image(processed):
self.processed.set_cmap('gray')
if self.annotations:
self.annotate(framedata)
self.processed.set_data(processed)
self.update_info(self.info_string(frame=framedata))
def info_string(self, size=None, message='', frame=-1):
    """Builds a short status line describing the stream.

    The string contains the image size, the frame number and an optional
    message; empty messages and negative frame numbers are omitted. It is
    primarily used to update the suptitle of the plot figure.

    Args:
        size: Optional (height, width) tuple; falls back to self.size.
        message: Optional text appended at the end.
        frame: Frame number; skipped when negative.

    Returns:
        The assembled info string.
    """
    parts = []
    dimensions = size if size is not None else self.size
    if dimensions is not None:
        parts.append('Size: {1}x{0}'.format(*dimensions))
    if frame >= 0:
        parts.append('Frame: {}'.format(frame))
    if message != '':
        parts.append('{}'.format(message))
    return ' '.join(parts)
|
shoeffner/cvloop | cvloop/cvloop.py | cvloop.info_string | python | def info_string(self, size=None, message='', frame=-1):
info = []
if size is not None:
info.append('Size: {1}x{0}'.format(*size))
elif self.size is not None:
info.append('Size: {1}x{0}'.format(*self.size))
if frame >= 0:
info.append('Frame: {}'.format(frame))
if message != '':
info.append('{}'.format(message))
return ' '.join(info) | Returns information about the stream.
Generates a string containing size, frame number, and info messages.
Omits unnecessary information (e.g. empty messages and frame -1).
This method is primarily used to update the suptitle of the plot
figure.
Returns:
An info string. | train | https://github.com/shoeffner/cvloop/blob/3ddd311e9b679d16c8fd36779931380374de343c/cvloop/cvloop.py#L456-L477 | null | class cvloop(animation.TimedAnimation): # noqa: E501 pylint: disable=invalid-name, too-many-instance-attributes
"""Uses a TimedAnimation to efficiently render video sources with blit."""
def __init__(self, source=None, function=lambda x: x, *,
             side_by_side=False, convert_color=cv2.COLOR_BGR2RGB,
             cmaps=None, print_info=False, annotations=None,
             annotations_default=None):
    """Runs a video loop for the specified source and modifies the stream
    with the function.

    The source can either be an integer for a webcam device, a string to
    load a video file or a VideoCapture object. In the last case, the
    capture object will not be released by this function.

    The function takes in a frame and returns the modified frame. The
    default value passes the frame through unmodified.

    Args:
        source: The video source; ints for webcams/devices, a string to
            load a video file, or a VideoCapture object for fine tuning.
            (Default: 0)
        function: The modification function. (Default: identity)
        side_by_side: If True, both the original and the modified image
            are shown. (Default: False)
        convert_color: Converts the image with `cv2.cvtColor` using this
            value, unless the value is -1.
            (Default: `cv2.COLOR_BGR2RGB`)
        cmaps: If None, color maps are guessed. A single value is used for
            all plots; a tuple applies (original, processed), with None
            entries meaning "guess".
        print_info: If True, prints some info about the resource
            (dimensions, channels, dtype) and consumes one frame.
        annotations: A list/tuple of annotations [x, y, frame, options]
            where the optional options dict accepts the keys
            shape ('RECT' or 'CIRC'), line (width), color (RGB tuple,
            gray scalar or html hex-string) and size (radius for CIRC,
            (width, height) for RECT).
        annotations_default: Default options applied to annotations
            without their own options. If None, the defaults
            {'shape': 'RECT', 'color': '#228B22', 'line': 2,
            'size': (20, 20)} are used.
    """
    # Bug fix: the default used to be a mutable dict literal which is
    # shared between all calls/instances; create a fresh one per call.
    if annotations_default is None:
        annotations_default = {'shape': 'RECT',
                               'color': '#228B22',
                               'line': 2,
                               'size': (20, 20)}

    if plt.get_backend() in (
            'module://ipykernel.pylab.backend_inline',
            'nbAgg'):
        # Switch matplotlib to notebook mode via IPython's magics.
        for conf in get_ipython().configurables:
            if isinstance(conf, PylabMagics):
                conf.matplotlib(line='notebook')
                # NOTE(review): this call appears twice in the original
                # source; presumably redundant -- confirm before removing.
                conf.matplotlib(line='notebook')

    if source is not None:
        if isinstance(source, type(cv2.VideoCapture())) \
                or hasattr(source, 'read'):
            # Already a capture-like object; used as-is, never released
            # automatically.
            self.capture = source
        else:
            self.capture = cv2.VideoCapture(source)
    else:
        self.capture = cv2.VideoCapture(0)

    self.figure = plt.figure()
    self.connect_event_handlers()
    self.function = function
    self.convert_color = convert_color
    # Keep annotations sorted by frame number so annotate() can bail out
    # early while scanning.
    self.annotations = (None if not annotations else
                        sorted(annotations, key=lambda a: a[2]))
    self.annotations_default = annotations_default
    self.annotation_artists = []
    self.original = None
    self.processed = None
    self.frame_offset = 0

    # cmaps may be a single map, a (original, processed) tuple, or None.
    try:
        self.cmap_original = cmaps if isinstance(cmaps, str) else cmaps[0]
    except (IndexError, TypeError):
        self.cmap_original = None
    try:
        self.cmap_processed = cmaps if isinstance(cmaps, str) else cmaps[1]
    except (IndexError, TypeError):
        self.cmap_processed = None

    if side_by_side:
        axes_original = self.figure.add_subplot(1, 2, 1)
        axes_processed = self.figure.add_subplot(1, 2, 2)
    else:
        axes_original = None
        axes_processed = self.figure.add_subplot(1, 1, 1)

    if print_info:
        self.print_info(self.capture)
    self.size = self.determine_size(self.capture)

    self.original = prepare_axes(axes_original, 'Original',
                                 self.size, self.cmap_original)
    self.processed = prepare_axes(axes_processed, 'Processed',
                                  self.size, self.cmap_processed)
    self.axes_processed = axes_processed

    self.update_info()
    super().__init__(self.figure, interval=50, blit=True)
    plt.show()
def connect_event_handlers(self):
    """Registers this loop's event handlers with the figure canvas."""
    for event, handler in (('close_event', self.evt_release),
                           ('pause_event', self.evt_toggle_pause)):
        self.figure.canvas.mpl_connect(event, handler)
def evt_release(self, *args):  # pylint: disable=unused-argument
    """Attempts to release the capture device.

    Sources without a release() method (e.g. custom readers) are simply
    left alone.
    """
    try:
        self.capture.release()
    except AttributeError:
        # No release method available -- nothing to clean up.
        pass
def evt_toggle_pause(self, *args):  # pylint: disable=unused-argument
    """Pauses a running video source or resumes a paused one."""
    timer = self.event_source._timer  # noqa: e501 pylint: disable=protected-access
    if timer is not None:
        self.event_source.stop()
    else:
        self.event_source.start()
def print_info(self, capture):
    """Prints information about the unprocessed image.

    Reads one frame from the source (counted in self.frame_offset) to
    determine dimensions, color channels, value range and dtype.

    Args:
        capture: the source to read from.
    """
    self.frame_offset += 1
    success, frame = capture.read()
    if not success:
        print('No source found.')
        return
    channels = frame.shape[2] if len(frame.shape) > 2 else 1
    print('Capture Information')
    print('\tDimensions (HxW): {}x{}'.format(*frame.shape[0:2]))
    print('\tColor channels: {}'.format(channels))
    print('\tColor range: {}-{}'.format(np.min(frame),
                                        np.max(frame)))
    print('\tdtype: {}'.format(frame.dtype))
def determine_size(self, capture):
    """Determines the height and width of the image source.

    If the source offers a get method it is queried for
    cv2.CAP_PROP_FRAME_WIDTH/HEIGHT; otherwise one frame is read (counted
    in self.frame_offset) and measured. Falls back to 640x480 when
    nothing can be determined.

    Args:
        capture: the source to read from.

    Returns:
        A tuple (int(height), int(width)).
    """
    height, width = 480, 640
    if capture and hasattr(capture, 'get'):
        width = capture.get(cv2.CAP_PROP_FRAME_WIDTH)
        height = capture.get(cv2.CAP_PROP_FRAME_HEIGHT)
    else:
        self.frame_offset += 1
        success, frame = capture.read()
        if success:
            height, width = frame.shape[0], frame.shape[1]
    return (int(height), int(width))
def new_frame_seq(self):
    """Returns an endless frame counter.

    Counting starts at self.frame_offset to account for frames that were
    consumed while gathering stream information. Called by
    TimedAnimation.

    Returns:
        an endless frame count
    """
    return itertools.count(start=self.frame_offset)
def _init_draw(self):
"""Initializes the drawing of the frames by setting the images to
random colors.
This function is called by TimedAnimation.
"""
if self.original is not None:
self.original.set_data(np.random.random((10, 10, 3)))
self.processed.set_data(np.random.random((10, 10, 3)))
def read_frame(self):
    """Reads a frame and converts the color if needed.

    When the source is exhausted (capture.read() reports failure), the
    animation's event source is stopped and, if possible, the capture is
    released.

    Returns:
        None if stopped, otherwise the (possibly color converted) frame.
    """
    success, frame = self.capture.read()
    if not success:
        self.event_source.stop()
        try:
            self.capture.release()
        except AttributeError:
            # Source has no release method, thus just pass.
            pass
        return None
    if self.convert_color != -1 and is_color_image(frame):
        return cv2.cvtColor(frame, self.convert_color)
    return frame
def process_frame(self, frame):
    """Applies the user supplied modification function to a frame.

    Args:
        frame: The input frame.

    Returns:
        The processed frame.
    """
    result = self.function(frame)
    return result
def annotate(self, framedata):
    """Draws the annotations scheduled for the provided frame number on
    the processed axis.

    Args:
        framedata: The current frame number.
    """
    # Remove the artists drawn for the previous frame.
    for artist in self.annotation_artists:
        artist.remove()
    self.annotation_artists = []

    for annotation in self.annotations:
        if annotation[2] > framedata:
            # Annotations are sorted by frame number, so no further
            # entry can match anymore.
            return
        if annotation[2] != framedata:
            continue
        pos = annotation[0:2]
        # Layer the per-annotation options over the defaults.
        options = dict(self.annotations_default)
        if len(annotation) > 3:
            options.update(annotation[3])
        shape = options['shape']
        color = options['color']
        size = options['size']
        line = options['line']
        if shape == 'CIRC' and hasattr(size, '__len__'):
            # Circles need a scalar radius; use a default one instead.
            size = 30
        if not hasattr(color, '__len__'):
            # Expand a gray scalar into an RGB triple.
            color = (color,) * 3
        if shape == 'RECT':
            patch = patches.Rectangle((pos[0] - size[0] // 2,
                                       pos[1] - size[1] // 2),
                                      size[0], size[1], fill=False,
                                      lw=line, fc='none', ec=color)
        elif shape == 'CIRC':
            patch = patches.CirclePolygon(pos, radius=size, fc='none',
                                          ec=color, lw=line)
        self.annotation_artists.append(patch)
        self.axes_processed.add_artist(self.annotation_artists[-1])
def _draw_frame(self, framedata):
"""Reads, processes and draws the frames.
If needed for color maps, conversions to gray scale are performed. In
case the images are no color images and no custom color maps are
defined, the colormap `gray` is applied.
This function is called by TimedAnimation.
Args:
framedata: The frame data.
"""
original = self.read_frame()
if original is None:
self.update_info(self.info_string(message='Finished.',
frame=framedata))
return
if self.original is not None:
processed = self.process_frame(original.copy())
if self.cmap_original is not None:
original = to_gray(original)
elif not is_color_image(original):
self.original.set_cmap('gray')
self.original.set_data(original)
else:
processed = self.process_frame(original)
if self.cmap_processed is not None:
processed = to_gray(processed)
elif not is_color_image(processed):
self.processed.set_cmap('gray')
if self.annotations:
self.annotate(framedata)
self.processed.set_data(processed)
self.update_info(self.info_string(frame=framedata))
def update_info(self, custom=None):
    """Updates the figure's suptitle.

    Args:
        custom: If not None, used instead of self.info_string().
    """
    text = custom if custom is not None else self.info_string()
    self.figure.suptitle(text)
|
shoeffner/cvloop | tools/sanitize_ipynb.py | main | python | def main():
with open(sys.argv[1], 'r') as nbfile:
notebook = json.load(nbfile)
# remove kernelspec (venvs)
try:
del notebook['metadata']['kernelspec']
except KeyError:
pass
# remove outputs and metadata, set execution counts to None
for cell in notebook['cells']:
try:
if cell['cell_type'] == 'code':
cell['outputs'] = []
cell['execution_count'] = None
cell['metadata'] = {}
except KeyError:
pass
with open(sys.argv[1], 'w') as nbfile:
json.dump(notebook, nbfile, indent=1) | Sanitizes the loaded *.ipynb. | train | https://github.com/shoeffner/cvloop/blob/3ddd311e9b679d16c8fd36779931380374de343c/tools/sanitize_ipynb.py#L8-L30 | null | """Removes unnecessary information from an *.ipynb to make it
easier to track it with git."""
import json
import sys
if __name__ == '__main__':
main()
|
kontron/python-aardvark | pyaardvark/aardvark.py | find_devices | python | def find_devices():
# first fetch the number of attached devices, so we can create a buffer
# with the exact amount of entries. api expects array of u16
num_devices = api.py_aa_find_devices(0, array.array('H'))
_raise_error_if_negative(num_devices)
# return an empty list if no device is connected
if num_devices == 0:
return list()
ports = array.array('H', (0,) * num_devices)
unique_ids = array.array('I', (0,) * num_devices)
num_devices = api.py_aa_find_devices_ext(len(ports), len(unique_ids),
ports, unique_ids)
_raise_error_if_negative(num_devices)
if num_devices == 0:
return list()
del ports[num_devices:]
del unique_ids[num_devices:]
devices = list()
for port, uid in zip(ports, unique_ids):
in_use = bool(port & PORT_NOT_FREE)
dev = dict(
port=port & ~PORT_NOT_FREE,
serial_number=_unique_id_str(uid),
in_use=in_use)
devices.append(dev)
return devices | Return a list of dictionaries. Each dictionary represents one device.
The dictionary contains the following keys: port, unique_id and in_use.
`port` can be used with :func:`open`. `serial_number` is the serial number
of the device (and can also be used with :func:`open`) and `in_use`
indicates whether the device was opened before and can currently not be
opened.
.. note::
There is no guarantee, that the returned information is still valid
when you open the device. Esp. if you open a device by the port, the
unique_id may change because you've just opened another device. Eg. it
may be disconnected from the machine after you call :func:`find_devices`
but before you call :func:`open`.
To open a device by its serial number, you should use the :func:`open`
with the `serial_number` parameter. | train | https://github.com/kontron/python-aardvark/blob/9827f669fbdc5bceb98e7d08a294b4e4e455d0d5/pyaardvark/aardvark.py#L91-L141 | null | # Copyright (c) 2014-2018 Kontron Europe GmbH
# 2017 CAMCO Produktions- und Vertriebs-GmbH
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from builtins import bytes
import array
import logging
import sys
from .constants import *
if sys.platform.startswith('linux'):
try:
from .ext.linux32 import aardvark as api
except ImportError:
try:
from .ext.linux64 import aardvark as api
except ImportError:
api = None
elif sys.platform.startswith('win32'):
try:
from .ext.win32 import aardvark as api
except ImportError:
try:
from .ext.win64 import aardvark as api
except ImportError:
api = None
elif sys.platform.startswith('darwin'):
try:
from .ext.osx32 import aardvark as api
except ImportError:
try:
from .ext.osx64 import aardvark as api
except ImportError:
api = None
else:
api = None
if not api:
raise RuntimeError('Unable to find suitable binary interface. '
'Unsupported platform?')
log = logging.getLogger(__name__)
def _raise_error_if_negative(val):
"""Raises an :class:`IOError` if `val` is negative."""
if val < 0:
raise IOError(val, api.py_aa_status_string(val))
def status_string(code):
    """Returns the symbolic I2C_STATUS_* name for a numeric status code.

    Falls back to 'I2C_STATUS_UNKNOWN_STATUS' if no matching constant is
    found in the module namespace.
    """
    matches = (name for name, value in globals().items()
               if name.startswith('I2C_STATUS_') and value == code)
    return next(matches, 'I2C_STATUS_UNKNOWN_STATUS')
def _raise_i2c_status_code_error_if_failure(code):
    """Raises an :class:`IOError` unless `code` is :data:`I2C_STATUS_OK`."""
    if code == I2C_STATUS_OK:
        return
    raise IOError(code, status_string(code))
def _unique_id_str(unique_id):
id1 = unique_id / 1000000
id2 = unique_id % 1000000
return '%04d-%06d' % (id1, id2)
def _to_version_str(v):
return '%d.%02d' % (v >> 8, v & 0xff)
def api_version():
    """Returns the version of the underlying C module as a string.

    It returns the same value as :attr:`Aardvark.api_version`, but does
    not require an open device.
    """
    raw = api.py_version() & 0xffff
    return _to_version_str(raw)
def open(port=None, serial_number=None):
    """Open an aardvark device and return an :class:`Aardvark` object.

    The `port` can be retrieved by :func:`find_devices`; usually the
    first device is 0, the second 1, etc. If you use only one device you
    can omit the parameter, in which case 0 is used.

    Alternatively a device can be opened by its serial number (format
    `NNNN-MMMMMMM`, printed on the device and exposed via USB
    properties). If both are given, the serial number wins.

    Raises an :class:`IOError` if the port (or serial number) does not
    exist, is already connected or an incompatible device is found.

    .. note::

        The open-by-serial-number method scans the devices, and that scan
        can be outdated by the time the device is opened (see
        :func:`find_devices`). Therefore the serial number is re-checked
        after opening and an :class:`IOError` is raised on mismatch; no
        retry mechanism is implemented.
    """
    if serial_number is not None:
        for candidate in find_devices():
            if candidate['serial_number'] == serial_number:
                break
        else:
            _raise_error_if_negative(ERR_UNABLE_TO_OPEN)
        dev = Aardvark(candidate['port'])
        # Guard against the race described above: make sure we really
        # opened the requested device.
        if dev.unique_id_str() != serial_number:
            dev.close()
            _raise_error_if_negative(ERR_UNABLE_TO_OPEN)
    elif port is None:
        dev = Aardvark()
    else:
        dev = Aardvark(port)
    return dev
class Aardvark(object):
"""Represents an Aardvark device."""
BUFFER_SIZE = 65535
def __init__(self, port=0):
ret, ver = api.py_aa_open_ext(port)
_raise_error_if_negative(ret)
#: A handle which is used as the first paramter for all calls to the
#: underlying API.
self.handle = ret
# assign some useful names
version = dict(
software = ver[0],
firmware = ver[1],
hardware = ver[2],
sw_req_by_fw = ver[3],
fw_req_by_sw = ver[4],
api_req_by_sw = ver[5],
)
#: Hardware revision of the host adapter as a string. The format is
#: ``M.NN`` where `M` is the major number and `NN` the zero padded
#: minor number.
self.hardware_revision = _to_version_str(version['hardware'])
#: Firmware version of the host adapter as a string. See
#: :attr:`hardware_revision` for more information on the format.
self.firmware_version = _to_version_str(version['firmware'])
#: Version of underlying C module (aardvark.so, aardvark.pyd) as a
#: string. See :attr:`hardware_revision` for more information on the
#: format.
self.api_version = _to_version_str(version['software'])
# version checks
if version['firmware'] < version['fw_req_by_sw']:
log.debug('The API requires a firmware version >= %s, but the '
'device has version %s',
_to_version_str(version['fw_req_by_sw']),
_to_version_str(version['firmware']))
ret = ERR_INCOMPATIBLE_DEVICE
elif version['software'] < version['sw_req_by_fw']:
log.debug('The firmware requires an API version >= %s, but the '
'API has version %s',
_to_version_str(version['sw_req_by_fw']),
_to_version_str(version['software']))
ret = ERR_INCOMPATIBLE_LIBRARY
_raise_error_if_negative(ret)
# Initialize shadow variables
self._i2c_slave_response = None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
return False
def close(self):
"""Close the device."""
api.py_aa_close(self.handle)
self.handle = None
def unique_id(self):
"""Return the unique identifier of the device. The identifier is the
serial number you can find on the adapter without the dash. Eg. the
serial number 0012-345678 would be 12345678.
"""
return api.py_aa_unique_id(self.handle)
def unique_id_str(self):
"""Return the unique identifier. But unlike :func:`unique_id`, the ID
is returned as a string which has the format NNNN-MMMMMMM.
"""
return _unique_id_str(self.unique_id())
def _interface_configuration(self, value):
ret = api.py_aa_configure(self.handle, value)
_raise_error_if_negative(ret)
return ret
@property
def enable_i2c(self):
"""Set this to `True` to enable the hardware I2C interface. If set to
`False` the hardware interface will be disabled and its pins (SDA and
SCL) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C
@enable_i2c.setter
def enable_i2c(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_GPIO_I2C
elif value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_SPI_GPIO
if new_config != config:
self._interface_configuration(new_config)
@property
def enable_spi(self):
"""Set this to `True` to enable the hardware SPI interface. If set to
`False` the hardware interface will be disabled and its pins (MISO,
MOSI, SCK and SS) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_SPI_GPIO or config == CONFIG_SPI_I2C
@enable_spi.setter
def enable_spi(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_SPI_GPIO
elif value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_GPIO_I2C
if new_config != config:
self._interface_configuration(new_config)
@property
def i2c_bitrate(self):
"""I2C bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set.
The power-on default value is 100 kHz.
"""
ret = api.py_aa_i2c_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bitrate.setter
def i2c_bitrate(self, value):
ret = api.py_aa_i2c_bitrate(self.handle, value)
_raise_error_if_negative(ret)
@property
def i2c_pullups(self):
"""Setting this to `True` will enable the I2C pullup resistors. If set
to `False` the pullup resistors will be disabled.
Raises an :exc:`IOError` if the hardware adapter does not support
pullup resistors.
"""
ret = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)
_raise_error_if_negative(ret)
return ret
@i2c_pullups.setter
def i2c_pullups(self, value):
if value:
pullup = I2C_PULLUP_BOTH
else:
pullup = I2C_PULLUP_NONE
ret = api.py_aa_i2c_pullup(self.handle, pullup)
_raise_error_if_negative(ret)
@property
def target_power(self):
"""Setting this to `True` will activate the power pins (4 and 6). If
set to `False` the power will be deactivated.
Raises an :exc:`IOError` if the hardware adapter does not support
the switchable power pins.
"""
ret = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)
_raise_error_if_negative(ret)
return ret
@target_power.setter
def target_power(self, value):
if value:
power = TARGET_POWER_BOTH
else:
power = TARGET_POWER_NONE
ret = api.py_aa_target_power(self.handle, power)
_raise_error_if_negative(ret)
@property
def i2c_bus_timeout(self):
"""I2C bus lock timeout in ms.
Minimum value is 10 ms and the maximum value is 450 ms. Not every value
can be set and will be rounded to the next possible number. You can
read back the property to get the actual value.
The power-on default value is 200 ms.
"""
ret = api.py_aa_i2c_bus_timeout(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bus_timeout.setter
def i2c_bus_timeout(self, timeout):
ret = api.py_aa_i2c_bus_timeout(self.handle, timeout)
_raise_error_if_negative(ret)
def i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):
"""Make an I2C write access.
The given I2C device is addressed and data given as a string is
written. The transaction is finished with an I2C stop condition unless
I2C_NO_STOP is set in the flags.
10 bit addresses are supported if the I2C_10_BIT_ADDR flag is set.
"""
data = array.array('B', data)
status, _ = api.py_aa_i2c_write_ext(self.handle, i2c_address, flags,
len(data), data)
_raise_i2c_status_code_error_if_failure(status)
def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):
"""Make an I2C read access.
The given I2C device is addressed and clock cycles for `length` bytes
are generated. A short read will occur if the device generates an early
NAK.
The transaction is finished with an I2C stop condition unless the
I2C_NO_STOP flag is set.
"""
data = array.array('B', (0,) * length)
status, rx_len = api.py_aa_i2c_read_ext(self.handle, addr, flags,
length, data)
_raise_i2c_status_code_error_if_failure(status)
del data[rx_len:]
return bytes(data)
def i2c_master_write_read(self, i2c_address, data, length):
"""Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices like
EEPROMs, port expander, etc.
Basically, this is just a convenient function which internally uses
`i2c_master_write` and `i2c_master_read`.
"""
self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
return self.i2c_master_read(i2c_address, length)
def poll(self, timeout=None):
"""Wait for an event to occur.
If `timeout` is given, if specifies the length of time in milliseconds
which the function will wait for events before returing. If `timeout`
is omitted, negative or None, the call will block until there is an
event.
Returns a list of events. In case no event is pending, an empty list is
returned.
"""
if timeout is None:
timeout = -1
ret = api.py_aa_async_poll(self.handle, timeout)
_raise_error_if_negative(ret)
events = list()
for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
POLL_I2C_MONITOR):
if ret & event:
events.append(event)
return events
def enable_i2c_slave(self, slave_address):
"""Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
"""
ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,
self.BUFFER_SIZE, self.BUFFER_SIZE)
_raise_error_if_negative(ret)
def disable_i2c_slave(self):
"""Disable I2C slave mode."""
ret = api.py_aa_i2c_slave_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_slave_read(self):
"""Read the bytes from an I2C slave reception.
The bytes are returned as a string object.
"""
data = array.array('B', (0,) * self.BUFFER_SIZE)
status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,
self.BUFFER_SIZE, data)
_raise_i2c_status_code_error_if_failure(status)
# In case of general call, actually return the general call address
if addr == 0x80:
addr = 0x00
del data[rx_len:]
return (addr, bytes(data))
@property
def i2c_slave_response(self):
"""Response to next read command.
An array of bytes that will be transmitted to the I2C master with the
next read operation.
Warning: Due to the fact that the Aardvark API does not provide a means
to read out this value, it is buffered when setting the property.
Reading the property therefore might not return what is actually stored
in the device.
"""
return self._i2c_slave_response
@i2c_slave_response.setter
def i2c_slave_response(self, data):
data = array.array('B', data)
ret = api.py_aa_i2c_slave_set_response(self.handle, len(data), data)
_raise_error_if_negative(ret)
self._i2c_slave_response = data
@property
def i2c_slave_last_transmit_size(self):
"""Returns the number of bytes transmitted by the slave."""
ret = api.py_aa_i2c_slave_write_stats(self.handle)
_raise_error_if_negative(ret)
return ret
def enable_i2c_monitor(self):
"""Activate the I2C monitor.
Enabling the monitor will disable all other functions of the adapter.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_enable(self.handle)
_raise_error_if_negative(ret)
def disable_i2c_monitor(self):
"""Disable the I2C monitor.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_monitor_read(self):
"""Retrieved any data fetched by the monitor.
This function has an integrated timeout mechanism. You should use
:func:`poll` to determine if there is any data available.
Returns a list of data bytes and special symbols. There are three
special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
I2C_MONITOR_STOP.
"""
data = array.array('H', (0,) * self.BUFFER_SIZE)
ret = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
data)
_raise_error_if_negative(ret)
del data[ret:]
return data.tolist()
@property
def spi_bitrate(self):
"""SPI bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set. The slowest bitrate supported is 125kHz. Any smaller value will
be rounded up to 125kHz.
The power-on default value is 1000 kHz.
"""
ret = api.py_aa_spi_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@spi_bitrate.setter
def spi_bitrate(self, value):
ret = api.py_aa_spi_bitrate(self.handle, value)
_raise_error_if_negative(ret)
def spi_configure(self, polarity, phase, bitorder):
"""Configure the SPI interface."""
ret = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
_raise_error_if_negative(ret)
def spi_configure_mode(self, spi_mode):
"""Configure the SPI interface by the well known SPI modes."""
if spi_mode == SPI_MODE_0:
self.spi_configure(SPI_POL_RISING_FALLING,
SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)
elif spi_mode == SPI_MODE_3:
self.spi_configure(SPI_POL_FALLING_RISING,
SPI_PHASE_SETUP_SAMPLE, SPI_BITORDER_MSB)
else:
raise RuntimeError('SPI Mode not supported')
def spi_write(self, data):
"""Write a stream of bytes to a SPI device."""
data_out = array.array('B', data)
data_in = array.array('B', (0,) * len(data_out))
ret = api.py_aa_spi_write(self.handle, len(data_out), data_out,
len(data_in), data_in)
_raise_error_if_negative(ret)
return bytes(data_in)
def spi_ss_polarity(self, polarity):
"""Change the output polarity on the SS line.
Please note, that this only affects the master functions.
"""
ret = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
_raise_error_if_negative(ret)
|
kontron/python-aardvark | pyaardvark/aardvark.py | open | python | def open(port=None, serial_number=None):
if port is None and serial_number is None:
dev = Aardvark()
elif serial_number is not None:
for d in find_devices():
if d['serial_number'] == serial_number:
break
else:
_raise_error_if_negative(ERR_UNABLE_TO_OPEN)
dev = Aardvark(d['port'])
# make sure we opened the correct device
if dev.unique_id_str() != serial_number:
dev.close()
_raise_error_if_negative(ERR_UNABLE_TO_OPEN)
else:
dev = Aardvark(port)
return dev | Open an aardvark device and return an :class:`Aardvark` object. If the
device cannot be opened an :class:`IOError` is raised.
The `port` can be retrieved by :func:`find_devices`. Usually, the first
device is 0, the second 1, etc.
If you are using only one device, you can therefore omit the parameter
in which case 0 is used.
Another method to open a device is to use the serial number. You can either
find the number on the device itself or in the corresponding USB
property. The serial number is a string which looks like `NNNN-MMMMMMM`.
Raises an :class:`IOError` if the port (or serial number) does not exist,
is already connected or an incompatible device is found.
.. note::
There is a small chance that this function raises an :class:`IOError`
although the correct device is available and not opened. The
open-by-serial-number method works by scanning the devices. But as
explained in :func:`find_devices`, the returned information may be
outdated. Therefore, :func:`open` checks the serial number once the
device is opened and if it is not the expected one, raises
:class:`IOError`. No retry mechanism is implemented.
As long as nobody comes along with a better idea, this failure case is
up to the user. | train | https://github.com/kontron/python-aardvark/blob/9827f669fbdc5bceb98e7d08a294b4e4e455d0d5/pyaardvark/aardvark.py#L143-L191 | null | # Copyright (c) 2014-2018 Kontron Europe GmbH
# 2017 CAMCO Produktions- und Vertriebs-GmbH
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from builtins import bytes
import array
import logging
import sys
from .constants import *
if sys.platform.startswith('linux'):
try:
from .ext.linux32 import aardvark as api
except ImportError:
try:
from .ext.linux64 import aardvark as api
except ImportError:
api = None
elif sys.platform.startswith('win32'):
try:
from .ext.win32 import aardvark as api
except ImportError:
try:
from .ext.win64 import aardvark as api
except ImportError:
api = None
elif sys.platform.startswith('darwin'):
try:
from .ext.osx32 import aardvark as api
except ImportError:
try:
from .ext.osx64 import aardvark as api
except ImportError:
api = None
else:
api = None
if not api:
raise RuntimeError('Unable to find suitable binary interface. '
'Unsupported platform?')
log = logging.getLogger(__name__)
def _raise_error_if_negative(val):
"""Raises an :class:`IOError` if `val` is negative."""
if val < 0:
raise IOError(val, api.py_aa_status_string(val))
def status_string(code):
for k, v in globals().items():
if k.startswith('I2C_STATUS_') and v == code:
return k
return 'I2C_STATUS_UNKNOWN_STATUS'
def _raise_i2c_status_code_error_if_failure(code):
"""Raises an :class:`IOError` if `code` is not :data:`I2C_STATUS_OK`."""
if code != I2C_STATUS_OK:
raise IOError(code, status_string(code))
def _unique_id_str(unique_id):
id1 = unique_id / 1000000
id2 = unique_id % 1000000
return '%04d-%06d' % (id1, id2)
def _to_version_str(v):
return '%d.%02d' % (v >> 8, v & 0xff)
def api_version():
"""Returns the underlying C module (aardvark.so, aardvark.pyd) as a string.
It returns the same value as :attr:`Aardvark.api_version` but you don't
need to open a device.
"""
return _to_version_str(api.py_version() & 0xffff)
def find_devices():
"""Return a list of dictionaries. Each dictionary represents one device.
The dictionary contains the following keys: port, unique_id and in_use.
`port` can be used with :func:`open`. `serial_number` is the serial number
of the device (and can also be used with :func:`open`) and `in_use`
indicates whether the device was opened before and can currently not be
opened.
.. note::
There is no guarantee, that the returned information is still valid
when you open the device. Esp. if you open a device by the port, the
unique_id may change because you've just opened another device. Eg. it
may be disconnected from the machine after you call :func:`find_devices`
but before you call :func:`open`.
To open a device by its serial number, you should use the :func:`open`
with the `serial_number` parameter.
"""
# first fetch the number of attached devices, so we can create a buffer
# with the exact amount of entries. api expects array of u16
num_devices = api.py_aa_find_devices(0, array.array('H'))
_raise_error_if_negative(num_devices)
# return an empty list if no device is connected
if num_devices == 0:
return list()
ports = array.array('H', (0,) * num_devices)
unique_ids = array.array('I', (0,) * num_devices)
num_devices = api.py_aa_find_devices_ext(len(ports), len(unique_ids),
ports, unique_ids)
_raise_error_if_negative(num_devices)
if num_devices == 0:
return list()
del ports[num_devices:]
del unique_ids[num_devices:]
devices = list()
for port, uid in zip(ports, unique_ids):
in_use = bool(port & PORT_NOT_FREE)
dev = dict(
port=port & ~PORT_NOT_FREE,
serial_number=_unique_id_str(uid),
in_use=in_use)
devices.append(dev)
return devices
class Aardvark(object):
"""Represents an Aardvark device."""
BUFFER_SIZE = 65535
def __init__(self, port=0):
ret, ver = api.py_aa_open_ext(port)
_raise_error_if_negative(ret)
#: A handle which is used as the first parameter for all calls to the
#: underlying API.
self.handle = ret
# assign some useful names
version = dict(
software = ver[0],
firmware = ver[1],
hardware = ver[2],
sw_req_by_fw = ver[3],
fw_req_by_sw = ver[4],
api_req_by_sw = ver[5],
)
#: Hardware revision of the host adapter as a string. The format is
#: ``M.NN`` where `M` is the major number and `NN` the zero padded
#: minor number.
self.hardware_revision = _to_version_str(version['hardware'])
#: Firmware version of the host adapter as a string. See
#: :attr:`hardware_revision` for more information on the format.
self.firmware_version = _to_version_str(version['firmware'])
#: Version of underlying C module (aardvark.so, aardvark.pyd) as a
#: string. See :attr:`hardware_revision` for more information on the
#: format.
self.api_version = _to_version_str(version['software'])
# version checks
if version['firmware'] < version['fw_req_by_sw']:
log.debug('The API requires a firmware version >= %s, but the '
'device has version %s',
_to_version_str(version['fw_req_by_sw']),
_to_version_str(version['firmware']))
ret = ERR_INCOMPATIBLE_DEVICE
elif version['software'] < version['sw_req_by_fw']:
log.debug('The firmware requires an API version >= %s, but the '
'API has version %s',
_to_version_str(version['sw_req_by_fw']),
_to_version_str(version['software']))
ret = ERR_INCOMPATIBLE_LIBRARY
_raise_error_if_negative(ret)
# Initialize shadow variables
self._i2c_slave_response = None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
return False
def close(self):
"""Close the device."""
api.py_aa_close(self.handle)
self.handle = None
def unique_id(self):
"""Return the unique identifier of the device. The identifier is the
serial number you can find on the adapter without the dash. Eg. the
serial number 0012-345678 would be 12345678.
"""
return api.py_aa_unique_id(self.handle)
def unique_id_str(self):
"""Return the unique identifier. But unlike :func:`unique_id`, the ID
is returned as a string which has the format NNNN-MMMMMMM.
"""
return _unique_id_str(self.unique_id())
def _interface_configuration(self, value):
ret = api.py_aa_configure(self.handle, value)
_raise_error_if_negative(ret)
return ret
@property
def enable_i2c(self):
"""Set this to `True` to enable the hardware I2C interface. If set to
`False` the hardware interface will be disabled and its pins (SDA and
SCL) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C
@enable_i2c.setter
def enable_i2c(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_GPIO_I2C
elif value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_SPI_GPIO
if new_config != config:
self._interface_configuration(new_config)
@property
def enable_spi(self):
"""Set this to `True` to enable the hardware SPI interface. If set to
`False` the hardware interface will be disabled and its pins (MISO,
MOSI, SCK and SS) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_SPI_GPIO or config == CONFIG_SPI_I2C
@enable_spi.setter
def enable_spi(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_SPI_GPIO
elif value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_GPIO_I2C
if new_config != config:
self._interface_configuration(new_config)
@property
def i2c_bitrate(self):
"""I2C bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set.
The power-on default value is 100 kHz.
"""
ret = api.py_aa_i2c_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bitrate.setter
def i2c_bitrate(self, value):
ret = api.py_aa_i2c_bitrate(self.handle, value)
_raise_error_if_negative(ret)
@property
def i2c_pullups(self):
"""Setting this to `True` will enable the I2C pullup resistors. If set
to `False` the pullup resistors will be disabled.
Raises an :exc:`IOError` if the hardware adapter does not support
pullup resistors.
"""
ret = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)
_raise_error_if_negative(ret)
return ret
@i2c_pullups.setter
def i2c_pullups(self, value):
if value:
pullup = I2C_PULLUP_BOTH
else:
pullup = I2C_PULLUP_NONE
ret = api.py_aa_i2c_pullup(self.handle, pullup)
_raise_error_if_negative(ret)
@property
def target_power(self):
"""Setting this to `True` will activate the power pins (4 and 6). If
set to `False` the power will be deactivated.
Raises an :exc:`IOError` if the hardware adapter does not support
the switchable power pins.
"""
ret = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)
_raise_error_if_negative(ret)
return ret
@target_power.setter
def target_power(self, value):
if value:
power = TARGET_POWER_BOTH
else:
power = TARGET_POWER_NONE
ret = api.py_aa_target_power(self.handle, power)
_raise_error_if_negative(ret)
@property
def i2c_bus_timeout(self):
"""I2C bus lock timeout in ms.
Minimum value is 10 ms and the maximum value is 450 ms. Not every value
can be set and will be rounded to the next possible number. You can
read back the property to get the actual value.
The power-on default value is 200 ms.
"""
ret = api.py_aa_i2c_bus_timeout(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bus_timeout.setter
def i2c_bus_timeout(self, timeout):
ret = api.py_aa_i2c_bus_timeout(self.handle, timeout)
_raise_error_if_negative(ret)
def i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):
"""Make an I2C write access.
The given I2C device is addressed and data given as a string is
written. The transaction is finished with an I2C stop condition unless
I2C_NO_STOP is set in the flags.
10 bit addresses are supported if the I2C_10_BIT_ADDR flag is set.
"""
data = array.array('B', data)
status, _ = api.py_aa_i2c_write_ext(self.handle, i2c_address, flags,
len(data), data)
_raise_i2c_status_code_error_if_failure(status)
def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):
"""Make an I2C read access.
The given I2C device is addressed and clock cycles for `length` bytes
are generated. A short read will occur if the device generates an early
NAK.
The transaction is finished with an I2C stop condition unless the
I2C_NO_STOP flag is set.
"""
data = array.array('B', (0,) * length)
status, rx_len = api.py_aa_i2c_read_ext(self.handle, addr, flags,
length, data)
_raise_i2c_status_code_error_if_failure(status)
del data[rx_len:]
return bytes(data)
def i2c_master_write_read(self, i2c_address, data, length):
"""Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices like
EEPROMs, port expander, etc.
Basically, this is just a convenient function which internally uses
`i2c_master_write` and `i2c_master_read`.
"""
self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
return self.i2c_master_read(i2c_address, length)
def poll(self, timeout=None):
"""Wait for an event to occur.
If `timeout` is given, it specifies the length of time in milliseconds
which the function will wait for events before returning. If `timeout`
is omitted, negative or None, the call will block until there is an
event.
Returns a list of events. In case no event is pending, an empty list is
returned.
"""
if timeout is None:
timeout = -1
ret = api.py_aa_async_poll(self.handle, timeout)
_raise_error_if_negative(ret)
events = list()
for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
POLL_I2C_MONITOR):
if ret & event:
events.append(event)
return events
def enable_i2c_slave(self, slave_address):
"""Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
"""
ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,
self.BUFFER_SIZE, self.BUFFER_SIZE)
_raise_error_if_negative(ret)
def disable_i2c_slave(self):
"""Disable I2C slave mode."""
ret = api.py_aa_i2c_slave_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_slave_read(self):
"""Read the bytes from an I2C slave reception.
The bytes are returned as a string object.
"""
data = array.array('B', (0,) * self.BUFFER_SIZE)
status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,
self.BUFFER_SIZE, data)
_raise_i2c_status_code_error_if_failure(status)
# In case of general call, actually return the general call address
if addr == 0x80:
addr = 0x00
del data[rx_len:]
return (addr, bytes(data))
@property
def i2c_slave_response(self):
"""Response to next read command.
An array of bytes that will be transmitted to the I2C master with the
next read operation.
Warning: Due to the fact that the Aardvark API does not provide a means
to read out this value, it is buffered when setting the property.
Reading the property therefore might not return what is actually stored
in the device.
"""
return self._i2c_slave_response
@i2c_slave_response.setter
def i2c_slave_response(self, data):
data = array.array('B', data)
ret = api.py_aa_i2c_slave_set_response(self.handle, len(data), data)
_raise_error_if_negative(ret)
self._i2c_slave_response = data
@property
def i2c_slave_last_transmit_size(self):
"""Returns the number of bytes transmitted by the slave."""
ret = api.py_aa_i2c_slave_write_stats(self.handle)
_raise_error_if_negative(ret)
return ret
def enable_i2c_monitor(self):
"""Activate the I2C monitor.
Enabling the monitor will disable all other functions of the adapter.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_enable(self.handle)
_raise_error_if_negative(ret)
def disable_i2c_monitor(self):
"""Disable the I2C monitor.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_monitor_read(self):
"""Retrieve any data fetched by the monitor.
This function has an integrated timeout mechanism. You should use
:func:`poll` to determine if there is any data available.
Returns a list of data bytes and special symbols. There are three
special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
I2C_MONITOR_STOP.
"""
data = array.array('H', (0,) * self.BUFFER_SIZE)
ret = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
data)
_raise_error_if_negative(ret)
del data[ret:]
return data.tolist()
@property
def spi_bitrate(self):
"""SPI bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set. The slowest bitrate supported is 125kHz. Any smaller value will
be rounded up to 125kHz.
The power-on default value is 1000 kHz.
"""
ret = api.py_aa_spi_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@spi_bitrate.setter
def spi_bitrate(self, value):
ret = api.py_aa_spi_bitrate(self.handle, value)
_raise_error_if_negative(ret)
def spi_configure(self, polarity, phase, bitorder):
"""Configure the SPI interface."""
ret = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
_raise_error_if_negative(ret)
def spi_configure_mode(self, spi_mode):
"""Configure the SPI interface by the well known SPI modes."""
if spi_mode == SPI_MODE_0:
self.spi_configure(SPI_POL_RISING_FALLING,
SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)
elif spi_mode == SPI_MODE_3:
self.spi_configure(SPI_POL_FALLING_RISING,
SPI_PHASE_SETUP_SAMPLE, SPI_BITORDER_MSB)
else:
raise RuntimeError('SPI Mode not supported')
def spi_write(self, data):
"""Write a stream of bytes to a SPI device."""
data_out = array.array('B', data)
data_in = array.array('B', (0,) * len(data_out))
ret = api.py_aa_spi_write(self.handle, len(data_out), data_out,
len(data_in), data_in)
_raise_error_if_negative(ret)
return bytes(data_in)
def spi_ss_polarity(self, polarity):
"""Change the output polarity on the SS line.
Please note, that this only affects the master functions.
"""
ret = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
_raise_error_if_negative(ret)
|
kontron/python-aardvark | pyaardvark/aardvark.py | Aardvark.enable_i2c | python | def enable_i2c(self):
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C | Set this to `True` to enable the hardware I2C interface. If set to
`False` the hardware interface will be disabled and its pins (SDA and
SCL) can be used as GPIOs. | train | https://github.com/kontron/python-aardvark/blob/9827f669fbdc5bceb98e7d08a294b4e4e455d0d5/pyaardvark/aardvark.py#L280-L286 | null | class Aardvark(object):
"""Represents an Aardvark device."""
BUFFER_SIZE = 65535
def __init__(self, port=0):
ret, ver = api.py_aa_open_ext(port)
_raise_error_if_negative(ret)
#: A handle which is used as the first parameter for all calls to the
#: underlying API.
self.handle = ret
# assign some useful names
version = dict(
software = ver[0],
firmware = ver[1],
hardware = ver[2],
sw_req_by_fw = ver[3],
fw_req_by_sw = ver[4],
api_req_by_sw = ver[5],
)
#: Hardware revision of the host adapter as a string. The format is
#: ``M.NN`` where `M` is the major number and `NN` the zero padded
#: minor number.
self.hardware_revision = _to_version_str(version['hardware'])
#: Firmware version of the host adapter as a string. See
#: :attr:`hardware_revision` for more information on the format.
self.firmware_version = _to_version_str(version['firmware'])
#: Version of underlying C module (aardvark.so, aardvark.pyd) as a
#: string. See :attr:`hardware_revision` for more information on the
#: format.
self.api_version = _to_version_str(version['software'])
# version checks
if version['firmware'] < version['fw_req_by_sw']:
log.debug('The API requires a firmware version >= %s, but the '
'device has version %s',
_to_version_str(version['fw_req_by_sw']),
_to_version_str(version['firmware']))
ret = ERR_INCOMPATIBLE_DEVICE
elif version['software'] < version['sw_req_by_fw']:
log.debug('The firmware requires an API version >= %s, but the '
'API has version %s',
_to_version_str(version['sw_req_by_fw']),
_to_version_str(version['software']))
ret = ERR_INCOMPATIBLE_LIBRARY
_raise_error_if_negative(ret)
# Initialize shadow variables
self._i2c_slave_response = None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
return False
def close(self):
"""Close the device."""
api.py_aa_close(self.handle)
self.handle = None
def unique_id(self):
"""Return the unique identifier of the device. The identifier is the
serial number you can find on the adapter without the dash. Eg. the
serial number 0012-345678 would be 12345678.
"""
return api.py_aa_unique_id(self.handle)
def unique_id_str(self):
"""Return the unique identifier. But unlike :func:`unique_id`, the ID
is returned as a string which has the format NNNN-MMMMMMM.
"""
return _unique_id_str(self.unique_id())
def _interface_configuration(self, value):
ret = api.py_aa_configure(self.handle, value)
_raise_error_if_negative(ret)
return ret
@property
@enable_i2c.setter
def enable_i2c(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_GPIO_I2C
elif value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_SPI_GPIO
if new_config != config:
self._interface_configuration(new_config)
@property
def enable_spi(self):
"""Set this to `True` to enable the hardware SPI interface. If set to
`False` the hardware interface will be disabled and its pins (MISO,
MOSI, SCK and SS) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_SPI_GPIO or config == CONFIG_SPI_I2C
@enable_spi.setter
def enable_spi(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_SPI_GPIO
elif value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_GPIO_I2C
if new_config != config:
self._interface_configuration(new_config)
@property
def i2c_bitrate(self):
"""I2C bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set.
The power-on default value is 100 kHz.
"""
ret = api.py_aa_i2c_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bitrate.setter
def i2c_bitrate(self, value):
ret = api.py_aa_i2c_bitrate(self.handle, value)
_raise_error_if_negative(ret)
@property
def i2c_pullups(self):
"""Setting this to `True` will enable the I2C pullup resistors. If set
to `False` the pullup resistors will be disabled.
Raises an :exc:`IOError` if the hardware adapter does not support
pullup resistors.
"""
ret = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)
_raise_error_if_negative(ret)
return ret
@i2c_pullups.setter
def i2c_pullups(self, value):
if value:
pullup = I2C_PULLUP_BOTH
else:
pullup = I2C_PULLUP_NONE
ret = api.py_aa_i2c_pullup(self.handle, pullup)
_raise_error_if_negative(ret)
@property
def target_power(self):
"""Setting this to `True` will activate the power pins (4 and 6). If
set to `False` the power will be deactivated.
Raises an :exc:`IOError` if the hardware adapter does not support
the switchable power pins.
"""
ret = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)
_raise_error_if_negative(ret)
return ret
@target_power.setter
def target_power(self, value):
if value:
power = TARGET_POWER_BOTH
else:
power = TARGET_POWER_NONE
ret = api.py_aa_target_power(self.handle, power)
_raise_error_if_negative(ret)
@property
def i2c_bus_timeout(self):
"""I2C bus lock timeout in ms.
Minimum value is 10 ms and the maximum value is 450 ms. Not every value
can be set and will be rounded to the next possible number. You can
read back the property to get the actual value.
The power-on default value is 200 ms.
"""
ret = api.py_aa_i2c_bus_timeout(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bus_timeout.setter
def i2c_bus_timeout(self, timeout):
ret = api.py_aa_i2c_bus_timeout(self.handle, timeout)
_raise_error_if_negative(ret)
def i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):
"""Make an I2C write access.
The given I2C device is addressed and data given as a string is
written. The transaction is finished with an I2C stop condition unless
I2C_NO_STOP is set in the flags.
10 bit addresses are supported if the I2C_10_BIT_ADDR flag is set.
"""
data = array.array('B', data)
status, _ = api.py_aa_i2c_write_ext(self.handle, i2c_address, flags,
len(data), data)
_raise_i2c_status_code_error_if_failure(status)
def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):
"""Make an I2C read access.
The given I2C device is addressed and clock cycles for `length` bytes
are generated. A short read will occur if the device generates an early
NAK.
The transaction is finished with an I2C stop condition unless the
I2C_NO_STOP flag is set.
"""
data = array.array('B', (0,) * length)
status, rx_len = api.py_aa_i2c_read_ext(self.handle, addr, flags,
length, data)
_raise_i2c_status_code_error_if_failure(status)
del data[rx_len:]
return bytes(data)
def i2c_master_write_read(self, i2c_address, data, length):
"""Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices like
EEPROMs, port expander, etc.
Basically, this is just a convenient function which internally uses
`i2c_master_write` and `i2c_master_read`.
"""
self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
return self.i2c_master_read(i2c_address, length)
def poll(self, timeout=None):
"""Wait for an event to occur.
If `timeout` is given, it specifies the length of time in milliseconds
which the function will wait for events before returning. If `timeout`
is omitted, negative or None, the call will block until there is an
event.
Returns a list of events. In case no event is pending, an empty list is
returned.
"""
if timeout is None:
timeout = -1
ret = api.py_aa_async_poll(self.handle, timeout)
_raise_error_if_negative(ret)
events = list()
for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
POLL_I2C_MONITOR):
if ret & event:
events.append(event)
return events
def enable_i2c_slave(self, slave_address):
"""Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
"""
ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,
self.BUFFER_SIZE, self.BUFFER_SIZE)
_raise_error_if_negative(ret)
def disable_i2c_slave(self):
"""Disable I2C slave mode."""
ret = api.py_aa_i2c_slave_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_slave_read(self):
"""Read the bytes from an I2C slave reception.
The bytes are returned as a string object.
"""
data = array.array('B', (0,) * self.BUFFER_SIZE)
status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,
self.BUFFER_SIZE, data)
_raise_i2c_status_code_error_if_failure(status)
# In case of general call, actually return the general call address
if addr == 0x80:
addr = 0x00
del data[rx_len:]
return (addr, bytes(data))
@property
def i2c_slave_response(self):
"""Response to next read command.
An array of bytes that will be transmitted to the I2C master with the
next read operation.
Warning: Due to the fact that the Aardvark API does not provide a means
to read out this value, it is buffered when setting the property.
Reading the property therefore might not return what is actually stored
in the device.
"""
return self._i2c_slave_response
@i2c_slave_response.setter
def i2c_slave_response(self, data):
data = array.array('B', data)
ret = api.py_aa_i2c_slave_set_response(self.handle, len(data), data)
_raise_error_if_negative(ret)
self._i2c_slave_response = data
@property
def i2c_slave_last_transmit_size(self):
"""Returns the number of bytes transmitted by the slave."""
ret = api.py_aa_i2c_slave_write_stats(self.handle)
_raise_error_if_negative(ret)
return ret
def enable_i2c_monitor(self):
"""Activate the I2C monitor.
Enabling the monitor will disable all other functions of the adapter.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_enable(self.handle)
_raise_error_if_negative(ret)
def disable_i2c_monitor(self):
"""Disable the I2C monitor.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_monitor_read(self):
"""Retrieve any data fetched by the monitor.
This function has an integrated timeout mechanism. You should use
:func:`poll` to determine if there is any data available.
Returns a list of data bytes and special symbols. There are three
special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
I2C_MONITOR_STOP.
"""
data = array.array('H', (0,) * self.BUFFER_SIZE)
ret = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
data)
_raise_error_if_negative(ret)
del data[ret:]
return data.tolist()
@property
def spi_bitrate(self):
"""SPI bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set. The slowest bitrate supported is 125kHz. Any smaller value will
be rounded up to 125kHz.
The power-on default value is 1000 kHz.
"""
ret = api.py_aa_spi_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@spi_bitrate.setter
def spi_bitrate(self, value):
ret = api.py_aa_spi_bitrate(self.handle, value)
_raise_error_if_negative(ret)
def spi_configure(self, polarity, phase, bitorder):
"""Configure the SPI interface."""
ret = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
_raise_error_if_negative(ret)
def spi_configure_mode(self, spi_mode):
"""Configure the SPI interface by the well known SPI modes."""
if spi_mode == SPI_MODE_0:
self.spi_configure(SPI_POL_RISING_FALLING,
SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)
elif spi_mode == SPI_MODE_3:
self.spi_configure(SPI_POL_FALLING_RISING,
SPI_PHASE_SETUP_SAMPLE, SPI_BITORDER_MSB)
else:
raise RuntimeError('SPI Mode not supported')
def spi_write(self, data):
"""Write a stream of bytes to a SPI device."""
data_out = array.array('B', data)
data_in = array.array('B', (0,) * len(data_out))
ret = api.py_aa_spi_write(self.handle, len(data_out), data_out,
len(data_in), data_in)
_raise_error_if_negative(ret)
return bytes(data_in)
def spi_ss_polarity(self, polarity):
"""Change the output polarity on the SS line.
Please note, that this only affects the master functions.
"""
ret = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
_raise_error_if_negative(ret)
|
kontron/python-aardvark | pyaardvark/aardvark.py | Aardvark.enable_spi | python | def enable_spi(self):
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_SPI_GPIO or config == CONFIG_SPI_I2C | Set this to `True` to enable the hardware SPI interface. If set to
`False` the hardware interface will be disabled and its pins (MISO,
MOSI, SCK and SS) can be used as GPIOs. | train | https://github.com/kontron/python-aardvark/blob/9827f669fbdc5bceb98e7d08a294b4e4e455d0d5/pyaardvark/aardvark.py#L303-L309 | null | class Aardvark(object):
"""Represents an Aardvark device."""
BUFFER_SIZE = 65535
def __init__(self, port=0):
ret, ver = api.py_aa_open_ext(port)
_raise_error_if_negative(ret)
#: A handle which is used as the first paramter for all calls to the
#: underlying API.
self.handle = ret
# assign some useful names
version = dict(
software = ver[0],
firmware = ver[1],
hardware = ver[2],
sw_req_by_fw = ver[3],
fw_req_by_sw = ver[4],
api_req_by_sw = ver[5],
)
#: Hardware revision of the host adapter as a string. The format is
#: ``M.NN`` where `M` is the major number and `NN` the zero padded
#: minor number.
self.hardware_revision = _to_version_str(version['hardware'])
#: Firmware version of the host adapter as a string. See
#: :attr:`hardware_revision` for more information on the format.
self.firmware_version = _to_version_str(version['firmware'])
#: Version of underlying C module (aardvark.so, aardvark.pyd) as a
#: string. See :attr:`hardware_revision` for more information on the
#: format.
self.api_version = _to_version_str(version['software'])
# version checks
if version['firmware'] < version['fw_req_by_sw']:
log.debug('The API requires a firmware version >= %s, but the '
'device has version %s',
_to_version_str(version['fw_req_by_sw']),
_to_version_str(version['firmware']))
ret = ERR_INCOMPATIBLE_DEVICE
elif version['software'] < version['sw_req_by_fw']:
log.debug('The firmware requires an API version >= %s, but the '
'API has version %s',
_to_version_str(version['sw_req_by_fw']),
_to_version_str(version['software']))
ret = ERR_INCOMPATIBLE_LIBRARY
_raise_error_if_negative(ret)
# Initialize shadow variables
self._i2c_slave_response = None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
return False
def close(self):
"""Close the device."""
api.py_aa_close(self.handle)
self.handle = None
def unique_id(self):
"""Return the unique identifier of the device. The identifier is the
serial number you can find on the adapter without the dash. Eg. the
serial number 0012-345678 would be 12345678.
"""
return api.py_aa_unique_id(self.handle)
def unique_id_str(self):
"""Return the unique identifier. But unlike :func:`unique_id`, the ID
is returned as a string which has the format NNNN-MMMMMMM.
"""
return _unique_id_str(self.unique_id())
def _interface_configuration(self, value):
ret = api.py_aa_configure(self.handle, value)
_raise_error_if_negative(ret)
return ret
@property
def enable_i2c(self):
"""Set this to `True` to enable the hardware I2C interface. If set to
`False` the hardware interface will be disabled and its pins (SDA and
SCL) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C
@enable_i2c.setter
def enable_i2c(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_GPIO_I2C
elif value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_SPI_GPIO
if new_config != config:
self._interface_configuration(new_config)
@property
@enable_spi.setter
def enable_spi(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_SPI_GPIO
elif value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_GPIO_I2C
if new_config != config:
self._interface_configuration(new_config)
@property
def i2c_bitrate(self):
"""I2C bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set.
The power-on default value is 100 kHz.
"""
ret = api.py_aa_i2c_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bitrate.setter
def i2c_bitrate(self, value):
ret = api.py_aa_i2c_bitrate(self.handle, value)
_raise_error_if_negative(ret)
@property
def i2c_pullups(self):
"""Setting this to `True` will enable the I2C pullup resistors. If set
to `False` the pullup resistors will be disabled.
Raises an :exc:`IOError` if the hardware adapter does not support
pullup resistors.
"""
ret = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)
_raise_error_if_negative(ret)
return ret
@i2c_pullups.setter
def i2c_pullups(self, value):
if value:
pullup = I2C_PULLUP_BOTH
else:
pullup = I2C_PULLUP_NONE
ret = api.py_aa_i2c_pullup(self.handle, pullup)
_raise_error_if_negative(ret)
@property
def target_power(self):
"""Setting this to `True` will activate the power pins (4 and 6). If
set to `False` the power will be deactivated.
Raises an :exc:`IOError` if the hardware adapter does not support
the switchable power pins.
"""
ret = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)
_raise_error_if_negative(ret)
return ret
@target_power.setter
def target_power(self, value):
if value:
power = TARGET_POWER_BOTH
else:
power = TARGET_POWER_NONE
ret = api.py_aa_target_power(self.handle, power)
_raise_error_if_negative(ret)
@property
def i2c_bus_timeout(self):
"""I2C bus lock timeout in ms.
Minimum value is 10 ms and the maximum value is 450 ms. Not every value
can be set and will be rounded to the next possible number. You can
read back the property to get the actual value.
The power-on default value is 200 ms.
"""
ret = api.py_aa_i2c_bus_timeout(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bus_timeout.setter
def i2c_bus_timeout(self, timeout):
ret = api.py_aa_i2c_bus_timeout(self.handle, timeout)
_raise_error_if_negative(ret)
def i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):
"""Make an I2C write access.
The given I2C device is addressed and data given as a string is
written. The transaction is finished with an I2C stop condition unless
I2C_NO_STOP is set in the flags.
10 bit addresses are supported if the I2C_10_BIT_ADDR flag is set.
"""
data = array.array('B', data)
status, _ = api.py_aa_i2c_write_ext(self.handle, i2c_address, flags,
len(data), data)
_raise_i2c_status_code_error_if_failure(status)
def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):
"""Make an I2C read access.
The given I2C device is addressed and clock cycles for `length` bytes
are generated. A short read will occur if the device generates an early
NAK.
The transaction is finished with an I2C stop condition unless the
I2C_NO_STOP flag is set.
"""
data = array.array('B', (0,) * length)
status, rx_len = api.py_aa_i2c_read_ext(self.handle, addr, flags,
length, data)
_raise_i2c_status_code_error_if_failure(status)
del data[rx_len:]
return bytes(data)
def i2c_master_write_read(self, i2c_address, data, length):
"""Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices like
EEPROMs, port expander, etc.
Basically, this is just a convenient function which internally uses
`i2c_master_write` and `i2c_master_read`.
"""
self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
return self.i2c_master_read(i2c_address, length)
def poll(self, timeout=None):
"""Wait for an event to occur.
If `timeout` is given, if specifies the length of time in milliseconds
which the function will wait for events before returing. If `timeout`
is omitted, negative or None, the call will block until there is an
event.
Returns a list of events. In case no event is pending, an empty list is
returned.
"""
if timeout is None:
timeout = -1
ret = api.py_aa_async_poll(self.handle, timeout)
_raise_error_if_negative(ret)
events = list()
for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
POLL_I2C_MONITOR):
if ret & event:
events.append(event)
return events
def enable_i2c_slave(self, slave_address):
"""Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
"""
ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,
self.BUFFER_SIZE, self.BUFFER_SIZE)
_raise_error_if_negative(ret)
def disable_i2c_slave(self):
"""Disable I2C slave mode."""
ret = api.py_aa_i2c_slave_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_slave_read(self):
"""Read the bytes from an I2C slave reception.
The bytes are returned as a string object.
"""
data = array.array('B', (0,) * self.BUFFER_SIZE)
status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,
self.BUFFER_SIZE, data)
_raise_i2c_status_code_error_if_failure(status)
# In case of general call, actually return the general call address
if addr == 0x80:
addr = 0x00
del data[rx_len:]
return (addr, bytes(data))
@property
def i2c_slave_response(self):
"""Response to next read command.
An array of bytes that will be transmitted to the I2C master with the
next read operation.
Warning: Due to the fact that the Aardvark API does not provide a means
to read out this value, it is buffered when setting the property.
Reading the property therefore might not return what is actually stored
in the device.
"""
return self._i2c_slave_response
@i2c_slave_response.setter
def i2c_slave_response(self, data):
data = array.array('B', data)
ret = api.py_aa_i2c_slave_set_response(self.handle, len(data), data)
_raise_error_if_negative(ret)
self._i2c_slave_response = data
@property
def i2c_slave_last_transmit_size(self):
"""Returns the number of bytes transmitted by the slave."""
ret = api.py_aa_i2c_slave_write_stats(self.handle)
_raise_error_if_negative(ret)
return ret
def enable_i2c_monitor(self):
"""Activate the I2C monitor.
Enabling the monitor will disable all other functions of the adapter.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_enable(self.handle)
_raise_error_if_negative(ret)
def disable_i2c_monitor(self):
"""Disable the I2C monitor.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_monitor_read(self):
"""Retrieved any data fetched by the monitor.
This function has an integrated timeout mechanism. You should use
:func:`poll` to determine if there is any data available.
Returns a list of data bytes and special symbols. There are three
special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
I2C_MONITOR_STOP.
"""
data = array.array('H', (0,) * self.BUFFER_SIZE)
ret = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
data)
_raise_error_if_negative(ret)
del data[ret:]
return data.tolist()
@property
def spi_bitrate(self):
"""SPI bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set. The slowest bitrate supported is 125kHz. Any smaller value will
be rounded up to 125kHz.
The power-on default value is 1000 kHz.
"""
ret = api.py_aa_spi_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@spi_bitrate.setter
def spi_bitrate(self, value):
ret = api.py_aa_spi_bitrate(self.handle, value)
_raise_error_if_negative(ret)
def spi_configure(self, polarity, phase, bitorder):
"""Configure the SPI interface."""
ret = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
_raise_error_if_negative(ret)
def spi_configure_mode(self, spi_mode):
"""Configure the SPI interface by the well known SPI modes."""
if spi_mode == SPI_MODE_0:
self.spi_configure(SPI_POL_RISING_FALLING,
SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)
elif spi_mode == SPI_MODE_3:
self.spi_configure(SPI_POL_FALLING_RISING,
SPI_PHASE_SETUP_SAMPLE, SPI_BITORDER_MSB)
else:
raise RuntimeError('SPI Mode not supported')
def spi_write(self, data):
"""Write a stream of bytes to a SPI device."""
data_out = array.array('B', data)
data_in = array.array('B', (0,) * len(data_out))
ret = api.py_aa_spi_write(self.handle, len(data_out), data_out,
len(data_in), data_in)
_raise_error_if_negative(ret)
return bytes(data_in)
def spi_ss_polarity(self, polarity):
"""Change the ouput polarity on the SS line.
Please note, that this only affects the master functions.
"""
ret = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
_raise_error_if_negative(ret)
|
kontron/python-aardvark | pyaardvark/aardvark.py | Aardvark.i2c_bitrate | python | def i2c_bitrate(self):
ret = api.py_aa_i2c_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret | I2C bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set.
The power-on default value is 100 kHz. | train | https://github.com/kontron/python-aardvark/blob/9827f669fbdc5bceb98e7d08a294b4e4e455d0d5/pyaardvark/aardvark.py#L326-L336 | null | class Aardvark(object):
"""Represents an Aardvark device."""
BUFFER_SIZE = 65535
def __init__(self, port=0):
ret, ver = api.py_aa_open_ext(port)
_raise_error_if_negative(ret)
#: A handle which is used as the first paramter for all calls to the
#: underlying API.
self.handle = ret
# assign some useful names
version = dict(
software = ver[0],
firmware = ver[1],
hardware = ver[2],
sw_req_by_fw = ver[3],
fw_req_by_sw = ver[4],
api_req_by_sw = ver[5],
)
#: Hardware revision of the host adapter as a string. The format is
#: ``M.NN`` where `M` is the major number and `NN` the zero padded
#: minor number.
self.hardware_revision = _to_version_str(version['hardware'])
#: Firmware version of the host adapter as a string. See
#: :attr:`hardware_revision` for more information on the format.
self.firmware_version = _to_version_str(version['firmware'])
#: Version of underlying C module (aardvark.so, aardvark.pyd) as a
#: string. See :attr:`hardware_revision` for more information on the
#: format.
self.api_version = _to_version_str(version['software'])
# version checks
if version['firmware'] < version['fw_req_by_sw']:
log.debug('The API requires a firmware version >= %s, but the '
'device has version %s',
_to_version_str(version['fw_req_by_sw']),
_to_version_str(version['firmware']))
ret = ERR_INCOMPATIBLE_DEVICE
elif version['software'] < version['sw_req_by_fw']:
log.debug('The firmware requires an API version >= %s, but the '
'API has version %s',
_to_version_str(version['sw_req_by_fw']),
_to_version_str(version['software']))
ret = ERR_INCOMPATIBLE_LIBRARY
_raise_error_if_negative(ret)
# Initialize shadow variables
self._i2c_slave_response = None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
return False
def close(self):
"""Close the device."""
api.py_aa_close(self.handle)
self.handle = None
def unique_id(self):
"""Return the unique identifier of the device. The identifier is the
serial number you can find on the adapter without the dash. Eg. the
serial number 0012-345678 would be 12345678.
"""
return api.py_aa_unique_id(self.handle)
def unique_id_str(self):
"""Return the unique identifier. But unlike :func:`unique_id`, the ID
is returned as a string which has the format NNNN-MMMMMMM.
"""
return _unique_id_str(self.unique_id())
def _interface_configuration(self, value):
ret = api.py_aa_configure(self.handle, value)
_raise_error_if_negative(ret)
return ret
@property
def enable_i2c(self):
"""Set this to `True` to enable the hardware I2C interface. If set to
`False` the hardware interface will be disabled and its pins (SDA and
SCL) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C
@enable_i2c.setter
def enable_i2c(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_GPIO_I2C
elif value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_SPI_GPIO
if new_config != config:
self._interface_configuration(new_config)
@property
def enable_spi(self):
"""Set this to `True` to enable the hardware SPI interface. If set to
`False` the hardware interface will be disabled and its pins (MISO,
MOSI, SCK and SS) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_SPI_GPIO or config == CONFIG_SPI_I2C
@enable_spi.setter
def enable_spi(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_SPI_GPIO
elif value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_GPIO_I2C
if new_config != config:
self._interface_configuration(new_config)
@property
@i2c_bitrate.setter
def i2c_bitrate(self, value):
ret = api.py_aa_i2c_bitrate(self.handle, value)
_raise_error_if_negative(ret)
@property
def i2c_pullups(self):
"""Setting this to `True` will enable the I2C pullup resistors. If set
to `False` the pullup resistors will be disabled.
Raises an :exc:`IOError` if the hardware adapter does not support
pullup resistors.
"""
ret = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)
_raise_error_if_negative(ret)
return ret
@i2c_pullups.setter
def i2c_pullups(self, value):
if value:
pullup = I2C_PULLUP_BOTH
else:
pullup = I2C_PULLUP_NONE
ret = api.py_aa_i2c_pullup(self.handle, pullup)
_raise_error_if_negative(ret)
@property
def target_power(self):
"""Setting this to `True` will activate the power pins (4 and 6). If
set to `False` the power will be deactivated.
Raises an :exc:`IOError` if the hardware adapter does not support
the switchable power pins.
"""
ret = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)
_raise_error_if_negative(ret)
return ret
@target_power.setter
def target_power(self, value):
if value:
power = TARGET_POWER_BOTH
else:
power = TARGET_POWER_NONE
ret = api.py_aa_target_power(self.handle, power)
_raise_error_if_negative(ret)
@property
def i2c_bus_timeout(self):
"""I2C bus lock timeout in ms.
Minimum value is 10 ms and the maximum value is 450 ms. Not every value
can be set and will be rounded to the next possible number. You can
read back the property to get the actual value.
The power-on default value is 200 ms.
"""
ret = api.py_aa_i2c_bus_timeout(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bus_timeout.setter
def i2c_bus_timeout(self, timeout):
ret = api.py_aa_i2c_bus_timeout(self.handle, timeout)
_raise_error_if_negative(ret)
def i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):
"""Make an I2C write access.
The given I2C device is addressed and data given as a string is
written. The transaction is finished with an I2C stop condition unless
I2C_NO_STOP is set in the flags.
10 bit addresses are supported if the I2C_10_BIT_ADDR flag is set.
"""
data = array.array('B', data)
status, _ = api.py_aa_i2c_write_ext(self.handle, i2c_address, flags,
len(data), data)
_raise_i2c_status_code_error_if_failure(status)
def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):
"""Make an I2C read access.
The given I2C device is addressed and clock cycles for `length` bytes
are generated. A short read will occur if the device generates an early
NAK.
The transaction is finished with an I2C stop condition unless the
I2C_NO_STOP flag is set.
"""
data = array.array('B', (0,) * length)
status, rx_len = api.py_aa_i2c_read_ext(self.handle, addr, flags,
length, data)
_raise_i2c_status_code_error_if_failure(status)
del data[rx_len:]
return bytes(data)
def i2c_master_write_read(self, i2c_address, data, length):
"""Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices like
EEPROMs, port expander, etc.
Basically, this is just a convenient function which internally uses
`i2c_master_write` and `i2c_master_read`.
"""
self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
return self.i2c_master_read(i2c_address, length)
def poll(self, timeout=None):
"""Wait for an event to occur.
If `timeout` is given, if specifies the length of time in milliseconds
which the function will wait for events before returing. If `timeout`
is omitted, negative or None, the call will block until there is an
event.
Returns a list of events. In case no event is pending, an empty list is
returned.
"""
if timeout is None:
timeout = -1
ret = api.py_aa_async_poll(self.handle, timeout)
_raise_error_if_negative(ret)
events = list()
for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
POLL_I2C_MONITOR):
if ret & event:
events.append(event)
return events
def enable_i2c_slave(self, slave_address):
"""Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
"""
ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,
self.BUFFER_SIZE, self.BUFFER_SIZE)
_raise_error_if_negative(ret)
def disable_i2c_slave(self):
"""Disable I2C slave mode."""
ret = api.py_aa_i2c_slave_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_slave_read(self):
"""Read the bytes from an I2C slave reception.
The bytes are returned as a string object.
"""
data = array.array('B', (0,) * self.BUFFER_SIZE)
status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,
self.BUFFER_SIZE, data)
_raise_i2c_status_code_error_if_failure(status)
# In case of general call, actually return the general call address
if addr == 0x80:
addr = 0x00
del data[rx_len:]
return (addr, bytes(data))
@property
def i2c_slave_response(self):
"""Response to next read command.
An array of bytes that will be transmitted to the I2C master with the
next read operation.
Warning: Due to the fact that the Aardvark API does not provide a means
to read out this value, it is buffered when setting the property.
Reading the property therefore might not return what is actually stored
in the device.
"""
return self._i2c_slave_response
@i2c_slave_response.setter
def i2c_slave_response(self, data):
data = array.array('B', data)
ret = api.py_aa_i2c_slave_set_response(self.handle, len(data), data)
_raise_error_if_negative(ret)
self._i2c_slave_response = data
@property
def i2c_slave_last_transmit_size(self):
"""Returns the number of bytes transmitted by the slave."""
ret = api.py_aa_i2c_slave_write_stats(self.handle)
_raise_error_if_negative(ret)
return ret
def enable_i2c_monitor(self):
"""Activate the I2C monitor.
Enabling the monitor will disable all other functions of the adapter.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_enable(self.handle)
_raise_error_if_negative(ret)
def disable_i2c_monitor(self):
"""Disable the I2C monitor.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_monitor_read(self):
"""Retrieved any data fetched by the monitor.
This function has an integrated timeout mechanism. You should use
:func:`poll` to determine if there is any data available.
Returns a list of data bytes and special symbols. There are three
special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
I2C_MONITOR_STOP.
"""
data = array.array('H', (0,) * self.BUFFER_SIZE)
ret = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
data)
_raise_error_if_negative(ret)
del data[ret:]
return data.tolist()
@property
def spi_bitrate(self):
"""SPI bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set. The slowest bitrate supported is 125kHz. Any smaller value will
be rounded up to 125kHz.
The power-on default value is 1000 kHz.
"""
ret = api.py_aa_spi_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@spi_bitrate.setter
def spi_bitrate(self, value):
ret = api.py_aa_spi_bitrate(self.handle, value)
_raise_error_if_negative(ret)
def spi_configure(self, polarity, phase, bitorder):
"""Configure the SPI interface."""
ret = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
_raise_error_if_negative(ret)
def spi_configure_mode(self, spi_mode):
"""Configure the SPI interface by the well known SPI modes."""
if spi_mode == SPI_MODE_0:
self.spi_configure(SPI_POL_RISING_FALLING,
SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)
elif spi_mode == SPI_MODE_3:
self.spi_configure(SPI_POL_FALLING_RISING,
SPI_PHASE_SETUP_SAMPLE, SPI_BITORDER_MSB)
else:
raise RuntimeError('SPI Mode not supported')
def spi_write(self, data):
"""Write a stream of bytes to a SPI device."""
data_out = array.array('B', data)
data_in = array.array('B', (0,) * len(data_out))
ret = api.py_aa_spi_write(self.handle, len(data_out), data_out,
len(data_in), data_in)
_raise_error_if_negative(ret)
return bytes(data_in)
def spi_ss_polarity(self, polarity):
"""Change the ouput polarity on the SS line.
Please note, that this only affects the master functions.
"""
ret = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
_raise_error_if_negative(ret)
|
kontron/python-aardvark | pyaardvark/aardvark.py | Aardvark.i2c_pullups | python | def i2c_pullups(self):
ret = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)
_raise_error_if_negative(ret)
return ret | Setting this to `True` will enable the I2C pullup resistors. If set
to `False` the pullup resistors will be disabled.
Raises an :exc:`IOError` if the hardware adapter does not support
pullup resistors. | train | https://github.com/kontron/python-aardvark/blob/9827f669fbdc5bceb98e7d08a294b4e4e455d0d5/pyaardvark/aardvark.py#L344-L353 | null | class Aardvark(object):
"""Represents an Aardvark device."""
BUFFER_SIZE = 65535
def __init__(self, port=0):
ret, ver = api.py_aa_open_ext(port)
_raise_error_if_negative(ret)
#: A handle which is used as the first paramter for all calls to the
#: underlying API.
self.handle = ret
# assign some useful names
version = dict(
software = ver[0],
firmware = ver[1],
hardware = ver[2],
sw_req_by_fw = ver[3],
fw_req_by_sw = ver[4],
api_req_by_sw = ver[5],
)
#: Hardware revision of the host adapter as a string. The format is
#: ``M.NN`` where `M` is the major number and `NN` the zero padded
#: minor number.
self.hardware_revision = _to_version_str(version['hardware'])
#: Firmware version of the host adapter as a string. See
#: :attr:`hardware_revision` for more information on the format.
self.firmware_version = _to_version_str(version['firmware'])
#: Version of underlying C module (aardvark.so, aardvark.pyd) as a
#: string. See :attr:`hardware_revision` for more information on the
#: format.
self.api_version = _to_version_str(version['software'])
# version checks
if version['firmware'] < version['fw_req_by_sw']:
log.debug('The API requires a firmware version >= %s, but the '
'device has version %s',
_to_version_str(version['fw_req_by_sw']),
_to_version_str(version['firmware']))
ret = ERR_INCOMPATIBLE_DEVICE
elif version['software'] < version['sw_req_by_fw']:
log.debug('The firmware requires an API version >= %s, but the '
'API has version %s',
_to_version_str(version['sw_req_by_fw']),
_to_version_str(version['software']))
ret = ERR_INCOMPATIBLE_LIBRARY
_raise_error_if_negative(ret)
# Initialize shadow variables
self._i2c_slave_response = None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
return False
def close(self):
"""Close the device."""
api.py_aa_close(self.handle)
self.handle = None
def unique_id(self):
"""Return the unique identifier of the device. The identifier is the
serial number you can find on the adapter without the dash. Eg. the
serial number 0012-345678 would be 12345678.
"""
return api.py_aa_unique_id(self.handle)
def unique_id_str(self):
"""Return the unique identifier. But unlike :func:`unique_id`, the ID
is returned as a string which has the format NNNN-MMMMMMM.
"""
return _unique_id_str(self.unique_id())
def _interface_configuration(self, value):
ret = api.py_aa_configure(self.handle, value)
_raise_error_if_negative(ret)
return ret
@property
def enable_i2c(self):
"""Set this to `True` to enable the hardware I2C interface. If set to
`False` the hardware interface will be disabled and its pins (SDA and
SCL) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C
@enable_i2c.setter
def enable_i2c(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_GPIO_I2C
elif value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_SPI_GPIO
if new_config != config:
self._interface_configuration(new_config)
@property
def enable_spi(self):
"""Set this to `True` to enable the hardware SPI interface. If set to
`False` the hardware interface will be disabled and its pins (MISO,
MOSI, SCK and SS) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_SPI_GPIO or config == CONFIG_SPI_I2C
@enable_spi.setter
def enable_spi(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_SPI_GPIO
elif value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_GPIO_I2C
if new_config != config:
self._interface_configuration(new_config)
@property
def i2c_bitrate(self):
"""I2C bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set.
The power-on default value is 100 kHz.
"""
ret = api.py_aa_i2c_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bitrate.setter
def i2c_bitrate(self, value):
ret = api.py_aa_i2c_bitrate(self.handle, value)
_raise_error_if_negative(ret)
@property
@i2c_pullups.setter
def i2c_pullups(self, value):
if value:
pullup = I2C_PULLUP_BOTH
else:
pullup = I2C_PULLUP_NONE
ret = api.py_aa_i2c_pullup(self.handle, pullup)
_raise_error_if_negative(ret)
@property
def target_power(self):
"""Setting this to `True` will activate the power pins (4 and 6). If
set to `False` the power will be deactivated.
Raises an :exc:`IOError` if the hardware adapter does not support
the switchable power pins.
"""
ret = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)
_raise_error_if_negative(ret)
return ret
@target_power.setter
def target_power(self, value):
if value:
power = TARGET_POWER_BOTH
else:
power = TARGET_POWER_NONE
ret = api.py_aa_target_power(self.handle, power)
_raise_error_if_negative(ret)
@property
def i2c_bus_timeout(self):
"""I2C bus lock timeout in ms.
Minimum value is 10 ms and the maximum value is 450 ms. Not every value
can be set and will be rounded to the next possible number. You can
read back the property to get the actual value.
The power-on default value is 200 ms.
"""
ret = api.py_aa_i2c_bus_timeout(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bus_timeout.setter
def i2c_bus_timeout(self, timeout):
ret = api.py_aa_i2c_bus_timeout(self.handle, timeout)
_raise_error_if_negative(ret)
def i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):
"""Make an I2C write access.
The given I2C device is addressed and data given as a string is
written. The transaction is finished with an I2C stop condition unless
I2C_NO_STOP is set in the flags.
10 bit addresses are supported if the I2C_10_BIT_ADDR flag is set.
"""
data = array.array('B', data)
status, _ = api.py_aa_i2c_write_ext(self.handle, i2c_address, flags,
len(data), data)
_raise_i2c_status_code_error_if_failure(status)
def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):
"""Make an I2C read access.
The given I2C device is addressed and clock cycles for `length` bytes
are generated. A short read will occur if the device generates an early
NAK.
The transaction is finished with an I2C stop condition unless the
I2C_NO_STOP flag is set.
"""
data = array.array('B', (0,) * length)
status, rx_len = api.py_aa_i2c_read_ext(self.handle, addr, flags,
length, data)
_raise_i2c_status_code_error_if_failure(status)
del data[rx_len:]
return bytes(data)
def i2c_master_write_read(self, i2c_address, data, length):
"""Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices like
EEPROMs, port expander, etc.
Basically, this is just a convenient function which internally uses
`i2c_master_write` and `i2c_master_read`.
"""
self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
return self.i2c_master_read(i2c_address, length)
def poll(self, timeout=None):
"""Wait for an event to occur.
If `timeout` is given, if specifies the length of time in milliseconds
which the function will wait for events before returing. If `timeout`
is omitted, negative or None, the call will block until there is an
event.
Returns a list of events. In case no event is pending, an empty list is
returned.
"""
if timeout is None:
timeout = -1
ret = api.py_aa_async_poll(self.handle, timeout)
_raise_error_if_negative(ret)
events = list()
for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
POLL_I2C_MONITOR):
if ret & event:
events.append(event)
return events
def enable_i2c_slave(self, slave_address):
"""Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
"""
ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,
self.BUFFER_SIZE, self.BUFFER_SIZE)
_raise_error_if_negative(ret)
def disable_i2c_slave(self):
"""Disable I2C slave mode."""
ret = api.py_aa_i2c_slave_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_slave_read(self):
"""Read the bytes from an I2C slave reception.
The bytes are returned as a string object.
"""
data = array.array('B', (0,) * self.BUFFER_SIZE)
status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,
self.BUFFER_SIZE, data)
_raise_i2c_status_code_error_if_failure(status)
# In case of general call, actually return the general call address
if addr == 0x80:
addr = 0x00
del data[rx_len:]
return (addr, bytes(data))
@property
def i2c_slave_response(self):
"""Response to next read command.
An array of bytes that will be transmitted to the I2C master with the
next read operation.
Warning: Due to the fact that the Aardvark API does not provide a means
to read out this value, it is buffered when setting the property.
Reading the property therefore might not return what is actually stored
in the device.
"""
return self._i2c_slave_response
@i2c_slave_response.setter
def i2c_slave_response(self, data):
data = array.array('B', data)
ret = api.py_aa_i2c_slave_set_response(self.handle, len(data), data)
_raise_error_if_negative(ret)
self._i2c_slave_response = data
@property
def i2c_slave_last_transmit_size(self):
"""Returns the number of bytes transmitted by the slave."""
ret = api.py_aa_i2c_slave_write_stats(self.handle)
_raise_error_if_negative(ret)
return ret
def enable_i2c_monitor(self):
"""Activate the I2C monitor.
Enabling the monitor will disable all other functions of the adapter.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_enable(self.handle)
_raise_error_if_negative(ret)
def disable_i2c_monitor(self):
"""Disable the I2C monitor.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_monitor_read(self):
"""Retrieved any data fetched by the monitor.
This function has an integrated timeout mechanism. You should use
:func:`poll` to determine if there is any data available.
Returns a list of data bytes and special symbols. There are three
special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
I2C_MONITOR_STOP.
"""
data = array.array('H', (0,) * self.BUFFER_SIZE)
ret = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
data)
_raise_error_if_negative(ret)
del data[ret:]
return data.tolist()
@property
def spi_bitrate(self):
"""SPI bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set. The slowest bitrate supported is 125kHz. Any smaller value will
be rounded up to 125kHz.
The power-on default value is 1000 kHz.
"""
ret = api.py_aa_spi_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@spi_bitrate.setter
def spi_bitrate(self, value):
ret = api.py_aa_spi_bitrate(self.handle, value)
_raise_error_if_negative(ret)
def spi_configure(self, polarity, phase, bitorder):
"""Configure the SPI interface."""
ret = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
_raise_error_if_negative(ret)
def spi_configure_mode(self, spi_mode):
"""Configure the SPI interface by the well known SPI modes."""
if spi_mode == SPI_MODE_0:
self.spi_configure(SPI_POL_RISING_FALLING,
SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)
elif spi_mode == SPI_MODE_3:
self.spi_configure(SPI_POL_FALLING_RISING,
SPI_PHASE_SETUP_SAMPLE, SPI_BITORDER_MSB)
else:
raise RuntimeError('SPI Mode not supported')
def spi_write(self, data):
"""Write a stream of bytes to a SPI device."""
data_out = array.array('B', data)
data_in = array.array('B', (0,) * len(data_out))
ret = api.py_aa_spi_write(self.handle, len(data_out), data_out,
len(data_in), data_in)
_raise_error_if_negative(ret)
return bytes(data_in)
def spi_ss_polarity(self, polarity):
"""Change the ouput polarity on the SS line.
Please note, that this only affects the master functions.
"""
ret = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
_raise_error_if_negative(ret)
|
kontron/python-aardvark | pyaardvark/aardvark.py | Aardvark.target_power | python | def target_power(self):
ret = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)
_raise_error_if_negative(ret)
return ret | Setting this to `True` will activate the power pins (4 and 6). If
set to `False` the power will be deactivated.
Raises an :exc:`IOError` if the hardware adapter does not support
the switchable power pins. | train | https://github.com/kontron/python-aardvark/blob/9827f669fbdc5bceb98e7d08a294b4e4e455d0d5/pyaardvark/aardvark.py#L365-L374 | null | class Aardvark(object):
"""Represents an Aardvark device."""
BUFFER_SIZE = 65535
def __init__(self, port=0):
ret, ver = api.py_aa_open_ext(port)
_raise_error_if_negative(ret)
#: A handle which is used as the first paramter for all calls to the
#: underlying API.
self.handle = ret
# assign some useful names
version = dict(
software = ver[0],
firmware = ver[1],
hardware = ver[2],
sw_req_by_fw = ver[3],
fw_req_by_sw = ver[4],
api_req_by_sw = ver[5],
)
#: Hardware revision of the host adapter as a string. The format is
#: ``M.NN`` where `M` is the major number and `NN` the zero padded
#: minor number.
self.hardware_revision = _to_version_str(version['hardware'])
#: Firmware version of the host adapter as a string. See
#: :attr:`hardware_revision` for more information on the format.
self.firmware_version = _to_version_str(version['firmware'])
#: Version of underlying C module (aardvark.so, aardvark.pyd) as a
#: string. See :attr:`hardware_revision` for more information on the
#: format.
self.api_version = _to_version_str(version['software'])
# version checks
if version['firmware'] < version['fw_req_by_sw']:
log.debug('The API requires a firmware version >= %s, but the '
'device has version %s',
_to_version_str(version['fw_req_by_sw']),
_to_version_str(version['firmware']))
ret = ERR_INCOMPATIBLE_DEVICE
elif version['software'] < version['sw_req_by_fw']:
log.debug('The firmware requires an API version >= %s, but the '
'API has version %s',
_to_version_str(version['sw_req_by_fw']),
_to_version_str(version['software']))
ret = ERR_INCOMPATIBLE_LIBRARY
_raise_error_if_negative(ret)
# Initialize shadow variables
self._i2c_slave_response = None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
return False
def close(self):
"""Close the device."""
api.py_aa_close(self.handle)
self.handle = None
def unique_id(self):
"""Return the unique identifier of the device. The identifier is the
serial number you can find on the adapter without the dash. Eg. the
serial number 0012-345678 would be 12345678.
"""
return api.py_aa_unique_id(self.handle)
def unique_id_str(self):
"""Return the unique identifier. But unlike :func:`unique_id`, the ID
is returned as a string which has the format NNNN-MMMMMMM.
"""
return _unique_id_str(self.unique_id())
def _interface_configuration(self, value):
ret = api.py_aa_configure(self.handle, value)
_raise_error_if_negative(ret)
return ret
@property
def enable_i2c(self):
"""Set this to `True` to enable the hardware I2C interface. If set to
`False` the hardware interface will be disabled and its pins (SDA and
SCL) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C
@enable_i2c.setter
def enable_i2c(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_GPIO_I2C
elif value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_SPI_GPIO
if new_config != config:
self._interface_configuration(new_config)
@property
def enable_spi(self):
"""Set this to `True` to enable the hardware SPI interface. If set to
`False` the hardware interface will be disabled and its pins (MISO,
MOSI, SCK and SS) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_SPI_GPIO or config == CONFIG_SPI_I2C
@enable_spi.setter
def enable_spi(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_SPI_GPIO
elif value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_GPIO_I2C
if new_config != config:
self._interface_configuration(new_config)
@property
def i2c_bitrate(self):
"""I2C bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set.
The power-on default value is 100 kHz.
"""
ret = api.py_aa_i2c_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bitrate.setter
def i2c_bitrate(self, value):
ret = api.py_aa_i2c_bitrate(self.handle, value)
_raise_error_if_negative(ret)
@property
def i2c_pullups(self):
"""Setting this to `True` will enable the I2C pullup resistors. If set
to `False` the pullup resistors will be disabled.
Raises an :exc:`IOError` if the hardware adapter does not support
pullup resistors.
"""
ret = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)
_raise_error_if_negative(ret)
return ret
@i2c_pullups.setter
def i2c_pullups(self, value):
if value:
pullup = I2C_PULLUP_BOTH
else:
pullup = I2C_PULLUP_NONE
ret = api.py_aa_i2c_pullup(self.handle, pullup)
_raise_error_if_negative(ret)
@property
@target_power.setter
def target_power(self, value):
if value:
power = TARGET_POWER_BOTH
else:
power = TARGET_POWER_NONE
ret = api.py_aa_target_power(self.handle, power)
_raise_error_if_negative(ret)
@property
def i2c_bus_timeout(self):
"""I2C bus lock timeout in ms.
Minimum value is 10 ms and the maximum value is 450 ms. Not every value
can be set and will be rounded to the next possible number. You can
read back the property to get the actual value.
The power-on default value is 200 ms.
"""
ret = api.py_aa_i2c_bus_timeout(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bus_timeout.setter
def i2c_bus_timeout(self, timeout):
ret = api.py_aa_i2c_bus_timeout(self.handle, timeout)
_raise_error_if_negative(ret)
def i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):
"""Make an I2C write access.
The given I2C device is addressed and data given as a string is
written. The transaction is finished with an I2C stop condition unless
I2C_NO_STOP is set in the flags.
10 bit addresses are supported if the I2C_10_BIT_ADDR flag is set.
"""
data = array.array('B', data)
status, _ = api.py_aa_i2c_write_ext(self.handle, i2c_address, flags,
len(data), data)
_raise_i2c_status_code_error_if_failure(status)
def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):
"""Make an I2C read access.
The given I2C device is addressed and clock cycles for `length` bytes
are generated. A short read will occur if the device generates an early
NAK.
The transaction is finished with an I2C stop condition unless the
I2C_NO_STOP flag is set.
"""
data = array.array('B', (0,) * length)
status, rx_len = api.py_aa_i2c_read_ext(self.handle, addr, flags,
length, data)
_raise_i2c_status_code_error_if_failure(status)
del data[rx_len:]
return bytes(data)
def i2c_master_write_read(self, i2c_address, data, length):
"""Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices like
EEPROMs, port expander, etc.
Basically, this is just a convenient function which internally uses
`i2c_master_write` and `i2c_master_read`.
"""
self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
return self.i2c_master_read(i2c_address, length)
def poll(self, timeout=None):
"""Wait for an event to occur.
If `timeout` is given, if specifies the length of time in milliseconds
which the function will wait for events before returing. If `timeout`
is omitted, negative or None, the call will block until there is an
event.
Returns a list of events. In case no event is pending, an empty list is
returned.
"""
if timeout is None:
timeout = -1
ret = api.py_aa_async_poll(self.handle, timeout)
_raise_error_if_negative(ret)
events = list()
for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
POLL_I2C_MONITOR):
if ret & event:
events.append(event)
return events
def enable_i2c_slave(self, slave_address):
"""Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
"""
ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,
self.BUFFER_SIZE, self.BUFFER_SIZE)
_raise_error_if_negative(ret)
def disable_i2c_slave(self):
"""Disable I2C slave mode."""
ret = api.py_aa_i2c_slave_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_slave_read(self):
"""Read the bytes from an I2C slave reception.
The bytes are returned as a string object.
"""
data = array.array('B', (0,) * self.BUFFER_SIZE)
status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,
self.BUFFER_SIZE, data)
_raise_i2c_status_code_error_if_failure(status)
# In case of general call, actually return the general call address
if addr == 0x80:
addr = 0x00
del data[rx_len:]
return (addr, bytes(data))
@property
def i2c_slave_response(self):
"""Response to next read command.
An array of bytes that will be transmitted to the I2C master with the
next read operation.
Warning: Due to the fact that the Aardvark API does not provide a means
to read out this value, it is buffered when setting the property.
Reading the property therefore might not return what is actually stored
in the device.
"""
return self._i2c_slave_response
@i2c_slave_response.setter
def i2c_slave_response(self, data):
data = array.array('B', data)
ret = api.py_aa_i2c_slave_set_response(self.handle, len(data), data)
_raise_error_if_negative(ret)
self._i2c_slave_response = data
@property
def i2c_slave_last_transmit_size(self):
"""Returns the number of bytes transmitted by the slave."""
ret = api.py_aa_i2c_slave_write_stats(self.handle)
_raise_error_if_negative(ret)
return ret
def enable_i2c_monitor(self):
"""Activate the I2C monitor.
Enabling the monitor will disable all other functions of the adapter.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_enable(self.handle)
_raise_error_if_negative(ret)
def disable_i2c_monitor(self):
"""Disable the I2C monitor.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_monitor_read(self):
"""Retrieved any data fetched by the monitor.
This function has an integrated timeout mechanism. You should use
:func:`poll` to determine if there is any data available.
Returns a list of data bytes and special symbols. There are three
special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
I2C_MONITOR_STOP.
"""
data = array.array('H', (0,) * self.BUFFER_SIZE)
ret = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
data)
_raise_error_if_negative(ret)
del data[ret:]
return data.tolist()
@property
def spi_bitrate(self):
"""SPI bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set. The slowest bitrate supported is 125kHz. Any smaller value will
be rounded up to 125kHz.
The power-on default value is 1000 kHz.
"""
ret = api.py_aa_spi_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@spi_bitrate.setter
def spi_bitrate(self, value):
ret = api.py_aa_spi_bitrate(self.handle, value)
_raise_error_if_negative(ret)
def spi_configure(self, polarity, phase, bitorder):
"""Configure the SPI interface."""
ret = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
_raise_error_if_negative(ret)
def spi_configure_mode(self, spi_mode):
"""Configure the SPI interface by the well known SPI modes."""
if spi_mode == SPI_MODE_0:
self.spi_configure(SPI_POL_RISING_FALLING,
SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)
elif spi_mode == SPI_MODE_3:
self.spi_configure(SPI_POL_FALLING_RISING,
SPI_PHASE_SETUP_SAMPLE, SPI_BITORDER_MSB)
else:
raise RuntimeError('SPI Mode not supported')
def spi_write(self, data):
"""Write a stream of bytes to a SPI device."""
data_out = array.array('B', data)
data_in = array.array('B', (0,) * len(data_out))
ret = api.py_aa_spi_write(self.handle, len(data_out), data_out,
len(data_in), data_in)
_raise_error_if_negative(ret)
return bytes(data_in)
def spi_ss_polarity(self, polarity):
"""Change the ouput polarity on the SS line.
Please note, that this only affects the master functions.
"""
ret = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
_raise_error_if_negative(ret)
|
kontron/python-aardvark | pyaardvark/aardvark.py | Aardvark.i2c_bus_timeout | python | def i2c_bus_timeout(self):
ret = api.py_aa_i2c_bus_timeout(self.handle, 0)
_raise_error_if_negative(ret)
return ret | I2C bus lock timeout in ms.
Minimum value is 10 ms and the maximum value is 450 ms. Not every value
can be set and will be rounded to the next possible number. You can
read back the property to get the actual value.
The power-on default value is 200 ms. | train | https://github.com/kontron/python-aardvark/blob/9827f669fbdc5bceb98e7d08a294b4e4e455d0d5/pyaardvark/aardvark.py#L386-L397 | null | class Aardvark(object):
"""Represents an Aardvark device."""
BUFFER_SIZE = 65535
def __init__(self, port=0):
ret, ver = api.py_aa_open_ext(port)
_raise_error_if_negative(ret)
#: A handle which is used as the first paramter for all calls to the
#: underlying API.
self.handle = ret
# assign some useful names
version = dict(
software = ver[0],
firmware = ver[1],
hardware = ver[2],
sw_req_by_fw = ver[3],
fw_req_by_sw = ver[4],
api_req_by_sw = ver[5],
)
#: Hardware revision of the host adapter as a string. The format is
#: ``M.NN`` where `M` is the major number and `NN` the zero padded
#: minor number.
self.hardware_revision = _to_version_str(version['hardware'])
#: Firmware version of the host adapter as a string. See
#: :attr:`hardware_revision` for more information on the format.
self.firmware_version = _to_version_str(version['firmware'])
#: Version of underlying C module (aardvark.so, aardvark.pyd) as a
#: string. See :attr:`hardware_revision` for more information on the
#: format.
self.api_version = _to_version_str(version['software'])
# version checks
if version['firmware'] < version['fw_req_by_sw']:
log.debug('The API requires a firmware version >= %s, but the '
'device has version %s',
_to_version_str(version['fw_req_by_sw']),
_to_version_str(version['firmware']))
ret = ERR_INCOMPATIBLE_DEVICE
elif version['software'] < version['sw_req_by_fw']:
log.debug('The firmware requires an API version >= %s, but the '
'API has version %s',
_to_version_str(version['sw_req_by_fw']),
_to_version_str(version['software']))
ret = ERR_INCOMPATIBLE_LIBRARY
_raise_error_if_negative(ret)
# Initialize shadow variables
self._i2c_slave_response = None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
return False
def close(self):
"""Close the device."""
api.py_aa_close(self.handle)
self.handle = None
def unique_id(self):
"""Return the unique identifier of the device. The identifier is the
serial number you can find on the adapter without the dash. Eg. the
serial number 0012-345678 would be 12345678.
"""
return api.py_aa_unique_id(self.handle)
def unique_id_str(self):
"""Return the unique identifier. But unlike :func:`unique_id`, the ID
is returned as a string which has the format NNNN-MMMMMMM.
"""
return _unique_id_str(self.unique_id())
def _interface_configuration(self, value):
ret = api.py_aa_configure(self.handle, value)
_raise_error_if_negative(ret)
return ret
@property
def enable_i2c(self):
"""Set this to `True` to enable the hardware I2C interface. If set to
`False` the hardware interface will be disabled and its pins (SDA and
SCL) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C
@enable_i2c.setter
def enable_i2c(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_GPIO_I2C
elif value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_SPI_GPIO
if new_config != config:
self._interface_configuration(new_config)
@property
def enable_spi(self):
"""Set this to `True` to enable the hardware SPI interface. If set to
`False` the hardware interface will be disabled and its pins (MISO,
MOSI, SCK and SS) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_SPI_GPIO or config == CONFIG_SPI_I2C
@enable_spi.setter
def enable_spi(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_SPI_GPIO
elif value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_GPIO_I2C
if new_config != config:
self._interface_configuration(new_config)
@property
def i2c_bitrate(self):
"""I2C bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set.
The power-on default value is 100 kHz.
"""
ret = api.py_aa_i2c_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bitrate.setter
def i2c_bitrate(self, value):
ret = api.py_aa_i2c_bitrate(self.handle, value)
_raise_error_if_negative(ret)
@property
def i2c_pullups(self):
"""Setting this to `True` will enable the I2C pullup resistors. If set
to `False` the pullup resistors will be disabled.
Raises an :exc:`IOError` if the hardware adapter does not support
pullup resistors.
"""
ret = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)
_raise_error_if_negative(ret)
return ret
@i2c_pullups.setter
def i2c_pullups(self, value):
if value:
pullup = I2C_PULLUP_BOTH
else:
pullup = I2C_PULLUP_NONE
ret = api.py_aa_i2c_pullup(self.handle, pullup)
_raise_error_if_negative(ret)
@property
def target_power(self):
"""Setting this to `True` will activate the power pins (4 and 6). If
set to `False` the power will be deactivated.
Raises an :exc:`IOError` if the hardware adapter does not support
the switchable power pins.
"""
ret = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)
_raise_error_if_negative(ret)
return ret
@target_power.setter
def target_power(self, value):
if value:
power = TARGET_POWER_BOTH
else:
power = TARGET_POWER_NONE
ret = api.py_aa_target_power(self.handle, power)
_raise_error_if_negative(ret)
@property
@i2c_bus_timeout.setter
def i2c_bus_timeout(self, timeout):
ret = api.py_aa_i2c_bus_timeout(self.handle, timeout)
_raise_error_if_negative(ret)
def i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):
"""Make an I2C write access.
The given I2C device is addressed and data given as a string is
written. The transaction is finished with an I2C stop condition unless
I2C_NO_STOP is set in the flags.
10 bit addresses are supported if the I2C_10_BIT_ADDR flag is set.
"""
data = array.array('B', data)
status, _ = api.py_aa_i2c_write_ext(self.handle, i2c_address, flags,
len(data), data)
_raise_i2c_status_code_error_if_failure(status)
def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):
"""Make an I2C read access.
The given I2C device is addressed and clock cycles for `length` bytes
are generated. A short read will occur if the device generates an early
NAK.
The transaction is finished with an I2C stop condition unless the
I2C_NO_STOP flag is set.
"""
data = array.array('B', (0,) * length)
status, rx_len = api.py_aa_i2c_read_ext(self.handle, addr, flags,
length, data)
_raise_i2c_status_code_error_if_failure(status)
del data[rx_len:]
return bytes(data)
def i2c_master_write_read(self, i2c_address, data, length):
"""Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices like
EEPROMs, port expander, etc.
Basically, this is just a convenient function which internally uses
`i2c_master_write` and `i2c_master_read`.
"""
self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
return self.i2c_master_read(i2c_address, length)
def poll(self, timeout=None):
"""Wait for an event to occur.
If `timeout` is given, if specifies the length of time in milliseconds
which the function will wait for events before returing. If `timeout`
is omitted, negative or None, the call will block until there is an
event.
Returns a list of events. In case no event is pending, an empty list is
returned.
"""
if timeout is None:
timeout = -1
ret = api.py_aa_async_poll(self.handle, timeout)
_raise_error_if_negative(ret)
events = list()
for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
POLL_I2C_MONITOR):
if ret & event:
events.append(event)
return events
def enable_i2c_slave(self, slave_address):
"""Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
"""
ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,
self.BUFFER_SIZE, self.BUFFER_SIZE)
_raise_error_if_negative(ret)
def disable_i2c_slave(self):
"""Disable I2C slave mode."""
ret = api.py_aa_i2c_slave_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_slave_read(self):
"""Read the bytes from an I2C slave reception.
The bytes are returned as a string object.
"""
data = array.array('B', (0,) * self.BUFFER_SIZE)
status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,
self.BUFFER_SIZE, data)
_raise_i2c_status_code_error_if_failure(status)
# In case of general call, actually return the general call address
if addr == 0x80:
addr = 0x00
del data[rx_len:]
return (addr, bytes(data))
@property
def i2c_slave_response(self):
"""Response to next read command.
An array of bytes that will be transmitted to the I2C master with the
next read operation.
Warning: Due to the fact that the Aardvark API does not provide a means
to read out this value, it is buffered when setting the property.
Reading the property therefore might not return what is actually stored
in the device.
"""
return self._i2c_slave_response
@i2c_slave_response.setter
def i2c_slave_response(self, data):
data = array.array('B', data)
ret = api.py_aa_i2c_slave_set_response(self.handle, len(data), data)
_raise_error_if_negative(ret)
self._i2c_slave_response = data
@property
def i2c_slave_last_transmit_size(self):
"""Returns the number of bytes transmitted by the slave."""
ret = api.py_aa_i2c_slave_write_stats(self.handle)
_raise_error_if_negative(ret)
return ret
def enable_i2c_monitor(self):
"""Activate the I2C monitor.
Enabling the monitor will disable all other functions of the adapter.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_enable(self.handle)
_raise_error_if_negative(ret)
def disable_i2c_monitor(self):
"""Disable the I2C monitor.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_monitor_read(self):
"""Retrieved any data fetched by the monitor.
This function has an integrated timeout mechanism. You should use
:func:`poll` to determine if there is any data available.
Returns a list of data bytes and special symbols. There are three
special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
I2C_MONITOR_STOP.
"""
data = array.array('H', (0,) * self.BUFFER_SIZE)
ret = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
data)
_raise_error_if_negative(ret)
del data[ret:]
return data.tolist()
@property
def spi_bitrate(self):
"""SPI bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set. The slowest bitrate supported is 125kHz. Any smaller value will
be rounded up to 125kHz.
The power-on default value is 1000 kHz.
"""
ret = api.py_aa_spi_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@spi_bitrate.setter
def spi_bitrate(self, value):
ret = api.py_aa_spi_bitrate(self.handle, value)
_raise_error_if_negative(ret)
def spi_configure(self, polarity, phase, bitorder):
"""Configure the SPI interface."""
ret = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
_raise_error_if_negative(ret)
def spi_configure_mode(self, spi_mode):
"""Configure the SPI interface by the well known SPI modes."""
if spi_mode == SPI_MODE_0:
self.spi_configure(SPI_POL_RISING_FALLING,
SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)
elif spi_mode == SPI_MODE_3:
self.spi_configure(SPI_POL_FALLING_RISING,
SPI_PHASE_SETUP_SAMPLE, SPI_BITORDER_MSB)
else:
raise RuntimeError('SPI Mode not supported')
def spi_write(self, data):
"""Write a stream of bytes to a SPI device."""
data_out = array.array('B', data)
data_in = array.array('B', (0,) * len(data_out))
ret = api.py_aa_spi_write(self.handle, len(data_out), data_out,
len(data_in), data_in)
_raise_error_if_negative(ret)
return bytes(data_in)
def spi_ss_polarity(self, polarity):
"""Change the ouput polarity on the SS line.
Please note, that this only affects the master functions.
"""
ret = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
_raise_error_if_negative(ret)
|
kontron/python-aardvark | pyaardvark/aardvark.py | Aardvark.i2c_master_write | python | def i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):
data = array.array('B', data)
status, _ = api.py_aa_i2c_write_ext(self.handle, i2c_address, flags,
len(data), data)
_raise_i2c_status_code_error_if_failure(status) | Make an I2C write access.
The given I2C device is addressed and data given as a string is
written. The transaction is finished with an I2C stop condition unless
I2C_NO_STOP is set in the flags.
10 bit addresses are supported if the I2C_10_BIT_ADDR flag is set. | train | https://github.com/kontron/python-aardvark/blob/9827f669fbdc5bceb98e7d08a294b4e4e455d0d5/pyaardvark/aardvark.py#L404-L417 | null | class Aardvark(object):
"""Represents an Aardvark device."""
BUFFER_SIZE = 65535
def __init__(self, port=0):
ret, ver = api.py_aa_open_ext(port)
_raise_error_if_negative(ret)
#: A handle which is used as the first paramter for all calls to the
#: underlying API.
self.handle = ret
# assign some useful names
version = dict(
software = ver[0],
firmware = ver[1],
hardware = ver[2],
sw_req_by_fw = ver[3],
fw_req_by_sw = ver[4],
api_req_by_sw = ver[5],
)
#: Hardware revision of the host adapter as a string. The format is
#: ``M.NN`` where `M` is the major number and `NN` the zero padded
#: minor number.
self.hardware_revision = _to_version_str(version['hardware'])
#: Firmware version of the host adapter as a string. See
#: :attr:`hardware_revision` for more information on the format.
self.firmware_version = _to_version_str(version['firmware'])
#: Version of underlying C module (aardvark.so, aardvark.pyd) as a
#: string. See :attr:`hardware_revision` for more information on the
#: format.
self.api_version = _to_version_str(version['software'])
# version checks
if version['firmware'] < version['fw_req_by_sw']:
log.debug('The API requires a firmware version >= %s, but the '
'device has version %s',
_to_version_str(version['fw_req_by_sw']),
_to_version_str(version['firmware']))
ret = ERR_INCOMPATIBLE_DEVICE
elif version['software'] < version['sw_req_by_fw']:
log.debug('The firmware requires an API version >= %s, but the '
'API has version %s',
_to_version_str(version['sw_req_by_fw']),
_to_version_str(version['software']))
ret = ERR_INCOMPATIBLE_LIBRARY
_raise_error_if_negative(ret)
# Initialize shadow variables
self._i2c_slave_response = None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
return False
def close(self):
"""Close the device."""
api.py_aa_close(self.handle)
self.handle = None
def unique_id(self):
"""Return the unique identifier of the device. The identifier is the
serial number you can find on the adapter without the dash. Eg. the
serial number 0012-345678 would be 12345678.
"""
return api.py_aa_unique_id(self.handle)
def unique_id_str(self):
"""Return the unique identifier. But unlike :func:`unique_id`, the ID
is returned as a string which has the format NNNN-MMMMMMM.
"""
return _unique_id_str(self.unique_id())
def _interface_configuration(self, value):
ret = api.py_aa_configure(self.handle, value)
_raise_error_if_negative(ret)
return ret
@property
def enable_i2c(self):
"""Set this to `True` to enable the hardware I2C interface. If set to
`False` the hardware interface will be disabled and its pins (SDA and
SCL) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C
@enable_i2c.setter
def enable_i2c(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_GPIO_I2C
elif value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_SPI_GPIO
if new_config != config:
self._interface_configuration(new_config)
@property
def enable_spi(self):
"""Set this to `True` to enable the hardware SPI interface. If set to
`False` the hardware interface will be disabled and its pins (MISO,
MOSI, SCK and SS) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_SPI_GPIO or config == CONFIG_SPI_I2C
@enable_spi.setter
def enable_spi(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_SPI_GPIO
elif value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_GPIO_I2C
if new_config != config:
self._interface_configuration(new_config)
@property
def i2c_bitrate(self):
"""I2C bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set.
The power-on default value is 100 kHz.
"""
ret = api.py_aa_i2c_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bitrate.setter
def i2c_bitrate(self, value):
ret = api.py_aa_i2c_bitrate(self.handle, value)
_raise_error_if_negative(ret)
@property
def i2c_pullups(self):
"""Setting this to `True` will enable the I2C pullup resistors. If set
to `False` the pullup resistors will be disabled.
Raises an :exc:`IOError` if the hardware adapter does not support
pullup resistors.
"""
ret = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)
_raise_error_if_negative(ret)
return ret
@i2c_pullups.setter
def i2c_pullups(self, value):
if value:
pullup = I2C_PULLUP_BOTH
else:
pullup = I2C_PULLUP_NONE
ret = api.py_aa_i2c_pullup(self.handle, pullup)
_raise_error_if_negative(ret)
@property
def target_power(self):
"""Setting this to `True` will activate the power pins (4 and 6). If
set to `False` the power will be deactivated.
Raises an :exc:`IOError` if the hardware adapter does not support
the switchable power pins.
"""
ret = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)
_raise_error_if_negative(ret)
return ret
@target_power.setter
def target_power(self, value):
if value:
power = TARGET_POWER_BOTH
else:
power = TARGET_POWER_NONE
ret = api.py_aa_target_power(self.handle, power)
_raise_error_if_negative(ret)
@property
def i2c_bus_timeout(self):
"""I2C bus lock timeout in ms.
Minimum value is 10 ms and the maximum value is 450 ms. Not every value
can be set and will be rounded to the next possible number. You can
read back the property to get the actual value.
The power-on default value is 200 ms.
"""
ret = api.py_aa_i2c_bus_timeout(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bus_timeout.setter
def i2c_bus_timeout(self, timeout):
ret = api.py_aa_i2c_bus_timeout(self.handle, timeout)
_raise_error_if_negative(ret)
def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):
"""Make an I2C read access.
The given I2C device is addressed and clock cycles for `length` bytes
are generated. A short read will occur if the device generates an early
NAK.
The transaction is finished with an I2C stop condition unless the
I2C_NO_STOP flag is set.
"""
data = array.array('B', (0,) * length)
status, rx_len = api.py_aa_i2c_read_ext(self.handle, addr, flags,
length, data)
_raise_i2c_status_code_error_if_failure(status)
del data[rx_len:]
return bytes(data)
def i2c_master_write_read(self, i2c_address, data, length):
"""Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices like
EEPROMs, port expander, etc.
Basically, this is just a convenient function which internally uses
`i2c_master_write` and `i2c_master_read`.
"""
self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
return self.i2c_master_read(i2c_address, length)
def poll(self, timeout=None):
"""Wait for an event to occur.
If `timeout` is given, if specifies the length of time in milliseconds
which the function will wait for events before returing. If `timeout`
is omitted, negative or None, the call will block until there is an
event.
Returns a list of events. In case no event is pending, an empty list is
returned.
"""
if timeout is None:
timeout = -1
ret = api.py_aa_async_poll(self.handle, timeout)
_raise_error_if_negative(ret)
events = list()
for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
POLL_I2C_MONITOR):
if ret & event:
events.append(event)
return events
def enable_i2c_slave(self, slave_address):
"""Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
"""
ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,
self.BUFFER_SIZE, self.BUFFER_SIZE)
_raise_error_if_negative(ret)
def disable_i2c_slave(self):
"""Disable I2C slave mode."""
ret = api.py_aa_i2c_slave_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_slave_read(self):
"""Read the bytes from an I2C slave reception.
The bytes are returned as a string object.
"""
data = array.array('B', (0,) * self.BUFFER_SIZE)
status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,
self.BUFFER_SIZE, data)
_raise_i2c_status_code_error_if_failure(status)
# In case of general call, actually return the general call address
if addr == 0x80:
addr = 0x00
del data[rx_len:]
return (addr, bytes(data))
@property
def i2c_slave_response(self):
"""Response to next read command.
An array of bytes that will be transmitted to the I2C master with the
next read operation.
Warning: Due to the fact that the Aardvark API does not provide a means
to read out this value, it is buffered when setting the property.
Reading the property therefore might not return what is actually stored
in the device.
"""
return self._i2c_slave_response
@i2c_slave_response.setter
def i2c_slave_response(self, data):
data = array.array('B', data)
ret = api.py_aa_i2c_slave_set_response(self.handle, len(data), data)
_raise_error_if_negative(ret)
self._i2c_slave_response = data
@property
def i2c_slave_last_transmit_size(self):
"""Returns the number of bytes transmitted by the slave."""
ret = api.py_aa_i2c_slave_write_stats(self.handle)
_raise_error_if_negative(ret)
return ret
def enable_i2c_monitor(self):
"""Activate the I2C monitor.
Enabling the monitor will disable all other functions of the adapter.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_enable(self.handle)
_raise_error_if_negative(ret)
def disable_i2c_monitor(self):
"""Disable the I2C monitor.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_monitor_read(self):
"""Retrieved any data fetched by the monitor.
This function has an integrated timeout mechanism. You should use
:func:`poll` to determine if there is any data available.
Returns a list of data bytes and special symbols. There are three
special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
I2C_MONITOR_STOP.
"""
data = array.array('H', (0,) * self.BUFFER_SIZE)
ret = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
data)
_raise_error_if_negative(ret)
del data[ret:]
return data.tolist()
@property
def spi_bitrate(self):
"""SPI bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set. The slowest bitrate supported is 125kHz. Any smaller value will
be rounded up to 125kHz.
The power-on default value is 1000 kHz.
"""
ret = api.py_aa_spi_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@spi_bitrate.setter
def spi_bitrate(self, value):
ret = api.py_aa_spi_bitrate(self.handle, value)
_raise_error_if_negative(ret)
def spi_configure(self, polarity, phase, bitorder):
"""Configure the SPI interface."""
ret = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
_raise_error_if_negative(ret)
def spi_configure_mode(self, spi_mode):
"""Configure the SPI interface by the well known SPI modes."""
if spi_mode == SPI_MODE_0:
self.spi_configure(SPI_POL_RISING_FALLING,
SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)
elif spi_mode == SPI_MODE_3:
self.spi_configure(SPI_POL_FALLING_RISING,
SPI_PHASE_SETUP_SAMPLE, SPI_BITORDER_MSB)
else:
raise RuntimeError('SPI Mode not supported')
def spi_write(self, data):
"""Write a stream of bytes to a SPI device."""
data_out = array.array('B', data)
data_in = array.array('B', (0,) * len(data_out))
ret = api.py_aa_spi_write(self.handle, len(data_out), data_out,
len(data_in), data_in)
_raise_error_if_negative(ret)
return bytes(data_in)
def spi_ss_polarity(self, polarity):
"""Change the ouput polarity on the SS line.
Please note, that this only affects the master functions.
"""
ret = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
_raise_error_if_negative(ret)
|
kontron/python-aardvark | pyaardvark/aardvark.py | Aardvark.i2c_master_read | python | def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):
data = array.array('B', (0,) * length)
status, rx_len = api.py_aa_i2c_read_ext(self.handle, addr, flags,
length, data)
_raise_i2c_status_code_error_if_failure(status)
del data[rx_len:]
return bytes(data) | Make an I2C read access.
The given I2C device is addressed and clock cycles for `length` bytes
are generated. A short read will occur if the device generates an early
NAK.
The transaction is finished with an I2C stop condition unless the
I2C_NO_STOP flag is set. | train | https://github.com/kontron/python-aardvark/blob/9827f669fbdc5bceb98e7d08a294b4e4e455d0d5/pyaardvark/aardvark.py#L419-L435 | null | class Aardvark(object):
"""Represents an Aardvark device."""
BUFFER_SIZE = 65535
def __init__(self, port=0):
ret, ver = api.py_aa_open_ext(port)
_raise_error_if_negative(ret)
#: A handle which is used as the first paramter for all calls to the
#: underlying API.
self.handle = ret
# assign some useful names
version = dict(
software = ver[0],
firmware = ver[1],
hardware = ver[2],
sw_req_by_fw = ver[3],
fw_req_by_sw = ver[4],
api_req_by_sw = ver[5],
)
#: Hardware revision of the host adapter as a string. The format is
#: ``M.NN`` where `M` is the major number and `NN` the zero padded
#: minor number.
self.hardware_revision = _to_version_str(version['hardware'])
#: Firmware version of the host adapter as a string. See
#: :attr:`hardware_revision` for more information on the format.
self.firmware_version = _to_version_str(version['firmware'])
#: Version of underlying C module (aardvark.so, aardvark.pyd) as a
#: string. See :attr:`hardware_revision` for more information on the
#: format.
self.api_version = _to_version_str(version['software'])
# version checks
if version['firmware'] < version['fw_req_by_sw']:
log.debug('The API requires a firmware version >= %s, but the '
'device has version %s',
_to_version_str(version['fw_req_by_sw']),
_to_version_str(version['firmware']))
ret = ERR_INCOMPATIBLE_DEVICE
elif version['software'] < version['sw_req_by_fw']:
log.debug('The firmware requires an API version >= %s, but the '
'API has version %s',
_to_version_str(version['sw_req_by_fw']),
_to_version_str(version['software']))
ret = ERR_INCOMPATIBLE_LIBRARY
_raise_error_if_negative(ret)
# Initialize shadow variables
self._i2c_slave_response = None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
return False
def close(self):
"""Close the device."""
api.py_aa_close(self.handle)
self.handle = None
def unique_id(self):
"""Return the unique identifier of the device. The identifier is the
serial number you can find on the adapter without the dash. Eg. the
serial number 0012-345678 would be 12345678.
"""
return api.py_aa_unique_id(self.handle)
def unique_id_str(self):
"""Return the unique identifier. But unlike :func:`unique_id`, the ID
is returned as a string which has the format NNNN-MMMMMMM.
"""
return _unique_id_str(self.unique_id())
def _interface_configuration(self, value):
ret = api.py_aa_configure(self.handle, value)
_raise_error_if_negative(ret)
return ret
@property
def enable_i2c(self):
"""Set this to `True` to enable the hardware I2C interface. If set to
`False` the hardware interface will be disabled and its pins (SDA and
SCL) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C
@enable_i2c.setter
def enable_i2c(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_GPIO_I2C
elif value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_SPI_GPIO
if new_config != config:
self._interface_configuration(new_config)
@property
def enable_spi(self):
"""Set this to `True` to enable the hardware SPI interface. If set to
`False` the hardware interface will be disabled and its pins (MISO,
MOSI, SCK and SS) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_SPI_GPIO or config == CONFIG_SPI_I2C
@enable_spi.setter
def enable_spi(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_SPI_GPIO
elif value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_GPIO_I2C
if new_config != config:
self._interface_configuration(new_config)
@property
def i2c_bitrate(self):
"""I2C bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set.
The power-on default value is 100 kHz.
"""
ret = api.py_aa_i2c_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bitrate.setter
def i2c_bitrate(self, value):
ret = api.py_aa_i2c_bitrate(self.handle, value)
_raise_error_if_negative(ret)
@property
def i2c_pullups(self):
"""Setting this to `True` will enable the I2C pullup resistors. If set
to `False` the pullup resistors will be disabled.
Raises an :exc:`IOError` if the hardware adapter does not support
pullup resistors.
"""
ret = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)
_raise_error_if_negative(ret)
return ret
@i2c_pullups.setter
def i2c_pullups(self, value):
if value:
pullup = I2C_PULLUP_BOTH
else:
pullup = I2C_PULLUP_NONE
ret = api.py_aa_i2c_pullup(self.handle, pullup)
_raise_error_if_negative(ret)
@property
def target_power(self):
"""Setting this to `True` will activate the power pins (4 and 6). If
set to `False` the power will be deactivated.
Raises an :exc:`IOError` if the hardware adapter does not support
the switchable power pins.
"""
ret = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)
_raise_error_if_negative(ret)
return ret
@target_power.setter
def target_power(self, value):
if value:
power = TARGET_POWER_BOTH
else:
power = TARGET_POWER_NONE
ret = api.py_aa_target_power(self.handle, power)
_raise_error_if_negative(ret)
@property
def i2c_bus_timeout(self):
"""I2C bus lock timeout in ms.
Minimum value is 10 ms and the maximum value is 450 ms. Not every value
can be set and will be rounded to the next possible number. You can
read back the property to get the actual value.
The power-on default value is 200 ms.
"""
ret = api.py_aa_i2c_bus_timeout(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bus_timeout.setter
def i2c_bus_timeout(self, timeout):
ret = api.py_aa_i2c_bus_timeout(self.handle, timeout)
_raise_error_if_negative(ret)
def i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):
"""Make an I2C write access.
The given I2C device is addressed and data given as a string is
written. The transaction is finished with an I2C stop condition unless
I2C_NO_STOP is set in the flags.
10 bit addresses are supported if the I2C_10_BIT_ADDR flag is set.
"""
data = array.array('B', data)
status, _ = api.py_aa_i2c_write_ext(self.handle, i2c_address, flags,
len(data), data)
_raise_i2c_status_code_error_if_failure(status)
def i2c_master_write_read(self, i2c_address, data, length):
"""Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices like
EEPROMs, port expander, etc.
Basically, this is just a convenient function which internally uses
`i2c_master_write` and `i2c_master_read`.
"""
self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
return self.i2c_master_read(i2c_address, length)
def poll(self, timeout=None):
"""Wait for an event to occur.
If `timeout` is given, if specifies the length of time in milliseconds
which the function will wait for events before returing. If `timeout`
is omitted, negative or None, the call will block until there is an
event.
Returns a list of events. In case no event is pending, an empty list is
returned.
"""
if timeout is None:
timeout = -1
ret = api.py_aa_async_poll(self.handle, timeout)
_raise_error_if_negative(ret)
events = list()
for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
POLL_I2C_MONITOR):
if ret & event:
events.append(event)
return events
def enable_i2c_slave(self, slave_address):
"""Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
"""
ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,
self.BUFFER_SIZE, self.BUFFER_SIZE)
_raise_error_if_negative(ret)
def disable_i2c_slave(self):
"""Disable I2C slave mode."""
ret = api.py_aa_i2c_slave_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_slave_read(self):
"""Read the bytes from an I2C slave reception.
The bytes are returned as a string object.
"""
data = array.array('B', (0,) * self.BUFFER_SIZE)
status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,
self.BUFFER_SIZE, data)
_raise_i2c_status_code_error_if_failure(status)
# In case of general call, actually return the general call address
if addr == 0x80:
addr = 0x00
del data[rx_len:]
return (addr, bytes(data))
@property
def i2c_slave_response(self):
"""Response to next read command.
An array of bytes that will be transmitted to the I2C master with the
next read operation.
Warning: Due to the fact that the Aardvark API does not provide a means
to read out this value, it is buffered when setting the property.
Reading the property therefore might not return what is actually stored
in the device.
"""
return self._i2c_slave_response
@i2c_slave_response.setter
def i2c_slave_response(self, data):
data = array.array('B', data)
ret = api.py_aa_i2c_slave_set_response(self.handle, len(data), data)
_raise_error_if_negative(ret)
self._i2c_slave_response = data
@property
def i2c_slave_last_transmit_size(self):
"""Returns the number of bytes transmitted by the slave."""
ret = api.py_aa_i2c_slave_write_stats(self.handle)
_raise_error_if_negative(ret)
return ret
def enable_i2c_monitor(self):
"""Activate the I2C monitor.
Enabling the monitor will disable all other functions of the adapter.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_enable(self.handle)
_raise_error_if_negative(ret)
def disable_i2c_monitor(self):
"""Disable the I2C monitor.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_monitor_read(self):
"""Retrieved any data fetched by the monitor.
This function has an integrated timeout mechanism. You should use
:func:`poll` to determine if there is any data available.
Returns a list of data bytes and special symbols. There are three
special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
I2C_MONITOR_STOP.
"""
data = array.array('H', (0,) * self.BUFFER_SIZE)
ret = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
data)
_raise_error_if_negative(ret)
del data[ret:]
return data.tolist()
@property
def spi_bitrate(self):
"""SPI bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set. The slowest bitrate supported is 125kHz. Any smaller value will
be rounded up to 125kHz.
The power-on default value is 1000 kHz.
"""
ret = api.py_aa_spi_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@spi_bitrate.setter
def spi_bitrate(self, value):
ret = api.py_aa_spi_bitrate(self.handle, value)
_raise_error_if_negative(ret)
def spi_configure(self, polarity, phase, bitorder):
"""Configure the SPI interface."""
ret = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
_raise_error_if_negative(ret)
def spi_configure_mode(self, spi_mode):
"""Configure the SPI interface by the well known SPI modes."""
if spi_mode == SPI_MODE_0:
self.spi_configure(SPI_POL_RISING_FALLING,
SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)
elif spi_mode == SPI_MODE_3:
self.spi_configure(SPI_POL_FALLING_RISING,
SPI_PHASE_SETUP_SAMPLE, SPI_BITORDER_MSB)
else:
raise RuntimeError('SPI Mode not supported')
def spi_write(self, data):
"""Write a stream of bytes to a SPI device."""
data_out = array.array('B', data)
data_in = array.array('B', (0,) * len(data_out))
ret = api.py_aa_spi_write(self.handle, len(data_out), data_out,
len(data_in), data_in)
_raise_error_if_negative(ret)
return bytes(data_in)
def spi_ss_polarity(self, polarity):
"""Change the ouput polarity on the SS line.
Please note, that this only affects the master functions.
"""
ret = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
_raise_error_if_negative(ret)
|
kontron/python-aardvark | pyaardvark/aardvark.py | Aardvark.i2c_master_write_read | python | def i2c_master_write_read(self, i2c_address, data, length):
self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
return self.i2c_master_read(i2c_address, length) | Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices like
EEPROMs, port expander, etc.
Basically, this is just a convenient function which internally uses
`i2c_master_write` and `i2c_master_read`. | train | https://github.com/kontron/python-aardvark/blob/9827f669fbdc5bceb98e7d08a294b4e4e455d0d5/pyaardvark/aardvark.py#L437-L451 | null | class Aardvark(object):
"""Represents an Aardvark device."""
BUFFER_SIZE = 65535
def __init__(self, port=0):
ret, ver = api.py_aa_open_ext(port)
_raise_error_if_negative(ret)
#: A handle which is used as the first paramter for all calls to the
#: underlying API.
self.handle = ret
# assign some useful names
version = dict(
software = ver[0],
firmware = ver[1],
hardware = ver[2],
sw_req_by_fw = ver[3],
fw_req_by_sw = ver[4],
api_req_by_sw = ver[5],
)
#: Hardware revision of the host adapter as a string. The format is
#: ``M.NN`` where `M` is the major number and `NN` the zero padded
#: minor number.
self.hardware_revision = _to_version_str(version['hardware'])
#: Firmware version of the host adapter as a string. See
#: :attr:`hardware_revision` for more information on the format.
self.firmware_version = _to_version_str(version['firmware'])
#: Version of underlying C module (aardvark.so, aardvark.pyd) as a
#: string. See :attr:`hardware_revision` for more information on the
#: format.
self.api_version = _to_version_str(version['software'])
# version checks
if version['firmware'] < version['fw_req_by_sw']:
log.debug('The API requires a firmware version >= %s, but the '
'device has version %s',
_to_version_str(version['fw_req_by_sw']),
_to_version_str(version['firmware']))
ret = ERR_INCOMPATIBLE_DEVICE
elif version['software'] < version['sw_req_by_fw']:
log.debug('The firmware requires an API version >= %s, but the '
'API has version %s',
_to_version_str(version['sw_req_by_fw']),
_to_version_str(version['software']))
ret = ERR_INCOMPATIBLE_LIBRARY
_raise_error_if_negative(ret)
# Initialize shadow variables
self._i2c_slave_response = None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
return False
def close(self):
"""Close the device."""
api.py_aa_close(self.handle)
self.handle = None
def unique_id(self):
"""Return the unique identifier of the device. The identifier is the
serial number you can find on the adapter without the dash. Eg. the
serial number 0012-345678 would be 12345678.
"""
return api.py_aa_unique_id(self.handle)
def unique_id_str(self):
"""Return the unique identifier. But unlike :func:`unique_id`, the ID
is returned as a string which has the format NNNN-MMMMMMM.
"""
return _unique_id_str(self.unique_id())
def _interface_configuration(self, value):
ret = api.py_aa_configure(self.handle, value)
_raise_error_if_negative(ret)
return ret
@property
def enable_i2c(self):
"""Set this to `True` to enable the hardware I2C interface. If set to
`False` the hardware interface will be disabled and its pins (SDA and
SCL) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C
@enable_i2c.setter
def enable_i2c(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_GPIO_I2C
elif value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_SPI_GPIO
if new_config != config:
self._interface_configuration(new_config)
@property
def enable_spi(self):
"""Set this to `True` to enable the hardware SPI interface. If set to
`False` the hardware interface will be disabled and its pins (MISO,
MOSI, SCK and SS) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_SPI_GPIO or config == CONFIG_SPI_I2C
@enable_spi.setter
def enable_spi(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_SPI_GPIO
elif value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_GPIO_I2C
if new_config != config:
self._interface_configuration(new_config)
@property
def i2c_bitrate(self):
"""I2C bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set.
The power-on default value is 100 kHz.
"""
ret = api.py_aa_i2c_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bitrate.setter
def i2c_bitrate(self, value):
ret = api.py_aa_i2c_bitrate(self.handle, value)
_raise_error_if_negative(ret)
@property
def i2c_pullups(self):
"""Setting this to `True` will enable the I2C pullup resistors. If set
to `False` the pullup resistors will be disabled.
Raises an :exc:`IOError` if the hardware adapter does not support
pullup resistors.
"""
ret = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)
_raise_error_if_negative(ret)
return ret
@i2c_pullups.setter
def i2c_pullups(self, value):
if value:
pullup = I2C_PULLUP_BOTH
else:
pullup = I2C_PULLUP_NONE
ret = api.py_aa_i2c_pullup(self.handle, pullup)
_raise_error_if_negative(ret)
@property
def target_power(self):
"""Setting this to `True` will activate the power pins (4 and 6). If
set to `False` the power will be deactivated.
Raises an :exc:`IOError` if the hardware adapter does not support
the switchable power pins.
"""
ret = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)
_raise_error_if_negative(ret)
return ret
@target_power.setter
def target_power(self, value):
if value:
power = TARGET_POWER_BOTH
else:
power = TARGET_POWER_NONE
ret = api.py_aa_target_power(self.handle, power)
_raise_error_if_negative(ret)
@property
def i2c_bus_timeout(self):
"""I2C bus lock timeout in ms.
Minimum value is 10 ms and the maximum value is 450 ms. Not every value
can be set and will be rounded to the next possible number. You can
read back the property to get the actual value.
The power-on default value is 200 ms.
"""
ret = api.py_aa_i2c_bus_timeout(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bus_timeout.setter
def i2c_bus_timeout(self, timeout):
ret = api.py_aa_i2c_bus_timeout(self.handle, timeout)
_raise_error_if_negative(ret)
def i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):
"""Make an I2C write access.
The given I2C device is addressed and data given as a string is
written. The transaction is finished with an I2C stop condition unless
I2C_NO_STOP is set in the flags.
10 bit addresses are supported if the I2C_10_BIT_ADDR flag is set.
"""
data = array.array('B', data)
status, _ = api.py_aa_i2c_write_ext(self.handle, i2c_address, flags,
len(data), data)
_raise_i2c_status_code_error_if_failure(status)
def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):
"""Make an I2C read access.
The given I2C device is addressed and clock cycles for `length` bytes
are generated. A short read will occur if the device generates an early
NAK.
The transaction is finished with an I2C stop condition unless the
I2C_NO_STOP flag is set.
"""
data = array.array('B', (0,) * length)
status, rx_len = api.py_aa_i2c_read_ext(self.handle, addr, flags,
length, data)
_raise_i2c_status_code_error_if_failure(status)
del data[rx_len:]
return bytes(data)
def poll(self, timeout=None):
"""Wait for an event to occur.
If `timeout` is given, if specifies the length of time in milliseconds
which the function will wait for events before returing. If `timeout`
is omitted, negative or None, the call will block until there is an
event.
Returns a list of events. In case no event is pending, an empty list is
returned.
"""
if timeout is None:
timeout = -1
ret = api.py_aa_async_poll(self.handle, timeout)
_raise_error_if_negative(ret)
events = list()
for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
POLL_I2C_MONITOR):
if ret & event:
events.append(event)
return events
def enable_i2c_slave(self, slave_address):
"""Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
"""
ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,
self.BUFFER_SIZE, self.BUFFER_SIZE)
_raise_error_if_negative(ret)
def disable_i2c_slave(self):
"""Disable I2C slave mode."""
ret = api.py_aa_i2c_slave_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_slave_read(self):
"""Read the bytes from an I2C slave reception.
The bytes are returned as a string object.
"""
data = array.array('B', (0,) * self.BUFFER_SIZE)
status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,
self.BUFFER_SIZE, data)
_raise_i2c_status_code_error_if_failure(status)
# In case of general call, actually return the general call address
if addr == 0x80:
addr = 0x00
del data[rx_len:]
return (addr, bytes(data))
@property
def i2c_slave_response(self):
"""Response to next read command.
An array of bytes that will be transmitted to the I2C master with the
next read operation.
Warning: Due to the fact that the Aardvark API does not provide a means
to read out this value, it is buffered when setting the property.
Reading the property therefore might not return what is actually stored
in the device.
"""
return self._i2c_slave_response
@i2c_slave_response.setter
def i2c_slave_response(self, data):
data = array.array('B', data)
ret = api.py_aa_i2c_slave_set_response(self.handle, len(data), data)
_raise_error_if_negative(ret)
self._i2c_slave_response = data
@property
def i2c_slave_last_transmit_size(self):
"""Returns the number of bytes transmitted by the slave."""
ret = api.py_aa_i2c_slave_write_stats(self.handle)
_raise_error_if_negative(ret)
return ret
def enable_i2c_monitor(self):
"""Activate the I2C monitor.
Enabling the monitor will disable all other functions of the adapter.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_enable(self.handle)
_raise_error_if_negative(ret)
def disable_i2c_monitor(self):
"""Disable the I2C monitor.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_monitor_read(self):
"""Retrieved any data fetched by the monitor.
This function has an integrated timeout mechanism. You should use
:func:`poll` to determine if there is any data available.
Returns a list of data bytes and special symbols. There are three
special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
I2C_MONITOR_STOP.
"""
data = array.array('H', (0,) * self.BUFFER_SIZE)
ret = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
data)
_raise_error_if_negative(ret)
del data[ret:]
return data.tolist()
@property
def spi_bitrate(self):
"""SPI bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set. The slowest bitrate supported is 125kHz. Any smaller value will
be rounded up to 125kHz.
The power-on default value is 1000 kHz.
"""
ret = api.py_aa_spi_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@spi_bitrate.setter
def spi_bitrate(self, value):
ret = api.py_aa_spi_bitrate(self.handle, value)
_raise_error_if_negative(ret)
def spi_configure(self, polarity, phase, bitorder):
"""Configure the SPI interface."""
ret = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
_raise_error_if_negative(ret)
def spi_configure_mode(self, spi_mode):
"""Configure the SPI interface by the well known SPI modes."""
if spi_mode == SPI_MODE_0:
self.spi_configure(SPI_POL_RISING_FALLING,
SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)
elif spi_mode == SPI_MODE_3:
self.spi_configure(SPI_POL_FALLING_RISING,
SPI_PHASE_SETUP_SAMPLE, SPI_BITORDER_MSB)
else:
raise RuntimeError('SPI Mode not supported')
def spi_write(self, data):
"""Write a stream of bytes to a SPI device."""
data_out = array.array('B', data)
data_in = array.array('B', (0,) * len(data_out))
ret = api.py_aa_spi_write(self.handle, len(data_out), data_out,
len(data_in), data_in)
_raise_error_if_negative(ret)
return bytes(data_in)
def spi_ss_polarity(self, polarity):
"""Change the ouput polarity on the SS line.
Please note, that this only affects the master functions.
"""
ret = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
_raise_error_if_negative(ret)
|
kontron/python-aardvark | pyaardvark/aardvark.py | Aardvark.poll | python | def poll(self, timeout=None):
if timeout is None:
timeout = -1
ret = api.py_aa_async_poll(self.handle, timeout)
_raise_error_if_negative(ret)
events = list()
for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
POLL_I2C_MONITOR):
if ret & event:
events.append(event)
return events | Wait for an event to occur.
If `timeout` is given, if specifies the length of time in milliseconds
which the function will wait for events before returing. If `timeout`
is omitted, negative or None, the call will block until there is an
event.
Returns a list of events. In case no event is pending, an empty list is
returned. | train | https://github.com/kontron/python-aardvark/blob/9827f669fbdc5bceb98e7d08a294b4e4e455d0d5/pyaardvark/aardvark.py#L453-L475 | null | class Aardvark(object):
"""Represents an Aardvark device."""
BUFFER_SIZE = 65535
def __init__(self, port=0):
ret, ver = api.py_aa_open_ext(port)
_raise_error_if_negative(ret)
#: A handle which is used as the first paramter for all calls to the
#: underlying API.
self.handle = ret
# assign some useful names
version = dict(
software = ver[0],
firmware = ver[1],
hardware = ver[2],
sw_req_by_fw = ver[3],
fw_req_by_sw = ver[4],
api_req_by_sw = ver[5],
)
#: Hardware revision of the host adapter as a string. The format is
#: ``M.NN`` where `M` is the major number and `NN` the zero padded
#: minor number.
self.hardware_revision = _to_version_str(version['hardware'])
#: Firmware version of the host adapter as a string. See
#: :attr:`hardware_revision` for more information on the format.
self.firmware_version = _to_version_str(version['firmware'])
#: Version of underlying C module (aardvark.so, aardvark.pyd) as a
#: string. See :attr:`hardware_revision` for more information on the
#: format.
self.api_version = _to_version_str(version['software'])
# version checks
if version['firmware'] < version['fw_req_by_sw']:
log.debug('The API requires a firmware version >= %s, but the '
'device has version %s',
_to_version_str(version['fw_req_by_sw']),
_to_version_str(version['firmware']))
ret = ERR_INCOMPATIBLE_DEVICE
elif version['software'] < version['sw_req_by_fw']:
log.debug('The firmware requires an API version >= %s, but the '
'API has version %s',
_to_version_str(version['sw_req_by_fw']),
_to_version_str(version['software']))
ret = ERR_INCOMPATIBLE_LIBRARY
_raise_error_if_negative(ret)
# Initialize shadow variables
self._i2c_slave_response = None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
return False
def close(self):
"""Close the device."""
api.py_aa_close(self.handle)
self.handle = None
def unique_id(self):
"""Return the unique identifier of the device. The identifier is the
serial number you can find on the adapter without the dash. Eg. the
serial number 0012-345678 would be 12345678.
"""
return api.py_aa_unique_id(self.handle)
def unique_id_str(self):
"""Return the unique identifier. But unlike :func:`unique_id`, the ID
is returned as a string which has the format NNNN-MMMMMMM.
"""
return _unique_id_str(self.unique_id())
def _interface_configuration(self, value):
ret = api.py_aa_configure(self.handle, value)
_raise_error_if_negative(ret)
return ret
@property
def enable_i2c(self):
"""Set this to `True` to enable the hardware I2C interface. If set to
`False` the hardware interface will be disabled and its pins (SDA and
SCL) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C
@enable_i2c.setter
def enable_i2c(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_GPIO_I2C
elif value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_SPI_GPIO
if new_config != config:
self._interface_configuration(new_config)
@property
def enable_spi(self):
"""Set this to `True` to enable the hardware SPI interface. If set to
`False` the hardware interface will be disabled and its pins (MISO,
MOSI, SCK and SS) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_SPI_GPIO or config == CONFIG_SPI_I2C
@enable_spi.setter
def enable_spi(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_SPI_GPIO
elif value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_GPIO_I2C
if new_config != config:
self._interface_configuration(new_config)
@property
def i2c_bitrate(self):
"""I2C bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set.
The power-on default value is 100 kHz.
"""
ret = api.py_aa_i2c_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bitrate.setter
def i2c_bitrate(self, value):
ret = api.py_aa_i2c_bitrate(self.handle, value)
_raise_error_if_negative(ret)
@property
def i2c_pullups(self):
"""Setting this to `True` will enable the I2C pullup resistors. If set
to `False` the pullup resistors will be disabled.
Raises an :exc:`IOError` if the hardware adapter does not support
pullup resistors.
"""
ret = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)
_raise_error_if_negative(ret)
return ret
@i2c_pullups.setter
def i2c_pullups(self, value):
if value:
pullup = I2C_PULLUP_BOTH
else:
pullup = I2C_PULLUP_NONE
ret = api.py_aa_i2c_pullup(self.handle, pullup)
_raise_error_if_negative(ret)
@property
def target_power(self):
"""Setting this to `True` will activate the power pins (4 and 6). If
set to `False` the power will be deactivated.
Raises an :exc:`IOError` if the hardware adapter does not support
the switchable power pins.
"""
ret = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)
_raise_error_if_negative(ret)
return ret
@target_power.setter
def target_power(self, value):
if value:
power = TARGET_POWER_BOTH
else:
power = TARGET_POWER_NONE
ret = api.py_aa_target_power(self.handle, power)
_raise_error_if_negative(ret)
@property
def i2c_bus_timeout(self):
"""I2C bus lock timeout in ms.
Minimum value is 10 ms and the maximum value is 450 ms. Not every value
can be set and will be rounded to the next possible number. You can
read back the property to get the actual value.
The power-on default value is 200 ms.
"""
ret = api.py_aa_i2c_bus_timeout(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bus_timeout.setter
def i2c_bus_timeout(self, timeout):
ret = api.py_aa_i2c_bus_timeout(self.handle, timeout)
_raise_error_if_negative(ret)
def i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):
"""Make an I2C write access.
The given I2C device is addressed and data given as a string is
written. The transaction is finished with an I2C stop condition unless
I2C_NO_STOP is set in the flags.
10 bit addresses are supported if the I2C_10_BIT_ADDR flag is set.
"""
data = array.array('B', data)
status, _ = api.py_aa_i2c_write_ext(self.handle, i2c_address, flags,
len(data), data)
_raise_i2c_status_code_error_if_failure(status)
def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):
"""Make an I2C read access.
The given I2C device is addressed and clock cycles for `length` bytes
are generated. A short read will occur if the device generates an early
NAK.
The transaction is finished with an I2C stop condition unless the
I2C_NO_STOP flag is set.
"""
data = array.array('B', (0,) * length)
status, rx_len = api.py_aa_i2c_read_ext(self.handle, addr, flags,
length, data)
_raise_i2c_status_code_error_if_failure(status)
del data[rx_len:]
return bytes(data)
def i2c_master_write_read(self, i2c_address, data, length):
"""Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices like
EEPROMs, port expander, etc.
Basically, this is just a convenient function which internally uses
`i2c_master_write` and `i2c_master_read`.
"""
self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
return self.i2c_master_read(i2c_address, length)
def enable_i2c_slave(self, slave_address):
"""Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
"""
ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,
self.BUFFER_SIZE, self.BUFFER_SIZE)
_raise_error_if_negative(ret)
def disable_i2c_slave(self):
"""Disable I2C slave mode."""
ret = api.py_aa_i2c_slave_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_slave_read(self):
"""Read the bytes from an I2C slave reception.
The bytes are returned as a string object.
"""
data = array.array('B', (0,) * self.BUFFER_SIZE)
status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,
self.BUFFER_SIZE, data)
_raise_i2c_status_code_error_if_failure(status)
# In case of general call, actually return the general call address
if addr == 0x80:
addr = 0x00
del data[rx_len:]
return (addr, bytes(data))
@property
def i2c_slave_response(self):
"""Response to next read command.
An array of bytes that will be transmitted to the I2C master with the
next read operation.
Warning: Due to the fact that the Aardvark API does not provide a means
to read out this value, it is buffered when setting the property.
Reading the property therefore might not return what is actually stored
in the device.
"""
return self._i2c_slave_response
@i2c_slave_response.setter
def i2c_slave_response(self, data):
data = array.array('B', data)
ret = api.py_aa_i2c_slave_set_response(self.handle, len(data), data)
_raise_error_if_negative(ret)
self._i2c_slave_response = data
@property
def i2c_slave_last_transmit_size(self):
"""Returns the number of bytes transmitted by the slave."""
ret = api.py_aa_i2c_slave_write_stats(self.handle)
_raise_error_if_negative(ret)
return ret
def enable_i2c_monitor(self):
"""Activate the I2C monitor.
Enabling the monitor will disable all other functions of the adapter.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_enable(self.handle)
_raise_error_if_negative(ret)
def disable_i2c_monitor(self):
"""Disable the I2C monitor.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_monitor_read(self):
"""Retrieved any data fetched by the monitor.
This function has an integrated timeout mechanism. You should use
:func:`poll` to determine if there is any data available.
Returns a list of data bytes and special symbols. There are three
special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
I2C_MONITOR_STOP.
"""
data = array.array('H', (0,) * self.BUFFER_SIZE)
ret = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
data)
_raise_error_if_negative(ret)
del data[ret:]
return data.tolist()
@property
def spi_bitrate(self):
"""SPI bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set. The slowest bitrate supported is 125kHz. Any smaller value will
be rounded up to 125kHz.
The power-on default value is 1000 kHz.
"""
ret = api.py_aa_spi_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@spi_bitrate.setter
def spi_bitrate(self, value):
ret = api.py_aa_spi_bitrate(self.handle, value)
_raise_error_if_negative(ret)
def spi_configure(self, polarity, phase, bitorder):
"""Configure the SPI interface."""
ret = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
_raise_error_if_negative(ret)
def spi_configure_mode(self, spi_mode):
"""Configure the SPI interface by the well known SPI modes."""
if spi_mode == SPI_MODE_0:
self.spi_configure(SPI_POL_RISING_FALLING,
SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)
elif spi_mode == SPI_MODE_3:
self.spi_configure(SPI_POL_FALLING_RISING,
SPI_PHASE_SETUP_SAMPLE, SPI_BITORDER_MSB)
else:
raise RuntimeError('SPI Mode not supported')
def spi_write(self, data):
"""Write a stream of bytes to a SPI device."""
data_out = array.array('B', data)
data_in = array.array('B', (0,) * len(data_out))
ret = api.py_aa_spi_write(self.handle, len(data_out), data_out,
len(data_in), data_in)
_raise_error_if_negative(ret)
return bytes(data_in)
def spi_ss_polarity(self, polarity):
"""Change the ouput polarity on the SS line.
Please note, that this only affects the master functions.
"""
ret = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
_raise_error_if_negative(ret)
|
kontron/python-aardvark | pyaardvark/aardvark.py | Aardvark.enable_i2c_slave | python | def enable_i2c_slave(self, slave_address):
ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,
self.BUFFER_SIZE, self.BUFFER_SIZE)
_raise_error_if_negative(ret) | Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`. | train | https://github.com/kontron/python-aardvark/blob/9827f669fbdc5bceb98e7d08a294b4e4e455d0d5/pyaardvark/aardvark.py#L477-L488 | null | class Aardvark(object):
"""Represents an Aardvark device."""
BUFFER_SIZE = 65535
def __init__(self, port=0):
ret, ver = api.py_aa_open_ext(port)
_raise_error_if_negative(ret)
#: A handle which is used as the first paramter for all calls to the
#: underlying API.
self.handle = ret
# assign some useful names
version = dict(
software = ver[0],
firmware = ver[1],
hardware = ver[2],
sw_req_by_fw = ver[3],
fw_req_by_sw = ver[4],
api_req_by_sw = ver[5],
)
#: Hardware revision of the host adapter as a string. The format is
#: ``M.NN`` where `M` is the major number and `NN` the zero padded
#: minor number.
self.hardware_revision = _to_version_str(version['hardware'])
#: Firmware version of the host adapter as a string. See
#: :attr:`hardware_revision` for more information on the format.
self.firmware_version = _to_version_str(version['firmware'])
#: Version of underlying C module (aardvark.so, aardvark.pyd) as a
#: string. See :attr:`hardware_revision` for more information on the
#: format.
self.api_version = _to_version_str(version['software'])
# version checks
if version['firmware'] < version['fw_req_by_sw']:
log.debug('The API requires a firmware version >= %s, but the '
'device has version %s',
_to_version_str(version['fw_req_by_sw']),
_to_version_str(version['firmware']))
ret = ERR_INCOMPATIBLE_DEVICE
elif version['software'] < version['sw_req_by_fw']:
log.debug('The firmware requires an API version >= %s, but the '
'API has version %s',
_to_version_str(version['sw_req_by_fw']),
_to_version_str(version['software']))
ret = ERR_INCOMPATIBLE_LIBRARY
_raise_error_if_negative(ret)
# Initialize shadow variables
self._i2c_slave_response = None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
return False
def close(self):
"""Close the device."""
api.py_aa_close(self.handle)
self.handle = None
def unique_id(self):
"""Return the unique identifier of the device. The identifier is the
serial number you can find on the adapter without the dash. Eg. the
serial number 0012-345678 would be 12345678.
"""
return api.py_aa_unique_id(self.handle)
def unique_id_str(self):
"""Return the unique identifier. But unlike :func:`unique_id`, the ID
is returned as a string which has the format NNNN-MMMMMMM.
"""
return _unique_id_str(self.unique_id())
def _interface_configuration(self, value):
ret = api.py_aa_configure(self.handle, value)
_raise_error_if_negative(ret)
return ret
@property
def enable_i2c(self):
"""Set this to `True` to enable the hardware I2C interface. If set to
`False` the hardware interface will be disabled and its pins (SDA and
SCL) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C
@enable_i2c.setter
def enable_i2c(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_GPIO_I2C
elif value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_SPI_GPIO
if new_config != config:
self._interface_configuration(new_config)
@property
def enable_spi(self):
"""Set this to `True` to enable the hardware SPI interface. If set to
`False` the hardware interface will be disabled and its pins (MISO,
MOSI, SCK and SS) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_SPI_GPIO or config == CONFIG_SPI_I2C
@enable_spi.setter
def enable_spi(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_SPI_GPIO
elif value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_GPIO_I2C
if new_config != config:
self._interface_configuration(new_config)
@property
def i2c_bitrate(self):
"""I2C bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set.
The power-on default value is 100 kHz.
"""
ret = api.py_aa_i2c_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bitrate.setter
def i2c_bitrate(self, value):
ret = api.py_aa_i2c_bitrate(self.handle, value)
_raise_error_if_negative(ret)
@property
def i2c_pullups(self):
"""Setting this to `True` will enable the I2C pullup resistors. If set
to `False` the pullup resistors will be disabled.
Raises an :exc:`IOError` if the hardware adapter does not support
pullup resistors.
"""
ret = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)
_raise_error_if_negative(ret)
return ret
@i2c_pullups.setter
def i2c_pullups(self, value):
if value:
pullup = I2C_PULLUP_BOTH
else:
pullup = I2C_PULLUP_NONE
ret = api.py_aa_i2c_pullup(self.handle, pullup)
_raise_error_if_negative(ret)
@property
def target_power(self):
"""Setting this to `True` will activate the power pins (4 and 6). If
set to `False` the power will be deactivated.
Raises an :exc:`IOError` if the hardware adapter does not support
the switchable power pins.
"""
ret = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)
_raise_error_if_negative(ret)
return ret
@target_power.setter
def target_power(self, value):
if value:
power = TARGET_POWER_BOTH
else:
power = TARGET_POWER_NONE
ret = api.py_aa_target_power(self.handle, power)
_raise_error_if_negative(ret)
@property
def i2c_bus_timeout(self):
"""I2C bus lock timeout in ms.
Minimum value is 10 ms and the maximum value is 450 ms. Not every value
can be set and will be rounded to the next possible number. You can
read back the property to get the actual value.
The power-on default value is 200 ms.
"""
ret = api.py_aa_i2c_bus_timeout(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bus_timeout.setter
def i2c_bus_timeout(self, timeout):
ret = api.py_aa_i2c_bus_timeout(self.handle, timeout)
_raise_error_if_negative(ret)
def i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):
"""Make an I2C write access.
The given I2C device is addressed and data given as a string is
written. The transaction is finished with an I2C stop condition unless
I2C_NO_STOP is set in the flags.
10 bit addresses are supported if the I2C_10_BIT_ADDR flag is set.
"""
data = array.array('B', data)
status, _ = api.py_aa_i2c_write_ext(self.handle, i2c_address, flags,
len(data), data)
_raise_i2c_status_code_error_if_failure(status)
def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):
"""Make an I2C read access.
The given I2C device is addressed and clock cycles for `length` bytes
are generated. A short read will occur if the device generates an early
NAK.
The transaction is finished with an I2C stop condition unless the
I2C_NO_STOP flag is set.
"""
data = array.array('B', (0,) * length)
status, rx_len = api.py_aa_i2c_read_ext(self.handle, addr, flags,
length, data)
_raise_i2c_status_code_error_if_failure(status)
del data[rx_len:]
return bytes(data)
def i2c_master_write_read(self, i2c_address, data, length):
"""Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices like
EEPROMs, port expander, etc.
Basically, this is just a convenient function which internally uses
`i2c_master_write` and `i2c_master_read`.
"""
self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
return self.i2c_master_read(i2c_address, length)
def poll(self, timeout=None):
"""Wait for an event to occur.
If `timeout` is given, if specifies the length of time in milliseconds
which the function will wait for events before returing. If `timeout`
is omitted, negative or None, the call will block until there is an
event.
Returns a list of events. In case no event is pending, an empty list is
returned.
"""
if timeout is None:
timeout = -1
ret = api.py_aa_async_poll(self.handle, timeout)
_raise_error_if_negative(ret)
events = list()
for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
POLL_I2C_MONITOR):
if ret & event:
events.append(event)
return events
def disable_i2c_slave(self):
"""Disable I2C slave mode."""
ret = api.py_aa_i2c_slave_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_slave_read(self):
"""Read the bytes from an I2C slave reception.
The bytes are returned as a string object.
"""
data = array.array('B', (0,) * self.BUFFER_SIZE)
status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,
self.BUFFER_SIZE, data)
_raise_i2c_status_code_error_if_failure(status)
# In case of general call, actually return the general call address
if addr == 0x80:
addr = 0x00
del data[rx_len:]
return (addr, bytes(data))
@property
def i2c_slave_response(self):
"""Response to next read command.
An array of bytes that will be transmitted to the I2C master with the
next read operation.
Warning: Due to the fact that the Aardvark API does not provide a means
to read out this value, it is buffered when setting the property.
Reading the property therefore might not return what is actually stored
in the device.
"""
return self._i2c_slave_response
@i2c_slave_response.setter
def i2c_slave_response(self, data):
data = array.array('B', data)
ret = api.py_aa_i2c_slave_set_response(self.handle, len(data), data)
_raise_error_if_negative(ret)
self._i2c_slave_response = data
@property
def i2c_slave_last_transmit_size(self):
"""Returns the number of bytes transmitted by the slave."""
ret = api.py_aa_i2c_slave_write_stats(self.handle)
_raise_error_if_negative(ret)
return ret
def enable_i2c_monitor(self):
"""Activate the I2C monitor.
Enabling the monitor will disable all other functions of the adapter.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_enable(self.handle)
_raise_error_if_negative(ret)
def disable_i2c_monitor(self):
"""Disable the I2C monitor.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_monitor_read(self):
"""Retrieved any data fetched by the monitor.
This function has an integrated timeout mechanism. You should use
:func:`poll` to determine if there is any data available.
Returns a list of data bytes and special symbols. There are three
special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
I2C_MONITOR_STOP.
"""
data = array.array('H', (0,) * self.BUFFER_SIZE)
ret = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
data)
_raise_error_if_negative(ret)
del data[ret:]
return data.tolist()
@property
def spi_bitrate(self):
"""SPI bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set. The slowest bitrate supported is 125kHz. Any smaller value will
be rounded up to 125kHz.
The power-on default value is 1000 kHz.
"""
ret = api.py_aa_spi_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@spi_bitrate.setter
def spi_bitrate(self, value):
ret = api.py_aa_spi_bitrate(self.handle, value)
_raise_error_if_negative(ret)
def spi_configure(self, polarity, phase, bitorder):
"""Configure the SPI interface."""
ret = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
_raise_error_if_negative(ret)
def spi_configure_mode(self, spi_mode):
"""Configure the SPI interface by the well known SPI modes."""
if spi_mode == SPI_MODE_0:
self.spi_configure(SPI_POL_RISING_FALLING,
SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)
elif spi_mode == SPI_MODE_3:
self.spi_configure(SPI_POL_FALLING_RISING,
SPI_PHASE_SETUP_SAMPLE, SPI_BITORDER_MSB)
else:
raise RuntimeError('SPI Mode not supported')
def spi_write(self, data):
"""Write a stream of bytes to a SPI device."""
data_out = array.array('B', data)
data_in = array.array('B', (0,) * len(data_out))
ret = api.py_aa_spi_write(self.handle, len(data_out), data_out,
len(data_in), data_in)
_raise_error_if_negative(ret)
return bytes(data_in)
def spi_ss_polarity(self, polarity):
"""Change the ouput polarity on the SS line.
Please note, that this only affects the master functions.
"""
ret = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
_raise_error_if_negative(ret)
|
kontron/python-aardvark | pyaardvark/aardvark.py | Aardvark.i2c_slave_read | python | def i2c_slave_read(self):
data = array.array('B', (0,) * self.BUFFER_SIZE)
status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,
self.BUFFER_SIZE, data)
_raise_i2c_status_code_error_if_failure(status)
# In case of general call, actually return the general call address
if addr == 0x80:
addr = 0x00
del data[rx_len:]
return (addr, bytes(data)) | Read the bytes from an I2C slave reception.
The bytes are returned as a string object. | train | https://github.com/kontron/python-aardvark/blob/9827f669fbdc5bceb98e7d08a294b4e4e455d0d5/pyaardvark/aardvark.py#L495-L509 | null | class Aardvark(object):
"""Represents an Aardvark device."""
BUFFER_SIZE = 65535
def __init__(self, port=0):
ret, ver = api.py_aa_open_ext(port)
_raise_error_if_negative(ret)
#: A handle which is used as the first paramter for all calls to the
#: underlying API.
self.handle = ret
# assign some useful names
version = dict(
software = ver[0],
firmware = ver[1],
hardware = ver[2],
sw_req_by_fw = ver[3],
fw_req_by_sw = ver[4],
api_req_by_sw = ver[5],
)
#: Hardware revision of the host adapter as a string. The format is
#: ``M.NN`` where `M` is the major number and `NN` the zero padded
#: minor number.
self.hardware_revision = _to_version_str(version['hardware'])
#: Firmware version of the host adapter as a string. See
#: :attr:`hardware_revision` for more information on the format.
self.firmware_version = _to_version_str(version['firmware'])
#: Version of underlying C module (aardvark.so, aardvark.pyd) as a
#: string. See :attr:`hardware_revision` for more information on the
#: format.
self.api_version = _to_version_str(version['software'])
# version checks
if version['firmware'] < version['fw_req_by_sw']:
log.debug('The API requires a firmware version >= %s, but the '
'device has version %s',
_to_version_str(version['fw_req_by_sw']),
_to_version_str(version['firmware']))
ret = ERR_INCOMPATIBLE_DEVICE
elif version['software'] < version['sw_req_by_fw']:
log.debug('The firmware requires an API version >= %s, but the '
'API has version %s',
_to_version_str(version['sw_req_by_fw']),
_to_version_str(version['software']))
ret = ERR_INCOMPATIBLE_LIBRARY
_raise_error_if_negative(ret)
# Initialize shadow variables
self._i2c_slave_response = None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
return False
def close(self):
"""Close the device."""
api.py_aa_close(self.handle)
self.handle = None
def unique_id(self):
"""Return the unique identifier of the device. The identifier is the
serial number you can find on the adapter without the dash. Eg. the
serial number 0012-345678 would be 12345678.
"""
return api.py_aa_unique_id(self.handle)
def unique_id_str(self):
"""Return the unique identifier. But unlike :func:`unique_id`, the ID
is returned as a string which has the format NNNN-MMMMMMM.
"""
return _unique_id_str(self.unique_id())
def _interface_configuration(self, value):
ret = api.py_aa_configure(self.handle, value)
_raise_error_if_negative(ret)
return ret
@property
def enable_i2c(self):
"""Set this to `True` to enable the hardware I2C interface. If set to
`False` the hardware interface will be disabled and its pins (SDA and
SCL) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C
@enable_i2c.setter
def enable_i2c(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_GPIO_I2C
elif value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_SPI_GPIO
if new_config != config:
self._interface_configuration(new_config)
@property
def enable_spi(self):
"""Set this to `True` to enable the hardware SPI interface. If set to
`False` the hardware interface will be disabled and its pins (MISO,
MOSI, SCK and SS) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_SPI_GPIO or config == CONFIG_SPI_I2C
@enable_spi.setter
def enable_spi(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_SPI_GPIO
elif value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_GPIO_I2C
if new_config != config:
self._interface_configuration(new_config)
@property
def i2c_bitrate(self):
"""I2C bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set.
The power-on default value is 100 kHz.
"""
ret = api.py_aa_i2c_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bitrate.setter
def i2c_bitrate(self, value):
ret = api.py_aa_i2c_bitrate(self.handle, value)
_raise_error_if_negative(ret)
@property
def i2c_pullups(self):
"""Setting this to `True` will enable the I2C pullup resistors. If set
to `False` the pullup resistors will be disabled.
Raises an :exc:`IOError` if the hardware adapter does not support
pullup resistors.
"""
ret = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)
_raise_error_if_negative(ret)
return ret
@i2c_pullups.setter
def i2c_pullups(self, value):
if value:
pullup = I2C_PULLUP_BOTH
else:
pullup = I2C_PULLUP_NONE
ret = api.py_aa_i2c_pullup(self.handle, pullup)
_raise_error_if_negative(ret)
@property
def target_power(self):
"""Setting this to `True` will activate the power pins (4 and 6). If
set to `False` the power will be deactivated.
Raises an :exc:`IOError` if the hardware adapter does not support
the switchable power pins.
"""
ret = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)
_raise_error_if_negative(ret)
return ret
@target_power.setter
def target_power(self, value):
if value:
power = TARGET_POWER_BOTH
else:
power = TARGET_POWER_NONE
ret = api.py_aa_target_power(self.handle, power)
_raise_error_if_negative(ret)
@property
def i2c_bus_timeout(self):
"""I2C bus lock timeout in ms.
Minimum value is 10 ms and the maximum value is 450 ms. Not every value
can be set and will be rounded to the next possible number. You can
read back the property to get the actual value.
The power-on default value is 200 ms.
"""
ret = api.py_aa_i2c_bus_timeout(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bus_timeout.setter
def i2c_bus_timeout(self, timeout):
ret = api.py_aa_i2c_bus_timeout(self.handle, timeout)
_raise_error_if_negative(ret)
def i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):
"""Make an I2C write access.
The given I2C device is addressed and data given as a string is
written. The transaction is finished with an I2C stop condition unless
I2C_NO_STOP is set in the flags.
10 bit addresses are supported if the I2C_10_BIT_ADDR flag is set.
"""
data = array.array('B', data)
status, _ = api.py_aa_i2c_write_ext(self.handle, i2c_address, flags,
len(data), data)
_raise_i2c_status_code_error_if_failure(status)
def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):
"""Make an I2C read access.
The given I2C device is addressed and clock cycles for `length` bytes
are generated. A short read will occur if the device generates an early
NAK.
The transaction is finished with an I2C stop condition unless the
I2C_NO_STOP flag is set.
"""
data = array.array('B', (0,) * length)
status, rx_len = api.py_aa_i2c_read_ext(self.handle, addr, flags,
length, data)
_raise_i2c_status_code_error_if_failure(status)
del data[rx_len:]
return bytes(data)
def i2c_master_write_read(self, i2c_address, data, length):
"""Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices like
EEPROMs, port expander, etc.
Basically, this is just a convenient function which internally uses
`i2c_master_write` and `i2c_master_read`.
"""
self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
return self.i2c_master_read(i2c_address, length)
def poll(self, timeout=None):
"""Wait for an event to occur.
If `timeout` is given, if specifies the length of time in milliseconds
which the function will wait for events before returing. If `timeout`
is omitted, negative or None, the call will block until there is an
event.
Returns a list of events. In case no event is pending, an empty list is
returned.
"""
if timeout is None:
timeout = -1
ret = api.py_aa_async_poll(self.handle, timeout)
_raise_error_if_negative(ret)
events = list()
for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
POLL_I2C_MONITOR):
if ret & event:
events.append(event)
return events
def enable_i2c_slave(self, slave_address):
"""Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
"""
ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,
self.BUFFER_SIZE, self.BUFFER_SIZE)
_raise_error_if_negative(ret)
def disable_i2c_slave(self):
"""Disable I2C slave mode."""
ret = api.py_aa_i2c_slave_disable(self.handle)
_raise_error_if_negative(ret)
@property
def i2c_slave_response(self):
"""Response to next read command.
An array of bytes that will be transmitted to the I2C master with the
next read operation.
Warning: Due to the fact that the Aardvark API does not provide a means
to read out this value, it is buffered when setting the property.
Reading the property therefore might not return what is actually stored
in the device.
"""
return self._i2c_slave_response
@i2c_slave_response.setter
def i2c_slave_response(self, data):
data = array.array('B', data)
ret = api.py_aa_i2c_slave_set_response(self.handle, len(data), data)
_raise_error_if_negative(ret)
self._i2c_slave_response = data
@property
def i2c_slave_last_transmit_size(self):
"""Returns the number of bytes transmitted by the slave."""
ret = api.py_aa_i2c_slave_write_stats(self.handle)
_raise_error_if_negative(ret)
return ret
def enable_i2c_monitor(self):
"""Activate the I2C monitor.
Enabling the monitor will disable all other functions of the adapter.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_enable(self.handle)
_raise_error_if_negative(ret)
def disable_i2c_monitor(self):
"""Disable the I2C monitor.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_monitor_read(self):
"""Retrieved any data fetched by the monitor.
This function has an integrated timeout mechanism. You should use
:func:`poll` to determine if there is any data available.
Returns a list of data bytes and special symbols. There are three
special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
I2C_MONITOR_STOP.
"""
data = array.array('H', (0,) * self.BUFFER_SIZE)
ret = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
data)
_raise_error_if_negative(ret)
del data[ret:]
return data.tolist()
@property
def spi_bitrate(self):
"""SPI bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set. The slowest bitrate supported is 125kHz. Any smaller value will
be rounded up to 125kHz.
The power-on default value is 1000 kHz.
"""
ret = api.py_aa_spi_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@spi_bitrate.setter
def spi_bitrate(self, value):
ret = api.py_aa_spi_bitrate(self.handle, value)
_raise_error_if_negative(ret)
def spi_configure(self, polarity, phase, bitorder):
"""Configure the SPI interface."""
ret = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
_raise_error_if_negative(ret)
def spi_configure_mode(self, spi_mode):
"""Configure the SPI interface by the well known SPI modes."""
if spi_mode == SPI_MODE_0:
self.spi_configure(SPI_POL_RISING_FALLING,
SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)
elif spi_mode == SPI_MODE_3:
self.spi_configure(SPI_POL_FALLING_RISING,
SPI_PHASE_SETUP_SAMPLE, SPI_BITORDER_MSB)
else:
raise RuntimeError('SPI Mode not supported')
def spi_write(self, data):
"""Write a stream of bytes to a SPI device."""
data_out = array.array('B', data)
data_in = array.array('B', (0,) * len(data_out))
ret = api.py_aa_spi_write(self.handle, len(data_out), data_out,
len(data_in), data_in)
_raise_error_if_negative(ret)
return bytes(data_in)
def spi_ss_polarity(self, polarity):
"""Change the ouput polarity on the SS line.
Please note, that this only affects the master functions.
"""
ret = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
_raise_error_if_negative(ret)
|
kontron/python-aardvark | pyaardvark/aardvark.py | Aardvark.i2c_slave_last_transmit_size | python | def i2c_slave_last_transmit_size(self):
ret = api.py_aa_i2c_slave_write_stats(self.handle)
_raise_error_if_negative(ret)
return ret | Returns the number of bytes transmitted by the slave. | train | https://github.com/kontron/python-aardvark/blob/9827f669fbdc5bceb98e7d08a294b4e4e455d0d5/pyaardvark/aardvark.py#L533-L537 | null | class Aardvark(object):
"""Represents an Aardvark device."""
BUFFER_SIZE = 65535
def __init__(self, port=0):
ret, ver = api.py_aa_open_ext(port)
_raise_error_if_negative(ret)
#: A handle which is used as the first paramter for all calls to the
#: underlying API.
self.handle = ret
# assign some useful names
version = dict(
software = ver[0],
firmware = ver[1],
hardware = ver[2],
sw_req_by_fw = ver[3],
fw_req_by_sw = ver[4],
api_req_by_sw = ver[5],
)
#: Hardware revision of the host adapter as a string. The format is
#: ``M.NN`` where `M` is the major number and `NN` the zero padded
#: minor number.
self.hardware_revision = _to_version_str(version['hardware'])
#: Firmware version of the host adapter as a string. See
#: :attr:`hardware_revision` for more information on the format.
self.firmware_version = _to_version_str(version['firmware'])
#: Version of underlying C module (aardvark.so, aardvark.pyd) as a
#: string. See :attr:`hardware_revision` for more information on the
#: format.
self.api_version = _to_version_str(version['software'])
# version checks
if version['firmware'] < version['fw_req_by_sw']:
log.debug('The API requires a firmware version >= %s, but the '
'device has version %s',
_to_version_str(version['fw_req_by_sw']),
_to_version_str(version['firmware']))
ret = ERR_INCOMPATIBLE_DEVICE
elif version['software'] < version['sw_req_by_fw']:
log.debug('The firmware requires an API version >= %s, but the '
'API has version %s',
_to_version_str(version['sw_req_by_fw']),
_to_version_str(version['software']))
ret = ERR_INCOMPATIBLE_LIBRARY
_raise_error_if_negative(ret)
# Initialize shadow variables
self._i2c_slave_response = None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
return False
def close(self):
"""Close the device."""
api.py_aa_close(self.handle)
self.handle = None
def unique_id(self):
"""Return the unique identifier of the device. The identifier is the
serial number you can find on the adapter without the dash. Eg. the
serial number 0012-345678 would be 12345678.
"""
return api.py_aa_unique_id(self.handle)
def unique_id_str(self):
"""Return the unique identifier. But unlike :func:`unique_id`, the ID
is returned as a string which has the format NNNN-MMMMMMM.
"""
return _unique_id_str(self.unique_id())
def _interface_configuration(self, value):
ret = api.py_aa_configure(self.handle, value)
_raise_error_if_negative(ret)
return ret
@property
def enable_i2c(self):
"""Set this to `True` to enable the hardware I2C interface. If set to
`False` the hardware interface will be disabled and its pins (SDA and
SCL) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C
@enable_i2c.setter
def enable_i2c(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_GPIO_I2C
elif value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_SPI_GPIO
if new_config != config:
self._interface_configuration(new_config)
@property
def enable_spi(self):
"""Set this to `True` to enable the hardware SPI interface. If set to
`False` the hardware interface will be disabled and its pins (MISO,
MOSI, SCK and SS) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_SPI_GPIO or config == CONFIG_SPI_I2C
@enable_spi.setter
def enable_spi(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_SPI_GPIO
elif value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_GPIO_I2C
if new_config != config:
self._interface_configuration(new_config)
@property
def i2c_bitrate(self):
"""I2C bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set.
The power-on default value is 100 kHz.
"""
ret = api.py_aa_i2c_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bitrate.setter
def i2c_bitrate(self, value):
ret = api.py_aa_i2c_bitrate(self.handle, value)
_raise_error_if_negative(ret)
@property
def i2c_pullups(self):
"""Setting this to `True` will enable the I2C pullup resistors. If set
to `False` the pullup resistors will be disabled.
Raises an :exc:`IOError` if the hardware adapter does not support
pullup resistors.
"""
ret = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)
_raise_error_if_negative(ret)
return ret
@i2c_pullups.setter
def i2c_pullups(self, value):
if value:
pullup = I2C_PULLUP_BOTH
else:
pullup = I2C_PULLUP_NONE
ret = api.py_aa_i2c_pullup(self.handle, pullup)
_raise_error_if_negative(ret)
@property
def target_power(self):
"""Setting this to `True` will activate the power pins (4 and 6). If
set to `False` the power will be deactivated.
Raises an :exc:`IOError` if the hardware adapter does not support
the switchable power pins.
"""
ret = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)
_raise_error_if_negative(ret)
return ret
@target_power.setter
def target_power(self, value):
if value:
power = TARGET_POWER_BOTH
else:
power = TARGET_POWER_NONE
ret = api.py_aa_target_power(self.handle, power)
_raise_error_if_negative(ret)
@property
def i2c_bus_timeout(self):
"""I2C bus lock timeout in ms.
Minimum value is 10 ms and the maximum value is 450 ms. Not every value
can be set and will be rounded to the next possible number. You can
read back the property to get the actual value.
The power-on default value is 200 ms.
"""
ret = api.py_aa_i2c_bus_timeout(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bus_timeout.setter
def i2c_bus_timeout(self, timeout):
ret = api.py_aa_i2c_bus_timeout(self.handle, timeout)
_raise_error_if_negative(ret)
def i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):
"""Make an I2C write access.
The given I2C device is addressed and data given as a string is
written. The transaction is finished with an I2C stop condition unless
I2C_NO_STOP is set in the flags.
10 bit addresses are supported if the I2C_10_BIT_ADDR flag is set.
"""
data = array.array('B', data)
status, _ = api.py_aa_i2c_write_ext(self.handle, i2c_address, flags,
len(data), data)
_raise_i2c_status_code_error_if_failure(status)
def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):
"""Make an I2C read access.
The given I2C device is addressed and clock cycles for `length` bytes
are generated. A short read will occur if the device generates an early
NAK.
The transaction is finished with an I2C stop condition unless the
I2C_NO_STOP flag is set.
"""
data = array.array('B', (0,) * length)
status, rx_len = api.py_aa_i2c_read_ext(self.handle, addr, flags,
length, data)
_raise_i2c_status_code_error_if_failure(status)
del data[rx_len:]
return bytes(data)
def i2c_master_write_read(self, i2c_address, data, length):
"""Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices like
EEPROMs, port expander, etc.
Basically, this is just a convenient function which internally uses
`i2c_master_write` and `i2c_master_read`.
"""
self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
return self.i2c_master_read(i2c_address, length)
def poll(self, timeout=None):
"""Wait for an event to occur.
If `timeout` is given, if specifies the length of time in milliseconds
which the function will wait for events before returing. If `timeout`
is omitted, negative or None, the call will block until there is an
event.
Returns a list of events. In case no event is pending, an empty list is
returned.
"""
if timeout is None:
timeout = -1
ret = api.py_aa_async_poll(self.handle, timeout)
_raise_error_if_negative(ret)
events = list()
for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
POLL_I2C_MONITOR):
if ret & event:
events.append(event)
return events
def enable_i2c_slave(self, slave_address):
"""Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
"""
ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,
self.BUFFER_SIZE, self.BUFFER_SIZE)
_raise_error_if_negative(ret)
def disable_i2c_slave(self):
"""Disable I2C slave mode."""
ret = api.py_aa_i2c_slave_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_slave_read(self):
"""Read the bytes from an I2C slave reception.
The bytes are returned as a string object.
"""
data = array.array('B', (0,) * self.BUFFER_SIZE)
status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,
self.BUFFER_SIZE, data)
_raise_i2c_status_code_error_if_failure(status)
# In case of general call, actually return the general call address
if addr == 0x80:
addr = 0x00
del data[rx_len:]
return (addr, bytes(data))
@property
def i2c_slave_response(self):
"""Response to next read command.
An array of bytes that will be transmitted to the I2C master with the
next read operation.
Warning: Due to the fact that the Aardvark API does not provide a means
to read out this value, it is buffered when setting the property.
Reading the property therefore might not return what is actually stored
in the device.
"""
return self._i2c_slave_response
@i2c_slave_response.setter
def i2c_slave_response(self, data):
data = array.array('B', data)
ret = api.py_aa_i2c_slave_set_response(self.handle, len(data), data)
_raise_error_if_negative(ret)
self._i2c_slave_response = data
@property
def enable_i2c_monitor(self):
"""Activate the I2C monitor.
Enabling the monitor will disable all other functions of the adapter.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_enable(self.handle)
_raise_error_if_negative(ret)
def disable_i2c_monitor(self):
"""Disable the I2C monitor.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_monitor_read(self):
"""Retrieved any data fetched by the monitor.
This function has an integrated timeout mechanism. You should use
:func:`poll` to determine if there is any data available.
Returns a list of data bytes and special symbols. There are three
special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
I2C_MONITOR_STOP.
"""
data = array.array('H', (0,) * self.BUFFER_SIZE)
ret = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
data)
_raise_error_if_negative(ret)
del data[ret:]
return data.tolist()
@property
def spi_bitrate(self):
"""SPI bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set. The slowest bitrate supported is 125kHz. Any smaller value will
be rounded up to 125kHz.
The power-on default value is 1000 kHz.
"""
ret = api.py_aa_spi_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@spi_bitrate.setter
def spi_bitrate(self, value):
ret = api.py_aa_spi_bitrate(self.handle, value)
_raise_error_if_negative(ret)
def spi_configure(self, polarity, phase, bitorder):
"""Configure the SPI interface."""
ret = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
_raise_error_if_negative(ret)
def spi_configure_mode(self, spi_mode):
"""Configure the SPI interface by the well known SPI modes."""
if spi_mode == SPI_MODE_0:
self.spi_configure(SPI_POL_RISING_FALLING,
SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)
elif spi_mode == SPI_MODE_3:
self.spi_configure(SPI_POL_FALLING_RISING,
SPI_PHASE_SETUP_SAMPLE, SPI_BITORDER_MSB)
else:
raise RuntimeError('SPI Mode not supported')
def spi_write(self, data):
"""Write a stream of bytes to a SPI device."""
data_out = array.array('B', data)
data_in = array.array('B', (0,) * len(data_out))
ret = api.py_aa_spi_write(self.handle, len(data_out), data_out,
len(data_in), data_in)
_raise_error_if_negative(ret)
return bytes(data_in)
def spi_ss_polarity(self, polarity):
"""Change the ouput polarity on the SS line.
Please note, that this only affects the master functions.
"""
ret = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
_raise_error_if_negative(ret)
|
kontron/python-aardvark | pyaardvark/aardvark.py | Aardvark.i2c_monitor_read | python | def i2c_monitor_read(self):
data = array.array('H', (0,) * self.BUFFER_SIZE)
ret = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
data)
_raise_error_if_negative(ret)
del data[ret:]
return data.tolist() | Retrieved any data fetched by the monitor.
This function has an integrated timeout mechanism. You should use
:func:`poll` to determine if there is any data available.
Returns a list of data bytes and special symbols. There are three
special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
I2C_MONITOR_STOP. | train | https://github.com/kontron/python-aardvark/blob/9827f669fbdc5bceb98e7d08a294b4e4e455d0d5/pyaardvark/aardvark.py#L559-L575 | null | class Aardvark(object):
"""Represents an Aardvark device."""
BUFFER_SIZE = 65535
def __init__(self, port=0):
ret, ver = api.py_aa_open_ext(port)
_raise_error_if_negative(ret)
#: A handle which is used as the first paramter for all calls to the
#: underlying API.
self.handle = ret
# assign some useful names
version = dict(
software = ver[0],
firmware = ver[1],
hardware = ver[2],
sw_req_by_fw = ver[3],
fw_req_by_sw = ver[4],
api_req_by_sw = ver[5],
)
#: Hardware revision of the host adapter as a string. The format is
#: ``M.NN`` where `M` is the major number and `NN` the zero padded
#: minor number.
self.hardware_revision = _to_version_str(version['hardware'])
#: Firmware version of the host adapter as a string. See
#: :attr:`hardware_revision` for more information on the format.
self.firmware_version = _to_version_str(version['firmware'])
#: Version of underlying C module (aardvark.so, aardvark.pyd) as a
#: string. See :attr:`hardware_revision` for more information on the
#: format.
self.api_version = _to_version_str(version['software'])
# version checks
if version['firmware'] < version['fw_req_by_sw']:
log.debug('The API requires a firmware version >= %s, but the '
'device has version %s',
_to_version_str(version['fw_req_by_sw']),
_to_version_str(version['firmware']))
ret = ERR_INCOMPATIBLE_DEVICE
elif version['software'] < version['sw_req_by_fw']:
log.debug('The firmware requires an API version >= %s, but the '
'API has version %s',
_to_version_str(version['sw_req_by_fw']),
_to_version_str(version['software']))
ret = ERR_INCOMPATIBLE_LIBRARY
_raise_error_if_negative(ret)
# Initialize shadow variables
self._i2c_slave_response = None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
return False
def close(self):
"""Close the device."""
api.py_aa_close(self.handle)
self.handle = None
def unique_id(self):
"""Return the unique identifier of the device. The identifier is the
serial number you can find on the adapter without the dash. Eg. the
serial number 0012-345678 would be 12345678.
"""
return api.py_aa_unique_id(self.handle)
def unique_id_str(self):
"""Return the unique identifier. But unlike :func:`unique_id`, the ID
is returned as a string which has the format NNNN-MMMMMMM.
"""
return _unique_id_str(self.unique_id())
def _interface_configuration(self, value):
ret = api.py_aa_configure(self.handle, value)
_raise_error_if_negative(ret)
return ret
@property
def enable_i2c(self):
"""Set this to `True` to enable the hardware I2C interface. If set to
`False` the hardware interface will be disabled and its pins (SDA and
SCL) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C
@enable_i2c.setter
def enable_i2c(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_GPIO_I2C
elif value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_SPI_GPIO
if new_config != config:
self._interface_configuration(new_config)
@property
def enable_spi(self):
"""Set this to `True` to enable the hardware SPI interface. If set to
`False` the hardware interface will be disabled and its pins (MISO,
MOSI, SCK and SS) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_SPI_GPIO or config == CONFIG_SPI_I2C
@enable_spi.setter
def enable_spi(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_SPI_GPIO
elif value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_GPIO_I2C
if new_config != config:
self._interface_configuration(new_config)
@property
def i2c_bitrate(self):
    """I2C bitrate in kHz.

    Not every bitrate is supported by the host adapter, so the actual
    bitrate may be lower than the value which was set; read the
    property back to obtain the effective rate.

    The power-on default value is 100 kHz.
    """
    rate = api.py_aa_i2c_bitrate(self.handle, 0)
    _raise_error_if_negative(rate)
    return rate

@i2c_bitrate.setter
def i2c_bitrate(self, value):
    _raise_error_if_negative(api.py_aa_i2c_bitrate(self.handle, value))
@property
def i2c_pullups(self):
    """I2C pullup resistor state.

    Assign `True` to enable both pullup resistors, `False` to disable
    them.  Reading returns the value reported by the adapter, which is
    truthy when pullups are active (NOTE(review): this appears to be
    the raw pullup mask rather than a strict bool — confirm against the
    Aardvark API).

    Raises an :exc:`IOError` if the hardware adapter does not support
    pullup resistors.
    """
    state = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)
    _raise_error_if_negative(state)
    return state

@i2c_pullups.setter
def i2c_pullups(self, value):
    mask = I2C_PULLUP_BOTH if value else I2C_PULLUP_NONE
    _raise_error_if_negative(api.py_aa_i2c_pullup(self.handle, mask))
@property
def target_power(self):
    """Switchable target power state.

    Assign `True` to activate the power pins (4 and 6), `False` to
    deactivate them.  Reading returns the value reported by the
    adapter, truthy while power is active.

    Raises an :exc:`IOError` if the hardware adapter does not support
    the switchable power pins.
    """
    state = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)
    _raise_error_if_negative(state)
    return state

@target_power.setter
def target_power(self, value):
    mask = TARGET_POWER_BOTH if value else TARGET_POWER_NONE
    _raise_error_if_negative(api.py_aa_target_power(self.handle, mask))
@property
def i2c_bus_timeout(self):
    """I2C bus lock timeout in ms.

    The minimum value is 10 ms and the maximum value is 450 ms.  Not
    every value can be set; the device rounds to the next possible
    number, so read the property back to get the actual value.

    The power-on default value is 200 ms.
    """
    value = api.py_aa_i2c_bus_timeout(self.handle, 0)
    _raise_error_if_negative(value)
    return value

@i2c_bus_timeout.setter
def i2c_bus_timeout(self, timeout):
    _raise_error_if_negative(api.py_aa_i2c_bus_timeout(self.handle, timeout))
def i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):
    """Perform an I2C master write access.

    The given I2C device is addressed and *data* (any iterable of
    bytes) is written.  The transaction ends with an I2C stop condition
    unless ``I2C_NO_STOP`` is set in *flags*.

    10 bit addresses are supported if the ``I2C_10_BIT_ADDR`` flag is
    set.
    """
    payload = array.array('B', data)
    status, _ = api.py_aa_i2c_write_ext(
        self.handle, i2c_address, flags, len(payload), payload)
    _raise_i2c_status_code_error_if_failure(status)
def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):
    """Perform an I2C master read access.

    The given I2C device is addressed and clock cycles for *length*
    bytes are generated.  A short read will occur if the device
    generates an early NAK.

    The transaction ends with an I2C stop condition unless the
    ``I2C_NO_STOP`` flag is set.
    """
    buf = array.array('B', [0] * length)
    status, rx_len = api.py_aa_i2c_read_ext(
        self.handle, addr, flags, length, buf)
    _raise_i2c_status_code_error_if_failure(status)
    # Only rx_len bytes were actually received; drop the rest.
    return bytes(buf[:rx_len])
def i2c_master_write_read(self, i2c_address, data, length):
    """Perform a combined I2C write/read access.

    First an I2C write access is issued without a stop condition; the
    subsequent read then begins with a repeated start.  This is the
    usual access pattern for addressable I2C devices such as EEPROMs
    and port expanders.

    Convenience wrapper around `i2c_master_write` and
    `i2c_master_read`.
    """
    self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
    return self.i2c_master_read(i2c_address, length)
def poll(self, timeout=None):
    """Wait for an event to occur.

    If *timeout* is given, it specifies the length of time in
    milliseconds which the function will wait for events before
    returning.  If *timeout* is omitted, negative or None, the call
    blocks until there is an event.

    Returns a list of pending events; an empty list if none are
    pending.
    """
    if timeout is None:
        timeout = -1  # the underlying API blocks on negative timeouts
    pending = api.py_aa_async_poll(self.handle, timeout)
    _raise_error_if_negative(pending)
    return [event
            for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
                          POLL_I2C_MONITOR)
            if pending & event]
def enable_i2c_slave(self, slave_address):
    """Enable I2C slave mode.

    The device will respond when addressed as *slave_address*.  Wait
    for incoming data with :func:`poll` and fetch it with
    `i2c_slave_read`.
    """
    status = api.py_aa_i2c_slave_enable(
        self.handle, slave_address, self.BUFFER_SIZE, self.BUFFER_SIZE)
    _raise_error_if_negative(status)
def disable_i2c_slave(self):
    """Disable I2C slave mode."""
    _raise_error_if_negative(api.py_aa_i2c_slave_disable(self.handle))
def i2c_slave_read(self):
    """Read the bytes from an I2C slave reception.

    Returns an ``(address, data)`` tuple, where *data* is a bytes
    object.
    """
    buf = array.array('B', (0,) * self.BUFFER_SIZE)
    status, addr, rx_len = api.py_aa_i2c_slave_read_ext(
        self.handle, self.BUFFER_SIZE, buf)
    _raise_i2c_status_code_error_if_failure(status)
    # The API reports a general call as 0x80; translate it back to the
    # actual general call address 0x00.
    if addr == 0x80:
        addr = 0x00
    return (addr, bytes(buf[:rx_len]))
@property
def i2c_slave_response(self):
    """Response sent to the master on the next read command.

    An array of bytes that will be transmitted to the I2C master with
    the next read operation.

    Warning: the Aardvark API provides no way to read this value back
    from the device, so the property returns the value buffered when it
    was last set — which might not match what is actually stored in the
    device.
    """
    return self._i2c_slave_response

@i2c_slave_response.setter
def i2c_slave_response(self, data):
    response = array.array('B', data)
    status = api.py_aa_i2c_slave_set_response(
        self.handle, len(response), response)
    _raise_error_if_negative(status)
    # Buffer a shadow copy; see the property docstring.
    self._i2c_slave_response = response
@property
def i2c_slave_last_transmit_size(self):
    """Number of bytes transmitted by the slave."""
    count = api.py_aa_i2c_slave_write_stats(self.handle)
    _raise_error_if_negative(count)
    return count
def enable_i2c_monitor(self):
    """Activate the I2C monitor.

    Enabling the monitor disables all other functions of the adapter.

    Raises an :exc:`IOError` if the hardware adapter does not support
    monitor mode.
    """
    _raise_error_if_negative(api.py_aa_i2c_monitor_enable(self.handle))
def disable_i2c_monitor(self):
    """Disable the I2C monitor.

    Raises an :exc:`IOError` if the hardware adapter does not support
    monitor mode.
    """
    _raise_error_if_negative(api.py_aa_i2c_monitor_disable(self.handle))
@property
def spi_bitrate(self):
    """SPI bitrate in kHz.

    Not every bitrate is supported by the host adapter, so the actual
    bitrate may be lower than the value which was set.  The slowest
    supported bitrate is 125 kHz; any smaller value is rounded up to
    125 kHz.

    The power-on default value is 1000 kHz.
    """
    rate = api.py_aa_spi_bitrate(self.handle, 0)
    _raise_error_if_negative(rate)
    return rate

@spi_bitrate.setter
def spi_bitrate(self, value):
    _raise_error_if_negative(api.py_aa_spi_bitrate(self.handle, value))
def spi_configure(self, polarity, phase, bitorder):
    """Configure the SPI interface (clock polarity, phase, bit order)."""
    status = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
    _raise_error_if_negative(status)
def spi_configure_mode(self, spi_mode):
    """Configure the SPI interface by the well known SPI modes.

    Only ``SPI_MODE_0`` and ``SPI_MODE_3`` are accepted; any other mode
    raises a :exc:`RuntimeError`.
    """
    # Polarity/phase pairs for the supported modes; both use MSB-first
    # bit order.
    settings = {
        SPI_MODE_0: (SPI_POL_RISING_FALLING, SPI_PHASE_SAMPLE_SETUP),
        SPI_MODE_3: (SPI_POL_FALLING_RISING, SPI_PHASE_SETUP_SAMPLE),
    }
    if spi_mode not in settings:
        raise RuntimeError('SPI Mode not supported')
    polarity, phase = settings[spi_mode]
    self.spi_configure(polarity, phase, SPI_BITORDER_MSB)
def spi_write(self, data):
    """Write a stream of bytes to a SPI device.

    Returns the bytes simultaneously shifted in from the device.
    """
    tx = array.array('B', data)
    rx = array.array('B', (0,) * len(tx))
    status = api.py_aa_spi_write(self.handle, len(tx), tx, len(rx), rx)
    _raise_error_if_negative(status)
    return bytes(rx)
def spi_ss_polarity(self, polarity):
    """Change the output polarity on the SS line.

    Please note, that this only affects the master functions.
    """
    status = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
    _raise_error_if_negative(status)
|
kontron/python-aardvark | pyaardvark/aardvark.py | Aardvark.spi_bitrate | python | def spi_bitrate(self):
ret = api.py_aa_spi_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret | SPI bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set. The slowest bitrate supported is 125kHz. Any smaller value will
be rounded up to 125kHz.
The power-on default value is 1000 kHz. | train | https://github.com/kontron/python-aardvark/blob/9827f669fbdc5bceb98e7d08a294b4e4e455d0d5/pyaardvark/aardvark.py#L578-L588 | null | class Aardvark(object):
"""Represents an Aardvark device."""
BUFFER_SIZE = 65535
def __init__(self, port=0):
ret, ver = api.py_aa_open_ext(port)
_raise_error_if_negative(ret)
#: A handle which is used as the first paramter for all calls to the
#: underlying API.
self.handle = ret
# assign some useful names
version = dict(
software = ver[0],
firmware = ver[1],
hardware = ver[2],
sw_req_by_fw = ver[3],
fw_req_by_sw = ver[4],
api_req_by_sw = ver[5],
)
#: Hardware revision of the host adapter as a string. The format is
#: ``M.NN`` where `M` is the major number and `NN` the zero padded
#: minor number.
self.hardware_revision = _to_version_str(version['hardware'])
#: Firmware version of the host adapter as a string. See
#: :attr:`hardware_revision` for more information on the format.
self.firmware_version = _to_version_str(version['firmware'])
#: Version of underlying C module (aardvark.so, aardvark.pyd) as a
#: string. See :attr:`hardware_revision` for more information on the
#: format.
self.api_version = _to_version_str(version['software'])
# version checks
if version['firmware'] < version['fw_req_by_sw']:
log.debug('The API requires a firmware version >= %s, but the '
'device has version %s',
_to_version_str(version['fw_req_by_sw']),
_to_version_str(version['firmware']))
ret = ERR_INCOMPATIBLE_DEVICE
elif version['software'] < version['sw_req_by_fw']:
log.debug('The firmware requires an API version >= %s, but the '
'API has version %s',
_to_version_str(version['sw_req_by_fw']),
_to_version_str(version['software']))
ret = ERR_INCOMPATIBLE_LIBRARY
_raise_error_if_negative(ret)
# Initialize shadow variables
self._i2c_slave_response = None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
return False
def close(self):
"""Close the device."""
api.py_aa_close(self.handle)
self.handle = None
def unique_id(self):
"""Return the unique identifier of the device. The identifier is the
serial number you can find on the adapter without the dash. Eg. the
serial number 0012-345678 would be 12345678.
"""
return api.py_aa_unique_id(self.handle)
def unique_id_str(self):
"""Return the unique identifier. But unlike :func:`unique_id`, the ID
is returned as a string which has the format NNNN-MMMMMMM.
"""
return _unique_id_str(self.unique_id())
def _interface_configuration(self, value):
ret = api.py_aa_configure(self.handle, value)
_raise_error_if_negative(ret)
return ret
@property
def enable_i2c(self):
"""Set this to `True` to enable the hardware I2C interface. If set to
`False` the hardware interface will be disabled and its pins (SDA and
SCL) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C
@enable_i2c.setter
def enable_i2c(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_GPIO_I2C
elif value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_SPI_GPIO
if new_config != config:
self._interface_configuration(new_config)
@property
def enable_spi(self):
"""Set this to `True` to enable the hardware SPI interface. If set to
`False` the hardware interface will be disabled and its pins (MISO,
MOSI, SCK and SS) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_SPI_GPIO or config == CONFIG_SPI_I2C
@enable_spi.setter
def enable_spi(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_SPI_GPIO
elif value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_GPIO_I2C
if new_config != config:
self._interface_configuration(new_config)
@property
def i2c_bitrate(self):
"""I2C bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set.
The power-on default value is 100 kHz.
"""
ret = api.py_aa_i2c_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bitrate.setter
def i2c_bitrate(self, value):
ret = api.py_aa_i2c_bitrate(self.handle, value)
_raise_error_if_negative(ret)
@property
def i2c_pullups(self):
"""Setting this to `True` will enable the I2C pullup resistors. If set
to `False` the pullup resistors will be disabled.
Raises an :exc:`IOError` if the hardware adapter does not support
pullup resistors.
"""
ret = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)
_raise_error_if_negative(ret)
return ret
@i2c_pullups.setter
def i2c_pullups(self, value):
if value:
pullup = I2C_PULLUP_BOTH
else:
pullup = I2C_PULLUP_NONE
ret = api.py_aa_i2c_pullup(self.handle, pullup)
_raise_error_if_negative(ret)
@property
def target_power(self):
"""Setting this to `True` will activate the power pins (4 and 6). If
set to `False` the power will be deactivated.
Raises an :exc:`IOError` if the hardware adapter does not support
the switchable power pins.
"""
ret = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)
_raise_error_if_negative(ret)
return ret
@target_power.setter
def target_power(self, value):
if value:
power = TARGET_POWER_BOTH
else:
power = TARGET_POWER_NONE
ret = api.py_aa_target_power(self.handle, power)
_raise_error_if_negative(ret)
@property
def i2c_bus_timeout(self):
"""I2C bus lock timeout in ms.
Minimum value is 10 ms and the maximum value is 450 ms. Not every value
can be set and will be rounded to the next possible number. You can
read back the property to get the actual value.
The power-on default value is 200 ms.
"""
ret = api.py_aa_i2c_bus_timeout(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bus_timeout.setter
def i2c_bus_timeout(self, timeout):
ret = api.py_aa_i2c_bus_timeout(self.handle, timeout)
_raise_error_if_negative(ret)
def i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):
"""Make an I2C write access.
The given I2C device is addressed and data given as a string is
written. The transaction is finished with an I2C stop condition unless
I2C_NO_STOP is set in the flags.
10 bit addresses are supported if the I2C_10_BIT_ADDR flag is set.
"""
data = array.array('B', data)
status, _ = api.py_aa_i2c_write_ext(self.handle, i2c_address, flags,
len(data), data)
_raise_i2c_status_code_error_if_failure(status)
def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):
"""Make an I2C read access.
The given I2C device is addressed and clock cycles for `length` bytes
are generated. A short read will occur if the device generates an early
NAK.
The transaction is finished with an I2C stop condition unless the
I2C_NO_STOP flag is set.
"""
data = array.array('B', (0,) * length)
status, rx_len = api.py_aa_i2c_read_ext(self.handle, addr, flags,
length, data)
_raise_i2c_status_code_error_if_failure(status)
del data[rx_len:]
return bytes(data)
def i2c_master_write_read(self, i2c_address, data, length):
"""Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices like
EEPROMs, port expander, etc.
Basically, this is just a convenient function which internally uses
`i2c_master_write` and `i2c_master_read`.
"""
self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
return self.i2c_master_read(i2c_address, length)
def poll(self, timeout=None):
"""Wait for an event to occur.
If `timeout` is given, if specifies the length of time in milliseconds
which the function will wait for events before returing. If `timeout`
is omitted, negative or None, the call will block until there is an
event.
Returns a list of events. In case no event is pending, an empty list is
returned.
"""
if timeout is None:
timeout = -1
ret = api.py_aa_async_poll(self.handle, timeout)
_raise_error_if_negative(ret)
events = list()
for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
POLL_I2C_MONITOR):
if ret & event:
events.append(event)
return events
def enable_i2c_slave(self, slave_address):
"""Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
"""
ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,
self.BUFFER_SIZE, self.BUFFER_SIZE)
_raise_error_if_negative(ret)
def disable_i2c_slave(self):
"""Disable I2C slave mode."""
ret = api.py_aa_i2c_slave_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_slave_read(self):
"""Read the bytes from an I2C slave reception.
The bytes are returned as a string object.
"""
data = array.array('B', (0,) * self.BUFFER_SIZE)
status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,
self.BUFFER_SIZE, data)
_raise_i2c_status_code_error_if_failure(status)
# In case of general call, actually return the general call address
if addr == 0x80:
addr = 0x00
del data[rx_len:]
return (addr, bytes(data))
@property
def i2c_slave_response(self):
"""Response to next read command.
An array of bytes that will be transmitted to the I2C master with the
next read operation.
Warning: Due to the fact that the Aardvark API does not provide a means
to read out this value, it is buffered when setting the property.
Reading the property therefore might not return what is actually stored
in the device.
"""
return self._i2c_slave_response
@i2c_slave_response.setter
def i2c_slave_response(self, data):
data = array.array('B', data)
ret = api.py_aa_i2c_slave_set_response(self.handle, len(data), data)
_raise_error_if_negative(ret)
self._i2c_slave_response = data
@property
def i2c_slave_last_transmit_size(self):
"""Returns the number of bytes transmitted by the slave."""
ret = api.py_aa_i2c_slave_write_stats(self.handle)
_raise_error_if_negative(ret)
return ret
def enable_i2c_monitor(self):
"""Activate the I2C monitor.
Enabling the monitor will disable all other functions of the adapter.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_enable(self.handle)
_raise_error_if_negative(ret)
def disable_i2c_monitor(self):
"""Disable the I2C monitor.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_monitor_read(self):
"""Retrieved any data fetched by the monitor.
This function has an integrated timeout mechanism. You should use
:func:`poll` to determine if there is any data available.
Returns a list of data bytes and special symbols. There are three
special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
I2C_MONITOR_STOP.
"""
data = array.array('H', (0,) * self.BUFFER_SIZE)
ret = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
data)
_raise_error_if_negative(ret)
del data[ret:]
return data.tolist()
@property
@spi_bitrate.setter
def spi_bitrate(self, value):
ret = api.py_aa_spi_bitrate(self.handle, value)
_raise_error_if_negative(ret)
def spi_configure(self, polarity, phase, bitorder):
"""Configure the SPI interface."""
ret = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
_raise_error_if_negative(ret)
def spi_configure_mode(self, spi_mode):
"""Configure the SPI interface by the well known SPI modes."""
if spi_mode == SPI_MODE_0:
self.spi_configure(SPI_POL_RISING_FALLING,
SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)
elif spi_mode == SPI_MODE_3:
self.spi_configure(SPI_POL_FALLING_RISING,
SPI_PHASE_SETUP_SAMPLE, SPI_BITORDER_MSB)
else:
raise RuntimeError('SPI Mode not supported')
def spi_write(self, data):
"""Write a stream of bytes to a SPI device."""
data_out = array.array('B', data)
data_in = array.array('B', (0,) * len(data_out))
ret = api.py_aa_spi_write(self.handle, len(data_out), data_out,
len(data_in), data_in)
_raise_error_if_negative(ret)
return bytes(data_in)
def spi_ss_polarity(self, polarity):
"""Change the ouput polarity on the SS line.
Please note, that this only affects the master functions.
"""
ret = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
_raise_error_if_negative(ret)
|
kontron/python-aardvark | pyaardvark/aardvark.py | Aardvark.spi_configure | python | def spi_configure(self, polarity, phase, bitorder):
ret = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
_raise_error_if_negative(ret) | Configure the SPI interface. | train | https://github.com/kontron/python-aardvark/blob/9827f669fbdc5bceb98e7d08a294b4e4e455d0d5/pyaardvark/aardvark.py#L595-L598 | null | class Aardvark(object):
"""Represents an Aardvark device."""
BUFFER_SIZE = 65535
def __init__(self, port=0):
ret, ver = api.py_aa_open_ext(port)
_raise_error_if_negative(ret)
#: A handle which is used as the first paramter for all calls to the
#: underlying API.
self.handle = ret
# assign some useful names
version = dict(
software = ver[0],
firmware = ver[1],
hardware = ver[2],
sw_req_by_fw = ver[3],
fw_req_by_sw = ver[4],
api_req_by_sw = ver[5],
)
#: Hardware revision of the host adapter as a string. The format is
#: ``M.NN`` where `M` is the major number and `NN` the zero padded
#: minor number.
self.hardware_revision = _to_version_str(version['hardware'])
#: Firmware version of the host adapter as a string. See
#: :attr:`hardware_revision` for more information on the format.
self.firmware_version = _to_version_str(version['firmware'])
#: Version of underlying C module (aardvark.so, aardvark.pyd) as a
#: string. See :attr:`hardware_revision` for more information on the
#: format.
self.api_version = _to_version_str(version['software'])
# version checks
if version['firmware'] < version['fw_req_by_sw']:
log.debug('The API requires a firmware version >= %s, but the '
'device has version %s',
_to_version_str(version['fw_req_by_sw']),
_to_version_str(version['firmware']))
ret = ERR_INCOMPATIBLE_DEVICE
elif version['software'] < version['sw_req_by_fw']:
log.debug('The firmware requires an API version >= %s, but the '
'API has version %s',
_to_version_str(version['sw_req_by_fw']),
_to_version_str(version['software']))
ret = ERR_INCOMPATIBLE_LIBRARY
_raise_error_if_negative(ret)
# Initialize shadow variables
self._i2c_slave_response = None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
return False
def close(self):
"""Close the device."""
api.py_aa_close(self.handle)
self.handle = None
def unique_id(self):
"""Return the unique identifier of the device. The identifier is the
serial number you can find on the adapter without the dash. Eg. the
serial number 0012-345678 would be 12345678.
"""
return api.py_aa_unique_id(self.handle)
def unique_id_str(self):
"""Return the unique identifier. But unlike :func:`unique_id`, the ID
is returned as a string which has the format NNNN-MMMMMMM.
"""
return _unique_id_str(self.unique_id())
def _interface_configuration(self, value):
ret = api.py_aa_configure(self.handle, value)
_raise_error_if_negative(ret)
return ret
@property
def enable_i2c(self):
"""Set this to `True` to enable the hardware I2C interface. If set to
`False` the hardware interface will be disabled and its pins (SDA and
SCL) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C
@enable_i2c.setter
def enable_i2c(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_GPIO_I2C
elif value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_SPI_GPIO
if new_config != config:
self._interface_configuration(new_config)
@property
def enable_spi(self):
"""Set this to `True` to enable the hardware SPI interface. If set to
`False` the hardware interface will be disabled and its pins (MISO,
MOSI, SCK and SS) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_SPI_GPIO or config == CONFIG_SPI_I2C
@enable_spi.setter
def enable_spi(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_SPI_GPIO
elif value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_GPIO_I2C
if new_config != config:
self._interface_configuration(new_config)
@property
def i2c_bitrate(self):
"""I2C bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set.
The power-on default value is 100 kHz.
"""
ret = api.py_aa_i2c_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bitrate.setter
def i2c_bitrate(self, value):
ret = api.py_aa_i2c_bitrate(self.handle, value)
_raise_error_if_negative(ret)
@property
def i2c_pullups(self):
"""Setting this to `True` will enable the I2C pullup resistors. If set
to `False` the pullup resistors will be disabled.
Raises an :exc:`IOError` if the hardware adapter does not support
pullup resistors.
"""
ret = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)
_raise_error_if_negative(ret)
return ret
@i2c_pullups.setter
def i2c_pullups(self, value):
if value:
pullup = I2C_PULLUP_BOTH
else:
pullup = I2C_PULLUP_NONE
ret = api.py_aa_i2c_pullup(self.handle, pullup)
_raise_error_if_negative(ret)
@property
def target_power(self):
"""Setting this to `True` will activate the power pins (4 and 6). If
set to `False` the power will be deactivated.
Raises an :exc:`IOError` if the hardware adapter does not support
the switchable power pins.
"""
ret = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)
_raise_error_if_negative(ret)
return ret
@target_power.setter
def target_power(self, value):
if value:
power = TARGET_POWER_BOTH
else:
power = TARGET_POWER_NONE
ret = api.py_aa_target_power(self.handle, power)
_raise_error_if_negative(ret)
@property
def i2c_bus_timeout(self):
"""I2C bus lock timeout in ms.
Minimum value is 10 ms and the maximum value is 450 ms. Not every value
can be set and will be rounded to the next possible number. You can
read back the property to get the actual value.
The power-on default value is 200 ms.
"""
ret = api.py_aa_i2c_bus_timeout(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bus_timeout.setter
def i2c_bus_timeout(self, timeout):
ret = api.py_aa_i2c_bus_timeout(self.handle, timeout)
_raise_error_if_negative(ret)
def i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):
"""Make an I2C write access.
The given I2C device is addressed and data given as a string is
written. The transaction is finished with an I2C stop condition unless
I2C_NO_STOP is set in the flags.
10 bit addresses are supported if the I2C_10_BIT_ADDR flag is set.
"""
data = array.array('B', data)
status, _ = api.py_aa_i2c_write_ext(self.handle, i2c_address, flags,
len(data), data)
_raise_i2c_status_code_error_if_failure(status)
def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):
"""Make an I2C read access.
The given I2C device is addressed and clock cycles for `length` bytes
are generated. A short read will occur if the device generates an early
NAK.
The transaction is finished with an I2C stop condition unless the
I2C_NO_STOP flag is set.
"""
data = array.array('B', (0,) * length)
status, rx_len = api.py_aa_i2c_read_ext(self.handle, addr, flags,
length, data)
_raise_i2c_status_code_error_if_failure(status)
del data[rx_len:]
return bytes(data)
def i2c_master_write_read(self, i2c_address, data, length):
"""Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices like
EEPROMs, port expander, etc.
Basically, this is just a convenient function which internally uses
`i2c_master_write` and `i2c_master_read`.
"""
self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
return self.i2c_master_read(i2c_address, length)
def poll(self, timeout=None):
"""Wait for an event to occur.
If `timeout` is given, if specifies the length of time in milliseconds
which the function will wait for events before returing. If `timeout`
is omitted, negative or None, the call will block until there is an
event.
Returns a list of events. In case no event is pending, an empty list is
returned.
"""
if timeout is None:
timeout = -1
ret = api.py_aa_async_poll(self.handle, timeout)
_raise_error_if_negative(ret)
events = list()
for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
POLL_I2C_MONITOR):
if ret & event:
events.append(event)
return events
def enable_i2c_slave(self, slave_address):
"""Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
"""
ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,
self.BUFFER_SIZE, self.BUFFER_SIZE)
_raise_error_if_negative(ret)
def disable_i2c_slave(self):
"""Disable I2C slave mode."""
ret = api.py_aa_i2c_slave_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_slave_read(self):
"""Read the bytes from an I2C slave reception.
The bytes are returned as a string object.
"""
data = array.array('B', (0,) * self.BUFFER_SIZE)
status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,
self.BUFFER_SIZE, data)
_raise_i2c_status_code_error_if_failure(status)
# In case of general call, actually return the general call address
if addr == 0x80:
addr = 0x00
del data[rx_len:]
return (addr, bytes(data))
@property
def i2c_slave_response(self):
"""Response to next read command.
An array of bytes that will be transmitted to the I2C master with the
next read operation.
Warning: Due to the fact that the Aardvark API does not provide a means
to read out this value, it is buffered when setting the property.
Reading the property therefore might not return what is actually stored
in the device.
"""
return self._i2c_slave_response
@i2c_slave_response.setter
def i2c_slave_response(self, data):
data = array.array('B', data)
ret = api.py_aa_i2c_slave_set_response(self.handle, len(data), data)
_raise_error_if_negative(ret)
self._i2c_slave_response = data
@property
def i2c_slave_last_transmit_size(self):
"""Returns the number of bytes transmitted by the slave."""
ret = api.py_aa_i2c_slave_write_stats(self.handle)
_raise_error_if_negative(ret)
return ret
def enable_i2c_monitor(self):
"""Activate the I2C monitor.
Enabling the monitor will disable all other functions of the adapter.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_enable(self.handle)
_raise_error_if_negative(ret)
def disable_i2c_monitor(self):
"""Disable the I2C monitor.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_monitor_read(self):
"""Retrieved any data fetched by the monitor.
This function has an integrated timeout mechanism. You should use
:func:`poll` to determine if there is any data available.
Returns a list of data bytes and special symbols. There are three
special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
I2C_MONITOR_STOP.
"""
data = array.array('H', (0,) * self.BUFFER_SIZE)
ret = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
data)
_raise_error_if_negative(ret)
del data[ret:]
return data.tolist()
@property
def spi_bitrate(self):
"""SPI bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set. The slowest bitrate supported is 125kHz. Any smaller value will
be rounded up to 125kHz.
The power-on default value is 1000 kHz.
"""
ret = api.py_aa_spi_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@spi_bitrate.setter
def spi_bitrate(self, value):
ret = api.py_aa_spi_bitrate(self.handle, value)
_raise_error_if_negative(ret)
def spi_configure_mode(self, spi_mode):
"""Configure the SPI interface by the well known SPI modes."""
if spi_mode == SPI_MODE_0:
self.spi_configure(SPI_POL_RISING_FALLING,
SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)
elif spi_mode == SPI_MODE_3:
self.spi_configure(SPI_POL_FALLING_RISING,
SPI_PHASE_SETUP_SAMPLE, SPI_BITORDER_MSB)
else:
raise RuntimeError('SPI Mode not supported')
def spi_write(self, data):
"""Write a stream of bytes to a SPI device."""
data_out = array.array('B', data)
data_in = array.array('B', (0,) * len(data_out))
ret = api.py_aa_spi_write(self.handle, len(data_out), data_out,
len(data_in), data_in)
_raise_error_if_negative(ret)
return bytes(data_in)
def spi_ss_polarity(self, polarity):
"""Change the ouput polarity on the SS line.
Please note, that this only affects the master functions.
"""
ret = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
_raise_error_if_negative(ret)
|
kontron/python-aardvark | pyaardvark/aardvark.py | Aardvark.spi_configure_mode | python | def spi_configure_mode(self, spi_mode):
if spi_mode == SPI_MODE_0:
self.spi_configure(SPI_POL_RISING_FALLING,
SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)
elif spi_mode == SPI_MODE_3:
self.spi_configure(SPI_POL_FALLING_RISING,
SPI_PHASE_SETUP_SAMPLE, SPI_BITORDER_MSB)
else:
raise RuntimeError('SPI Mode not supported') | Configure the SPI interface by the well known SPI modes. | train | https://github.com/kontron/python-aardvark/blob/9827f669fbdc5bceb98e7d08a294b4e4e455d0d5/pyaardvark/aardvark.py#L600-L609 | null | class Aardvark(object):
"""Represents an Aardvark device."""
BUFFER_SIZE = 65535
def __init__(self, port=0):
ret, ver = api.py_aa_open_ext(port)
_raise_error_if_negative(ret)
#: A handle which is used as the first paramter for all calls to the
#: underlying API.
self.handle = ret
# assign some useful names
version = dict(
software = ver[0],
firmware = ver[1],
hardware = ver[2],
sw_req_by_fw = ver[3],
fw_req_by_sw = ver[4],
api_req_by_sw = ver[5],
)
#: Hardware revision of the host adapter as a string. The format is
#: ``M.NN`` where `M` is the major number and `NN` the zero padded
#: minor number.
self.hardware_revision = _to_version_str(version['hardware'])
#: Firmware version of the host adapter as a string. See
#: :attr:`hardware_revision` for more information on the format.
self.firmware_version = _to_version_str(version['firmware'])
#: Version of underlying C module (aardvark.so, aardvark.pyd) as a
#: string. See :attr:`hardware_revision` for more information on the
#: format.
self.api_version = _to_version_str(version['software'])
# version checks
if version['firmware'] < version['fw_req_by_sw']:
log.debug('The API requires a firmware version >= %s, but the '
'device has version %s',
_to_version_str(version['fw_req_by_sw']),
_to_version_str(version['firmware']))
ret = ERR_INCOMPATIBLE_DEVICE
elif version['software'] < version['sw_req_by_fw']:
log.debug('The firmware requires an API version >= %s, but the '
'API has version %s',
_to_version_str(version['sw_req_by_fw']),
_to_version_str(version['software']))
ret = ERR_INCOMPATIBLE_LIBRARY
_raise_error_if_negative(ret)
# Initialize shadow variables
self._i2c_slave_response = None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
return False
def close(self):
"""Close the device."""
api.py_aa_close(self.handle)
self.handle = None
def unique_id(self):
"""Return the unique identifier of the device. The identifier is the
serial number you can find on the adapter without the dash. Eg. the
serial number 0012-345678 would be 12345678.
"""
return api.py_aa_unique_id(self.handle)
def unique_id_str(self):
"""Return the unique identifier. But unlike :func:`unique_id`, the ID
is returned as a string which has the format NNNN-MMMMMMM.
"""
return _unique_id_str(self.unique_id())
def _interface_configuration(self, value):
ret = api.py_aa_configure(self.handle, value)
_raise_error_if_negative(ret)
return ret
@property
def enable_i2c(self):
"""Set this to `True` to enable the hardware I2C interface. If set to
`False` the hardware interface will be disabled and its pins (SDA and
SCL) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C
@enable_i2c.setter
def enable_i2c(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_GPIO_I2C
elif value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_SPI_GPIO
if new_config != config:
self._interface_configuration(new_config)
@property
def enable_spi(self):
"""Set this to `True` to enable the hardware SPI interface. If set to
`False` the hardware interface will be disabled and its pins (MISO,
MOSI, SCK and SS) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_SPI_GPIO or config == CONFIG_SPI_I2C
@enable_spi.setter
def enable_spi(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_SPI_GPIO
elif value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_GPIO_I2C
if new_config != config:
self._interface_configuration(new_config)
@property
def i2c_bitrate(self):
"""I2C bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set.
The power-on default value is 100 kHz.
"""
ret = api.py_aa_i2c_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bitrate.setter
def i2c_bitrate(self, value):
ret = api.py_aa_i2c_bitrate(self.handle, value)
_raise_error_if_negative(ret)
@property
def i2c_pullups(self):
"""Setting this to `True` will enable the I2C pullup resistors. If set
to `False` the pullup resistors will be disabled.
Raises an :exc:`IOError` if the hardware adapter does not support
pullup resistors.
"""
ret = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)
_raise_error_if_negative(ret)
return ret
@i2c_pullups.setter
def i2c_pullups(self, value):
if value:
pullup = I2C_PULLUP_BOTH
else:
pullup = I2C_PULLUP_NONE
ret = api.py_aa_i2c_pullup(self.handle, pullup)
_raise_error_if_negative(ret)
@property
def target_power(self):
"""Setting this to `True` will activate the power pins (4 and 6). If
set to `False` the power will be deactivated.
Raises an :exc:`IOError` if the hardware adapter does not support
the switchable power pins.
"""
ret = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)
_raise_error_if_negative(ret)
return ret
@target_power.setter
def target_power(self, value):
if value:
power = TARGET_POWER_BOTH
else:
power = TARGET_POWER_NONE
ret = api.py_aa_target_power(self.handle, power)
_raise_error_if_negative(ret)
@property
def i2c_bus_timeout(self):
"""I2C bus lock timeout in ms.
Minimum value is 10 ms and the maximum value is 450 ms. Not every value
can be set and will be rounded to the next possible number. You can
read back the property to get the actual value.
The power-on default value is 200 ms.
"""
ret = api.py_aa_i2c_bus_timeout(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bus_timeout.setter
def i2c_bus_timeout(self, timeout):
ret = api.py_aa_i2c_bus_timeout(self.handle, timeout)
_raise_error_if_negative(ret)
def i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):
"""Make an I2C write access.
The given I2C device is addressed and data given as a string is
written. The transaction is finished with an I2C stop condition unless
I2C_NO_STOP is set in the flags.
10 bit addresses are supported if the I2C_10_BIT_ADDR flag is set.
"""
data = array.array('B', data)
status, _ = api.py_aa_i2c_write_ext(self.handle, i2c_address, flags,
len(data), data)
_raise_i2c_status_code_error_if_failure(status)
def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):
"""Make an I2C read access.
The given I2C device is addressed and clock cycles for `length` bytes
are generated. A short read will occur if the device generates an early
NAK.
The transaction is finished with an I2C stop condition unless the
I2C_NO_STOP flag is set.
"""
data = array.array('B', (0,) * length)
status, rx_len = api.py_aa_i2c_read_ext(self.handle, addr, flags,
length, data)
_raise_i2c_status_code_error_if_failure(status)
del data[rx_len:]
return bytes(data)
def i2c_master_write_read(self, i2c_address, data, length):
"""Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices like
EEPROMs, port expander, etc.
Basically, this is just a convenient function which internally uses
`i2c_master_write` and `i2c_master_read`.
"""
self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
return self.i2c_master_read(i2c_address, length)
def poll(self, timeout=None):
"""Wait for an event to occur.
If `timeout` is given, if specifies the length of time in milliseconds
which the function will wait for events before returing. If `timeout`
is omitted, negative or None, the call will block until there is an
event.
Returns a list of events. In case no event is pending, an empty list is
returned.
"""
if timeout is None:
timeout = -1
ret = api.py_aa_async_poll(self.handle, timeout)
_raise_error_if_negative(ret)
events = list()
for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
POLL_I2C_MONITOR):
if ret & event:
events.append(event)
return events
def enable_i2c_slave(self, slave_address):
"""Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
"""
ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,
self.BUFFER_SIZE, self.BUFFER_SIZE)
_raise_error_if_negative(ret)
def disable_i2c_slave(self):
"""Disable I2C slave mode."""
ret = api.py_aa_i2c_slave_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_slave_read(self):
"""Read the bytes from an I2C slave reception.
The bytes are returned as a string object.
"""
data = array.array('B', (0,) * self.BUFFER_SIZE)
status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,
self.BUFFER_SIZE, data)
_raise_i2c_status_code_error_if_failure(status)
# In case of general call, actually return the general call address
if addr == 0x80:
addr = 0x00
del data[rx_len:]
return (addr, bytes(data))
@property
def i2c_slave_response(self):
"""Response to next read command.
An array of bytes that will be transmitted to the I2C master with the
next read operation.
Warning: Due to the fact that the Aardvark API does not provide a means
to read out this value, it is buffered when setting the property.
Reading the property therefore might not return what is actually stored
in the device.
"""
return self._i2c_slave_response
@i2c_slave_response.setter
def i2c_slave_response(self, data):
data = array.array('B', data)
ret = api.py_aa_i2c_slave_set_response(self.handle, len(data), data)
_raise_error_if_negative(ret)
self._i2c_slave_response = data
@property
def i2c_slave_last_transmit_size(self):
"""Returns the number of bytes transmitted by the slave."""
ret = api.py_aa_i2c_slave_write_stats(self.handle)
_raise_error_if_negative(ret)
return ret
def enable_i2c_monitor(self):
"""Activate the I2C monitor.
Enabling the monitor will disable all other functions of the adapter.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_enable(self.handle)
_raise_error_if_negative(ret)
def disable_i2c_monitor(self):
"""Disable the I2C monitor.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_monitor_read(self):
"""Retrieved any data fetched by the monitor.
This function has an integrated timeout mechanism. You should use
:func:`poll` to determine if there is any data available.
Returns a list of data bytes and special symbols. There are three
special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
I2C_MONITOR_STOP.
"""
data = array.array('H', (0,) * self.BUFFER_SIZE)
ret = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
data)
_raise_error_if_negative(ret)
del data[ret:]
return data.tolist()
@property
def spi_bitrate(self):
"""SPI bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set. The slowest bitrate supported is 125kHz. Any smaller value will
be rounded up to 125kHz.
The power-on default value is 1000 kHz.
"""
ret = api.py_aa_spi_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@spi_bitrate.setter
def spi_bitrate(self, value):
ret = api.py_aa_spi_bitrate(self.handle, value)
_raise_error_if_negative(ret)
def spi_configure(self, polarity, phase, bitorder):
"""Configure the SPI interface."""
ret = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
_raise_error_if_negative(ret)
def spi_write(self, data):
"""Write a stream of bytes to a SPI device."""
data_out = array.array('B', data)
data_in = array.array('B', (0,) * len(data_out))
ret = api.py_aa_spi_write(self.handle, len(data_out), data_out,
len(data_in), data_in)
_raise_error_if_negative(ret)
return bytes(data_in)
def spi_ss_polarity(self, polarity):
"""Change the ouput polarity on the SS line.
Please note, that this only affects the master functions.
"""
ret = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
_raise_error_if_negative(ret)
|
kontron/python-aardvark | pyaardvark/aardvark.py | Aardvark.spi_write | python | def spi_write(self, data):
data_out = array.array('B', data)
data_in = array.array('B', (0,) * len(data_out))
ret = api.py_aa_spi_write(self.handle, len(data_out), data_out,
len(data_in), data_in)
_raise_error_if_negative(ret)
return bytes(data_in) | Write a stream of bytes to a SPI device. | train | https://github.com/kontron/python-aardvark/blob/9827f669fbdc5bceb98e7d08a294b4e4e455d0d5/pyaardvark/aardvark.py#L611-L618 | null | class Aardvark(object):
"""Represents an Aardvark device."""
BUFFER_SIZE = 65535
def __init__(self, port=0):
ret, ver = api.py_aa_open_ext(port)
_raise_error_if_negative(ret)
#: A handle which is used as the first paramter for all calls to the
#: underlying API.
self.handle = ret
# assign some useful names
version = dict(
software = ver[0],
firmware = ver[1],
hardware = ver[2],
sw_req_by_fw = ver[3],
fw_req_by_sw = ver[4],
api_req_by_sw = ver[5],
)
#: Hardware revision of the host adapter as a string. The format is
#: ``M.NN`` where `M` is the major number and `NN` the zero padded
#: minor number.
self.hardware_revision = _to_version_str(version['hardware'])
#: Firmware version of the host adapter as a string. See
#: :attr:`hardware_revision` for more information on the format.
self.firmware_version = _to_version_str(version['firmware'])
#: Version of underlying C module (aardvark.so, aardvark.pyd) as a
#: string. See :attr:`hardware_revision` for more information on the
#: format.
self.api_version = _to_version_str(version['software'])
# version checks
if version['firmware'] < version['fw_req_by_sw']:
log.debug('The API requires a firmware version >= %s, but the '
'device has version %s',
_to_version_str(version['fw_req_by_sw']),
_to_version_str(version['firmware']))
ret = ERR_INCOMPATIBLE_DEVICE
elif version['software'] < version['sw_req_by_fw']:
log.debug('The firmware requires an API version >= %s, but the '
'API has version %s',
_to_version_str(version['sw_req_by_fw']),
_to_version_str(version['software']))
ret = ERR_INCOMPATIBLE_LIBRARY
_raise_error_if_negative(ret)
# Initialize shadow variables
self._i2c_slave_response = None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
return False
def close(self):
"""Close the device."""
api.py_aa_close(self.handle)
self.handle = None
def unique_id(self):
"""Return the unique identifier of the device. The identifier is the
serial number you can find on the adapter without the dash. Eg. the
serial number 0012-345678 would be 12345678.
"""
return api.py_aa_unique_id(self.handle)
def unique_id_str(self):
"""Return the unique identifier. But unlike :func:`unique_id`, the ID
is returned as a string which has the format NNNN-MMMMMMM.
"""
return _unique_id_str(self.unique_id())
def _interface_configuration(self, value):
ret = api.py_aa_configure(self.handle, value)
_raise_error_if_negative(ret)
return ret
@property
def enable_i2c(self):
"""Set this to `True` to enable the hardware I2C interface. If set to
`False` the hardware interface will be disabled and its pins (SDA and
SCL) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C
@enable_i2c.setter
def enable_i2c(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_GPIO_I2C
elif value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_SPI_GPIO
if new_config != config:
self._interface_configuration(new_config)
@property
def enable_spi(self):
"""Set this to `True` to enable the hardware SPI interface. If set to
`False` the hardware interface will be disabled and its pins (MISO,
MOSI, SCK and SS) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_SPI_GPIO or config == CONFIG_SPI_I2C
@enable_spi.setter
def enable_spi(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_SPI_GPIO
elif value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_GPIO_I2C
if new_config != config:
self._interface_configuration(new_config)
@property
def i2c_bitrate(self):
"""I2C bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set.
The power-on default value is 100 kHz.
"""
ret = api.py_aa_i2c_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bitrate.setter
def i2c_bitrate(self, value):
ret = api.py_aa_i2c_bitrate(self.handle, value)
_raise_error_if_negative(ret)
@property
def i2c_pullups(self):
"""Setting this to `True` will enable the I2C pullup resistors. If set
to `False` the pullup resistors will be disabled.
Raises an :exc:`IOError` if the hardware adapter does not support
pullup resistors.
"""
ret = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)
_raise_error_if_negative(ret)
return ret
@i2c_pullups.setter
def i2c_pullups(self, value):
if value:
pullup = I2C_PULLUP_BOTH
else:
pullup = I2C_PULLUP_NONE
ret = api.py_aa_i2c_pullup(self.handle, pullup)
_raise_error_if_negative(ret)
@property
def target_power(self):
"""Setting this to `True` will activate the power pins (4 and 6). If
set to `False` the power will be deactivated.
Raises an :exc:`IOError` if the hardware adapter does not support
the switchable power pins.
"""
ret = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)
_raise_error_if_negative(ret)
return ret
@target_power.setter
def target_power(self, value):
if value:
power = TARGET_POWER_BOTH
else:
power = TARGET_POWER_NONE
ret = api.py_aa_target_power(self.handle, power)
_raise_error_if_negative(ret)
@property
def i2c_bus_timeout(self):
"""I2C bus lock timeout in ms.
Minimum value is 10 ms and the maximum value is 450 ms. Not every value
can be set and will be rounded to the next possible number. You can
read back the property to get the actual value.
The power-on default value is 200 ms.
"""
ret = api.py_aa_i2c_bus_timeout(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@i2c_bus_timeout.setter
def i2c_bus_timeout(self, timeout):
ret = api.py_aa_i2c_bus_timeout(self.handle, timeout)
_raise_error_if_negative(ret)
def i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):
"""Make an I2C write access.
The given I2C device is addressed and data given as a string is
written. The transaction is finished with an I2C stop condition unless
I2C_NO_STOP is set in the flags.
10 bit addresses are supported if the I2C_10_BIT_ADDR flag is set.
"""
data = array.array('B', data)
status, _ = api.py_aa_i2c_write_ext(self.handle, i2c_address, flags,
len(data), data)
_raise_i2c_status_code_error_if_failure(status)
def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):
"""Make an I2C read access.
The given I2C device is addressed and clock cycles for `length` bytes
are generated. A short read will occur if the device generates an early
NAK.
The transaction is finished with an I2C stop condition unless the
I2C_NO_STOP flag is set.
"""
data = array.array('B', (0,) * length)
status, rx_len = api.py_aa_i2c_read_ext(self.handle, addr, flags,
length, data)
_raise_i2c_status_code_error_if_failure(status)
del data[rx_len:]
return bytes(data)
def i2c_master_write_read(self, i2c_address, data, length):
"""Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices like
EEPROMs, port expander, etc.
Basically, this is just a convenient function which internally uses
`i2c_master_write` and `i2c_master_read`.
"""
self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
return self.i2c_master_read(i2c_address, length)
def poll(self, timeout=None):
"""Wait for an event to occur.
If `timeout` is given, if specifies the length of time in milliseconds
which the function will wait for events before returing. If `timeout`
is omitted, negative or None, the call will block until there is an
event.
Returns a list of events. In case no event is pending, an empty list is
returned.
"""
if timeout is None:
timeout = -1
ret = api.py_aa_async_poll(self.handle, timeout)
_raise_error_if_negative(ret)
events = list()
for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
POLL_I2C_MONITOR):
if ret & event:
events.append(event)
return events
def enable_i2c_slave(self, slave_address):
"""Enable I2C slave mode.
The device will respond to the specified slave_address if it is
addressed.
You can wait for the data with :func:`poll` and get it with
`i2c_slave_read`.
"""
ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,
self.BUFFER_SIZE, self.BUFFER_SIZE)
_raise_error_if_negative(ret)
def disable_i2c_slave(self):
"""Disable I2C slave mode."""
ret = api.py_aa_i2c_slave_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_slave_read(self):
"""Read the bytes from an I2C slave reception.
The bytes are returned as a string object.
"""
data = array.array('B', (0,) * self.BUFFER_SIZE)
status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,
self.BUFFER_SIZE, data)
_raise_i2c_status_code_error_if_failure(status)
# In case of general call, actually return the general call address
if addr == 0x80:
addr = 0x00
del data[rx_len:]
return (addr, bytes(data))
@property
def i2c_slave_response(self):
"""Response to next read command.
An array of bytes that will be transmitted to the I2C master with the
next read operation.
Warning: Due to the fact that the Aardvark API does not provide a means
to read out this value, it is buffered when setting the property.
Reading the property therefore might not return what is actually stored
in the device.
"""
return self._i2c_slave_response
@i2c_slave_response.setter
def i2c_slave_response(self, data):
data = array.array('B', data)
ret = api.py_aa_i2c_slave_set_response(self.handle, len(data), data)
_raise_error_if_negative(ret)
self._i2c_slave_response = data
@property
def i2c_slave_last_transmit_size(self):
"""Returns the number of bytes transmitted by the slave."""
ret = api.py_aa_i2c_slave_write_stats(self.handle)
_raise_error_if_negative(ret)
return ret
def enable_i2c_monitor(self):
"""Activate the I2C monitor.
Enabling the monitor will disable all other functions of the adapter.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_enable(self.handle)
_raise_error_if_negative(ret)
def disable_i2c_monitor(self):
"""Disable the I2C monitor.
Raises an :exc:`IOError` if the hardware adapter does not support
monitor mode.
"""
ret = api.py_aa_i2c_monitor_disable(self.handle)
_raise_error_if_negative(ret)
def i2c_monitor_read(self):
"""Retrieved any data fetched by the monitor.
This function has an integrated timeout mechanism. You should use
:func:`poll` to determine if there is any data available.
Returns a list of data bytes and special symbols. There are three
special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
I2C_MONITOR_STOP.
"""
data = array.array('H', (0,) * self.BUFFER_SIZE)
ret = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
data)
_raise_error_if_negative(ret)
del data[ret:]
return data.tolist()
@property
def spi_bitrate(self):
"""SPI bitrate in kHz. Not every bitrate is supported by the host
adapter. Therefore, the actual bitrate may be less than the value which
is set. The slowest bitrate supported is 125kHz. Any smaller value will
be rounded up to 125kHz.
The power-on default value is 1000 kHz.
"""
ret = api.py_aa_spi_bitrate(self.handle, 0)
_raise_error_if_negative(ret)
return ret
@spi_bitrate.setter
def spi_bitrate(self, value):
ret = api.py_aa_spi_bitrate(self.handle, value)
_raise_error_if_negative(ret)
def spi_configure(self, polarity, phase, bitorder):
"""Configure the SPI interface."""
ret = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
_raise_error_if_negative(ret)
def spi_configure_mode(self, spi_mode):
"""Configure the SPI interface by the well known SPI modes."""
if spi_mode == SPI_MODE_0:
self.spi_configure(SPI_POL_RISING_FALLING,
SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)
elif spi_mode == SPI_MODE_3:
self.spi_configure(SPI_POL_FALLING_RISING,
SPI_PHASE_SETUP_SAMPLE, SPI_BITORDER_MSB)
else:
raise RuntimeError('SPI Mode not supported')
def spi_ss_polarity(self, polarity):
"""Change the ouput polarity on the SS line.
Please note, that this only affects the master functions.
"""
ret = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
_raise_error_if_negative(ret)
|
kontron/python-aardvark | pyaardvark/aardvark.py | Aardvark.spi_ss_polarity | python | def spi_ss_polarity(self, polarity):
ret = api.py_aa_spi_master_ss_polarity(self.handle, polarity)
_raise_error_if_negative(ret) | Change the ouput polarity on the SS line.
Please note, that this only affects the master functions. | train | https://github.com/kontron/python-aardvark/blob/9827f669fbdc5bceb98e7d08a294b4e4e455d0d5/pyaardvark/aardvark.py#L620-L626 | null | class Aardvark(object):
"""Represents an Aardvark device."""
BUFFER_SIZE = 65535
def __init__(self, port=0):
ret, ver = api.py_aa_open_ext(port)
_raise_error_if_negative(ret)
#: A handle which is used as the first paramter for all calls to the
#: underlying API.
self.handle = ret
# assign some useful names
version = dict(
software = ver[0],
firmware = ver[1],
hardware = ver[2],
sw_req_by_fw = ver[3],
fw_req_by_sw = ver[4],
api_req_by_sw = ver[5],
)
#: Hardware revision of the host adapter as a string. The format is
#: ``M.NN`` where `M` is the major number and `NN` the zero padded
#: minor number.
self.hardware_revision = _to_version_str(version['hardware'])
#: Firmware version of the host adapter as a string. See
#: :attr:`hardware_revision` for more information on the format.
self.firmware_version = _to_version_str(version['firmware'])
#: Version of underlying C module (aardvark.so, aardvark.pyd) as a
#: string. See :attr:`hardware_revision` for more information on the
#: format.
self.api_version = _to_version_str(version['software'])
# version checks
if version['firmware'] < version['fw_req_by_sw']:
log.debug('The API requires a firmware version >= %s, but the '
'device has version %s',
_to_version_str(version['fw_req_by_sw']),
_to_version_str(version['firmware']))
ret = ERR_INCOMPATIBLE_DEVICE
elif version['software'] < version['sw_req_by_fw']:
log.debug('The firmware requires an API version >= %s, but the '
'API has version %s',
_to_version_str(version['sw_req_by_fw']),
_to_version_str(version['software']))
ret = ERR_INCOMPATIBLE_LIBRARY
_raise_error_if_negative(ret)
# Initialize shadow variables
self._i2c_slave_response = None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
return False
def close(self):
"""Close the device."""
api.py_aa_close(self.handle)
self.handle = None
def unique_id(self):
"""Return the unique identifier of the device. The identifier is the
serial number you can find on the adapter without the dash. Eg. the
serial number 0012-345678 would be 12345678.
"""
return api.py_aa_unique_id(self.handle)
def unique_id_str(self):
"""Return the unique identifier. But unlike :func:`unique_id`, the ID
is returned as a string which has the format NNNN-MMMMMMM.
"""
return _unique_id_str(self.unique_id())
def _interface_configuration(self, value):
ret = api.py_aa_configure(self.handle, value)
_raise_error_if_negative(ret)
return ret
@property
def enable_i2c(self):
"""Set this to `True` to enable the hardware I2C interface. If set to
`False` the hardware interface will be disabled and its pins (SDA and
SCL) can be used as GPIOs.
"""
config = self._interface_configuration(CONFIG_QUERY)
return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C
@enable_i2c.setter
def enable_i2c(self, value):
new_config = config = self._interface_configuration(CONFIG_QUERY)
if value and config == CONFIG_GPIO_ONLY:
new_config = CONFIG_GPIO_I2C
elif value and config == CONFIG_SPI_GPIO:
new_config = CONFIG_SPI_I2C
elif not value and config == CONFIG_GPIO_I2C:
new_config = CONFIG_GPIO_ONLY
elif not value and config == CONFIG_SPI_I2C:
new_config = CONFIG_SPI_GPIO
if new_config != config:
self._interface_configuration(new_config)
@property
def enable_spi(self):
    """Whether the hardware SPI interface is enabled.

    Set this to `True` to enable the hardware SPI interface. If set to
    `False` the hardware interface will be disabled and its pins (MISO,
    MOSI, SCK and SS) can be used as GPIOs.
    """
    current = self._interface_configuration(CONFIG_QUERY)
    return current in (CONFIG_SPI_GPIO, CONFIG_SPI_I2C)

@enable_spi.setter
def enable_spi(self, value):
    current = self._interface_configuration(CONFIG_QUERY)
    # Map (requested SPI state, current config) to the new config.
    # Combinations that are not listed already have the requested state.
    transitions = {
        (True, CONFIG_GPIO_ONLY): CONFIG_SPI_GPIO,
        (True, CONFIG_GPIO_I2C): CONFIG_SPI_I2C,
        (False, CONFIG_SPI_GPIO): CONFIG_GPIO_ONLY,
        (False, CONFIG_SPI_I2C): CONFIG_GPIO_I2C,
    }
    wanted = transitions.get((bool(value), current), current)
    if wanted != current:
        self._interface_configuration(wanted)
@property
def i2c_bitrate(self):
    """I2C bitrate in kHz. Not every bitrate is supported by the host
    adapter. Therefore, the actual bitrate may be less than the value
    which is set.

    The power-on default value is 100 kHz.
    """
    # NOTE(review): a value of 0 appears to query the current bitrate
    # without changing it (same pattern as the other getters here) --
    # confirm against the Aardvark API documentation.
    ret = api.py_aa_i2c_bitrate(self.handle, 0)
    _raise_error_if_negative(ret)
    return ret

@i2c_bitrate.setter
def i2c_bitrate(self, value):
    # Negative return codes from the C layer are raised as errors.
    ret = api.py_aa_i2c_bitrate(self.handle, value)
    _raise_error_if_negative(ret)
@property
def i2c_pullups(self):
    """State of the I2C pullup resistors.

    Setting this to `True` will enable the I2C pullup resistors. If set
    to `False` the pullup resistors will be disabled.

    Raises an :exc:`IOError` if the hardware adapter does not support
    pullup resistors.
    """
    state = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)
    _raise_error_if_negative(state)
    return state

@i2c_pullups.setter
def i2c_pullups(self, value):
    # Pullups are switched on both lines together or not at all.
    mode = I2C_PULLUP_BOTH if value else I2C_PULLUP_NONE
    result = api.py_aa_i2c_pullup(self.handle, mode)
    _raise_error_if_negative(result)
@property
def target_power(self):
    """State of the switchable power pins.

    Setting this to `True` will activate the power pins (4 and 6). If
    set to `False` the power will be deactivated.

    Raises an :exc:`IOError` if the hardware adapter does not support
    the switchable power pins.
    """
    state = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)
    _raise_error_if_negative(state)
    return state

@target_power.setter
def target_power(self, value):
    # Both power pins are switched together.
    mode = TARGET_POWER_BOTH if value else TARGET_POWER_NONE
    result = api.py_aa_target_power(self.handle, mode)
    _raise_error_if_negative(result)
@property
def i2c_bus_timeout(self):
    """I2C bus lock timeout in ms.

    Minimum value is 10 ms and the maximum value is 450 ms. Not every
    value can be set and will be rounded to the next possible number.
    You can read back the property to get the actual value.

    The power-on default value is 200 ms.
    """
    # NOTE(review): a value of 0 appears to query the current timeout
    # without changing it -- confirm against the Aardvark API docs.
    ret = api.py_aa_i2c_bus_timeout(self.handle, 0)
    _raise_error_if_negative(ret)
    return ret

@i2c_bus_timeout.setter
def i2c_bus_timeout(self, timeout):
    ret = api.py_aa_i2c_bus_timeout(self.handle, timeout)
    _raise_error_if_negative(ret)
def i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):
    """Make an I2C write access.

    The given I2C device is addressed and data given as a string is
    written. The transaction is finished with an I2C stop condition
    unless I2C_NO_STOP is set in the flags.

    10 bit addresses are supported if the I2C_10_BIT_ADDR flag is set.
    """
    # Copy the payload into a byte array so the C layer receives a
    # contiguous buffer of unsigned bytes.
    data = array.array('B', data)
    status, _ = api.py_aa_i2c_write_ext(self.handle, i2c_address, flags,
                                        len(data), data)
    _raise_i2c_status_code_error_if_failure(status)
def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):
    """Make an I2C read access.

    The given I2C device is addressed and clock cycles for `length`
    bytes are generated. A short read will occur if the device
    generates an early NAK.

    The transaction is finished with an I2C stop condition unless the
    I2C_NO_STOP flag is set.
    """
    buf = array.array('B', (0,) * length)
    status, rx_len = api.py_aa_i2c_read_ext(self.handle, addr, flags,
                                            length, buf)
    _raise_i2c_status_code_error_if_failure(status)
    # A short read leaves rx_len < length; return only what arrived.
    return bytes(buf[:rx_len])
def i2c_master_write_read(self, i2c_address, data, length):
    """Make an I2C write/read access.

    First an I2C write access is issued. No stop condition will be
    generated. Instead the read access begins with a repeated start.

    This method is useful for accessing most addressable I2C devices
    like EEPROMs, port expander, etc.

    Basically, this is just a convenient function which internally uses
    `i2c_master_write` and `i2c_master_read`.
    """
    # I2C_NO_STOP keeps the bus claimed so the subsequent read starts
    # with a repeated start instead of a fresh transaction.
    self.i2c_master_write(i2c_address, data, I2C_NO_STOP)
    return self.i2c_master_read(i2c_address, length)
def poll(self, timeout=None):
    """Wait for an event to occur.

    If `timeout` is given, it specifies the length of time in
    milliseconds which the function will wait for events before
    returning. If `timeout` is omitted, negative or None, the call
    will block until there is an event.

    Returns a list of events. In case no event is pending, an empty
    list is returned.
    """
    # The C API uses -1 to mean "block forever".
    if timeout is None:
        timeout = -1
    pending = api.py_aa_async_poll(self.handle, timeout)
    _raise_error_if_negative(pending)
    # The poll result is a bitmask; expand it into a list of the
    # individual event constants that are set.
    candidates = (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,
                  POLL_I2C_MONITOR)
    return [event for event in candidates if pending & event]
def enable_i2c_slave(self, slave_address):
    """Enable I2C slave mode.

    The device will respond to the specified slave_address if it is
    addressed.

    You can wait for the data with :func:`poll` and get it with
    `i2c_slave_read`.
    """
    # BUFFER_SIZE is used for both the transmit and the receive buffer.
    ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,
                                     self.BUFFER_SIZE, self.BUFFER_SIZE)
    _raise_error_if_negative(ret)
def disable_i2c_slave(self):
    """Disable I2C slave mode."""
    ret = api.py_aa_i2c_slave_disable(self.handle)
    _raise_error_if_negative(ret)
def i2c_slave_read(self):
    """Read the bytes from an I2C slave reception.

    Returns a ``(address, data)`` tuple where ``data`` is a bytes
    object.
    """
    buf = array.array('B', (0,) * self.BUFFER_SIZE)
    status, addr, rx_len = api.py_aa_i2c_slave_read_ext(
        self.handle, self.BUFFER_SIZE, buf)
    _raise_i2c_status_code_error_if_failure(status)
    # The C API reports a general call as 0x80; translate it back to
    # the actual general call address.
    if addr == 0x80:
        addr = 0x00
    return (addr, bytes(buf[:rx_len]))
@property
def i2c_slave_response(self):
    """Response to next read command.

    An array of bytes that will be transmitted to the I2C master with
    the next read operation.

    Warning: Due to the fact that the Aardvark API does not provide a
    means to read out this value, it is buffered when setting the
    property. Reading the property therefore might not return what is
    actually stored in the device.
    """
    # Shadow copy maintained by the setter; None before the first set.
    return self._i2c_slave_response

@i2c_slave_response.setter
def i2c_slave_response(self, data):
    data = array.array('B', data)
    ret = api.py_aa_i2c_slave_set_response(self.handle, len(data), data)
    _raise_error_if_negative(ret)
    # Buffer the value locally because the hardware cannot be queried.
    self._i2c_slave_response = data
@property
def i2c_slave_last_transmit_size(self):
    """Returns the number of bytes transmitted by the slave."""
    ret = api.py_aa_i2c_slave_write_stats(self.handle)
    _raise_error_if_negative(ret)
    return ret
def enable_i2c_monitor(self):
    """Activate the I2C monitor.

    Enabling the monitor will disable all other functions of the
    adapter.

    Raises an :exc:`IOError` if the hardware adapter does not support
    monitor mode.
    """
    ret = api.py_aa_i2c_monitor_enable(self.handle)
    _raise_error_if_negative(ret)
def disable_i2c_monitor(self):
    """Disable the I2C monitor.

    Raises an :exc:`IOError` if the hardware adapter does not support
    monitor mode.
    """
    ret = api.py_aa_i2c_monitor_disable(self.handle)
    _raise_error_if_negative(ret)
def i2c_monitor_read(self):
    """Retrieve any data fetched by the monitor.

    This function has an integrated timeout mechanism. You should use
    :func:`poll` to determine if there is any data available.

    Returns a list of data bytes and special symbols. There are three
    special symbols: `I2C_MONITOR_NACK`, I2C_MONITOR_START and
    I2C_MONITOR_STOP.
    """
    # 16 bit entries, so the stream can carry the special marker
    # symbols alongside plain data bytes.
    buf = array.array('H', (0,) * self.BUFFER_SIZE)
    count = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,
                                       buf)
    _raise_error_if_negative(count)
    # Only the first `count` entries are valid.
    return buf[:count].tolist()
@property
def spi_bitrate(self):
    """SPI bitrate in kHz. Not every bitrate is supported by the host
    adapter. Therefore, the actual bitrate may be less than the value
    which is set. The slowest bitrate supported is 125kHz. Any smaller
    value will be rounded up to 125kHz.

    The power-on default value is 1000 kHz.
    """
    # NOTE(review): a value of 0 appears to query the current bitrate
    # without changing it -- confirm against the Aardvark API docs.
    ret = api.py_aa_spi_bitrate(self.handle, 0)
    _raise_error_if_negative(ret)
    return ret

@spi_bitrate.setter
def spi_bitrate(self, value):
    ret = api.py_aa_spi_bitrate(self.handle, value)
    _raise_error_if_negative(ret)
def spi_configure(self, polarity, phase, bitorder):
    """Configure the SPI interface.

    ``polarity``, ``phase`` and ``bitorder`` take the SPI_POL_*,
    SPI_PHASE_* and SPI_BITORDER_* constants (see
    :func:`spi_configure_mode` for usage).
    """
    ret = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)
    _raise_error_if_negative(ret)
def spi_configure_mode(self, spi_mode):
    """Configure the SPI interface by the well known SPI modes."""
    # Only modes 0 and 3 are offered by this convenience wrapper; both
    # use MSB-first bit order.
    mode_settings = {
        SPI_MODE_0: (SPI_POL_RISING_FALLING, SPI_PHASE_SAMPLE_SETUP),
        SPI_MODE_3: (SPI_POL_FALLING_RISING, SPI_PHASE_SETUP_SAMPLE),
    }
    settings = mode_settings.get(spi_mode)
    if settings is None:
        raise RuntimeError('SPI Mode not supported')
    polarity, phase = settings
    self.spi_configure(polarity, phase, SPI_BITORDER_MSB)
def spi_write(self, data):
    """Write a stream of bytes to a SPI device."""
    outgoing = array.array('B', data)
    # SPI is full duplex: an equally sized receive buffer is filled
    # with the bytes clocked in during the write.
    incoming = array.array('B', (0,) * len(outgoing))
    result = api.py_aa_spi_write(self.handle, len(outgoing), outgoing,
                                 len(incoming), incoming)
    _raise_error_if_negative(result)
    return bytes(incoming)
|
praw-dev/prawcore | examples/obtain_refresh_token.py | receive_connection | python | def receive_connection():
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server.bind(("localhost", 8080))
server.listen(1)
client = server.accept()[0]
server.close()
return client | Wait for and then return a connected socket.
Opens a TCP connection on port 8080, and waits for a single client. | train | https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/examples/obtain_refresh_token.py#L19-L31 | null | #!/usr/bin/env python
"""This example demonstrates the flow for retrieving a refresh token.
In order for this example to work your application's redirect URI must be set
to http://localhost:8080.
This tool can be used to conveniently create refresh tokens for later use with
your web application OAuth2 credentials.
"""
import os
import prawcore
import random
import socket
import sys
def send_message(client, message):
"""Send message to client and close the connection."""
print(message)
client.send("HTTP/1.1 200 OK\r\n\r\n{}".format(message).encode("utf-8"))
client.close()
def main():
"""Provide the program's entry point when directly executed."""
if len(sys.argv) < 2:
print("Usage: {} SCOPE...".format(sys.argv[0]))
return 1
authenticator = prawcore.TrustedAuthenticator(
prawcore.Requestor("prawcore_refresh_token_example"),
os.environ["PRAWCORE_CLIENT_ID"],
os.environ["PRAWCORE_CLIENT_SECRET"],
os.environ["PRAWCORE_REDIRECT_URI"],
)
state = str(random.randint(0, 65000))
url = authenticator.authorize_url("permanent", sys.argv[1:], state)
print(url)
client = receive_connection()
data = client.recv(1024).decode("utf-8")
param_tokens = data.split(" ", 2)[1].split("?", 1)[1].split("&")
params = {
key: value
for (key, value) in [token.split("=") for token in param_tokens]
}
if state != params["state"]:
send_message(
client,
"State mismatch. Expected: {} Received: {}".format(
state, params["state"]
),
)
return 1
elif "error" in params:
send_message(client, params["error"])
return 1
authorizer = prawcore.Authorizer(authenticator)
authorizer.authorize(params["code"])
send_message(client, "Refresh token: {}".format(authorizer.refresh_token))
return 0
if __name__ == "__main__":
sys.exit(main())
|
praw-dev/prawcore | examples/obtain_refresh_token.py | send_message | python | def send_message(client, message):
print(message)
client.send("HTTP/1.1 200 OK\r\n\r\n{}".format(message).encode("utf-8"))
client.close() | Send message to client and close the connection. | train | https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/examples/obtain_refresh_token.py#L34-L38 | null | #!/usr/bin/env python
"""This example demonstrates the flow for retrieving a refresh token.
In order for this example to work your application's redirect URI must be set
to http://localhost:8080.
This tool can be used to conveniently create refresh tokens for later use with
your web application OAuth2 credentials.
"""
import os
import prawcore
import random
import socket
import sys
def receive_connection():
"""Wait for and then return a connected socket..
Opens a TCP connection on port 8080, and waits for a single client.
"""
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server.bind(("localhost", 8080))
server.listen(1)
client = server.accept()[0]
server.close()
return client
def main():
"""Provide the program's entry point when directly executed."""
if len(sys.argv) < 2:
print("Usage: {} SCOPE...".format(sys.argv[0]))
return 1
authenticator = prawcore.TrustedAuthenticator(
prawcore.Requestor("prawcore_refresh_token_example"),
os.environ["PRAWCORE_CLIENT_ID"],
os.environ["PRAWCORE_CLIENT_SECRET"],
os.environ["PRAWCORE_REDIRECT_URI"],
)
state = str(random.randint(0, 65000))
url = authenticator.authorize_url("permanent", sys.argv[1:], state)
print(url)
client = receive_connection()
data = client.recv(1024).decode("utf-8")
param_tokens = data.split(" ", 2)[1].split("?", 1)[1].split("&")
params = {
key: value
for (key, value) in [token.split("=") for token in param_tokens]
}
if state != params["state"]:
send_message(
client,
"State mismatch. Expected: {} Received: {}".format(
state, params["state"]
),
)
return 1
elif "error" in params:
send_message(client, params["error"])
return 1
authorizer = prawcore.Authorizer(authenticator)
authorizer.authorize(params["code"])
send_message(client, "Refresh token: {}".format(authorizer.refresh_token))
return 0
if __name__ == "__main__":
sys.exit(main())
|
praw-dev/prawcore | examples/obtain_refresh_token.py | main | python | def main():
if len(sys.argv) < 2:
print("Usage: {} SCOPE...".format(sys.argv[0]))
return 1
authenticator = prawcore.TrustedAuthenticator(
prawcore.Requestor("prawcore_refresh_token_example"),
os.environ["PRAWCORE_CLIENT_ID"],
os.environ["PRAWCORE_CLIENT_SECRET"],
os.environ["PRAWCORE_REDIRECT_URI"],
)
state = str(random.randint(0, 65000))
url = authenticator.authorize_url("permanent", sys.argv[1:], state)
print(url)
client = receive_connection()
data = client.recv(1024).decode("utf-8")
param_tokens = data.split(" ", 2)[1].split("?", 1)[1].split("&")
params = {
key: value
for (key, value) in [token.split("=") for token in param_tokens]
}
if state != params["state"]:
send_message(
client,
"State mismatch. Expected: {} Received: {}".format(
state, params["state"]
),
)
return 1
elif "error" in params:
send_message(client, params["error"])
return 1
authorizer = prawcore.Authorizer(authenticator)
authorizer.authorize(params["code"])
send_message(client, "Refresh token: {}".format(authorizer.refresh_token))
return 0 | Provide the program's entry point when directly executed. | train | https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/examples/obtain_refresh_token.py#L41-L82 | [
"def receive_connection():\n \"\"\"Wait for and then return a connected socket..\n\n Opens a TCP connection on port 8080, and waits for a single client.\n\n \"\"\"\n server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n server.bind((\"localhost\", 8080))\n server.listen(1)\n client = server.accept()[0]\n server.close()\n return client\n",
"def send_message(client, message):\n \"\"\"Send message to client and close the connection.\"\"\"\n print(message)\n client.send(\"HTTP/1.1 200 OK\\r\\n\\r\\n{}\".format(message).encode(\"utf-8\"))\n client.close()\n",
"def authorize_url(self, duration, scopes, state, implicit=False):\n \"\"\"Return the URL used out-of-band to grant access to your application.\n\n :param duration: Either ``permanent`` or ``temporary``. ``temporary``\n authorizations generate access tokens that last only 1\n hour. ``permanent`` authorizations additionally generate a refresh\n token that can be indefinitely used to generate new hour-long\n access tokens. Only ``temporary`` can be specified if ``implicit``\n is set to ``True``.\n :param scopes: A list of OAuth scopes to request authorization for.\n :param state: A string that will be reflected in the callback to\n ``redirect_uri``. This value should be temporarily unique to the\n client for whom the URL was generated for.\n :param implicit: (optional) Use the implicit grant flow (default:\n False). This flow is only available for UntrustedAuthenticators.\n\n \"\"\"\n if self.redirect_uri is None:\n raise InvalidInvocation(\"redirect URI not provided\")\n if implicit and not isinstance(self, UntrustedAuthenticator):\n raise InvalidInvocation(\n \"Only UntrustedAuthentictor instances can \"\n \"use the implicit grant flow.\"\n )\n if implicit and duration != \"temporary\":\n raise InvalidInvocation(\n \"The implicit grant flow only supports \"\n \"temporary access tokens.\"\n )\n\n params = {\n \"client_id\": self.client_id,\n \"duration\": duration,\n \"redirect_uri\": self.redirect_uri,\n \"response_type\": \"token\" if implicit else \"code\",\n \"scope\": \" \".join(scopes),\n \"state\": state,\n }\n url = self._requestor.reddit_url + const.AUTHORIZATION_PATH\n request = Request(\"GET\", url, params=params)\n return request.prepare().url\n",
"def authorize(self, code):\n \"\"\"Obtain and set authorization tokens based on ``code``.\n\n :param code: The code obtained by an out-of-band authorization request\n to Reddit.\n\n \"\"\"\n if self._authenticator.redirect_uri is None:\n raise InvalidInvocation(\"redirect URI not provided\")\n self._request_token(\n code=code,\n grant_type=\"authorization_code\",\n redirect_uri=self._authenticator.redirect_uri,\n )\n"
] | #!/usr/bin/env python
"""This example demonstrates the flow for retrieving a refresh token.
In order for this example to work your application's redirect URI must be set
to http://localhost:8080.
This tool can be used to conveniently create refresh tokens for later use with
your web application OAuth2 credentials.
"""
import os
import prawcore
import random
import socket
import sys
def receive_connection():
"""Wait for and then return a connected socket..
Opens a TCP connection on port 8080, and waits for a single client.
"""
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server.bind(("localhost", 8080))
server.listen(1)
client = server.accept()[0]
server.close()
return client
def send_message(client, message):
"""Send message to client and close the connection."""
print(message)
client.send("HTTP/1.1 200 OK\r\n\r\n{}".format(message).encode("utf-8"))
client.close()
if __name__ == "__main__":
sys.exit(main())
|
praw-dev/prawcore | prawcore/auth.py | BaseAuthenticator.authorize_url | python | def authorize_url(self, duration, scopes, state, implicit=False):
if self.redirect_uri is None:
raise InvalidInvocation("redirect URI not provided")
if implicit and not isinstance(self, UntrustedAuthenticator):
raise InvalidInvocation(
"Only UntrustedAuthentictor instances can "
"use the implicit grant flow."
)
if implicit and duration != "temporary":
raise InvalidInvocation(
"The implicit grant flow only supports "
"temporary access tokens."
)
params = {
"client_id": self.client_id,
"duration": duration,
"redirect_uri": self.redirect_uri,
"response_type": "token" if implicit else "code",
"scope": " ".join(scopes),
"state": state,
}
url = self._requestor.reddit_url + const.AUTHORIZATION_PATH
request = Request("GET", url, params=params)
return request.prepare().url | Return the URL used out-of-band to grant access to your application.
:param duration: Either ``permanent`` or ``temporary``. ``temporary``
authorizations generate access tokens that last only 1
hour. ``permanent`` authorizations additionally generate a refresh
token that can be indefinitely used to generate new hour-long
access tokens. Only ``temporary`` can be specified if ``implicit``
is set to ``True``.
:param scopes: A list of OAuth scopes to request authorization for.
:param state: A string that will be reflected in the callback to
``redirect_uri``. This value should be temporarily unique to the
client for whom the URL was generated for.
:param implicit: (optional) Use the implicit grant flow (default:
False). This flow is only available for UntrustedAuthenticators. | train | https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/prawcore/auth.py#L35-L75 | null | class BaseAuthenticator(object):
"""Provide the base authenticator object that stores OAuth2 credentials."""
def __init__(self, requestor, client_id, redirect_uri=None):
"""Represent a single authentication to Reddit's API.
:param requestor: An instance of :class:`Requestor`.
:param client_id: The OAuth2 client ID to use with the session.
:param redirect_uri: (optional) The redirect URI exactly as specified
in your OAuth application settings on Reddit. This parameter is
required if you want to use the ``authorize_url`` method, or the
``authorize`` method of the ``Authorizer`` class.
"""
self._requestor = requestor
self.client_id = client_id
self.redirect_uri = redirect_uri
def _post(self, url, success_status=codes["ok"], **data):
response = self._requestor.request(
"post", url, auth=self._auth(), data=sorted(data.items())
)
if response.status_code != success_status:
raise ResponseException(response)
return response
def revoke_token(self, token, token_type=None):
"""Ask Reddit to revoke the provided token.
:param token: The access or refresh token to revoke.
:param token_type: (Optional) When provided, hint to Reddit what the
token type is for a possible efficiency gain. The value can be
either ``access_token`` or ``refresh_token``.
"""
data = {"token": token}
if token_type is not None:
data["token_type_hint"] = token_type
url = self._requestor.reddit_url + const.REVOKE_TOKEN_PATH
self._post(url, success_status=codes["no_content"], **data)
|
praw-dev/prawcore | prawcore/auth.py | BaseAuthenticator.revoke_token | python | def revoke_token(self, token, token_type=None):
data = {"token": token}
if token_type is not None:
data["token_type_hint"] = token_type
url = self._requestor.reddit_url + const.REVOKE_TOKEN_PATH
self._post(url, success_status=codes["no_content"], **data) | Ask Reddit to revoke the provided token.
:param token: The access or refresh token to revoke.
:param token_type: (Optional) When provided, hint to Reddit what the
token type is for a possible efficiency gain. The value can be
either ``access_token`` or ``refresh_token``. | train | https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/prawcore/auth.py#L77-L90 | [
"def _post(self, url, success_status=codes[\"ok\"], **data):\n response = self._requestor.request(\n \"post\", url, auth=self._auth(), data=sorted(data.items())\n )\n if response.status_code != success_status:\n raise ResponseException(response)\n return response\n"
] | class BaseAuthenticator(object):
"""Provide the base authenticator object that stores OAuth2 credentials."""
def __init__(self, requestor, client_id, redirect_uri=None):
"""Represent a single authentication to Reddit's API.
:param requestor: An instance of :class:`Requestor`.
:param client_id: The OAuth2 client ID to use with the session.
:param redirect_uri: (optional) The redirect URI exactly as specified
in your OAuth application settings on Reddit. This parameter is
required if you want to use the ``authorize_url`` method, or the
``authorize`` method of the ``Authorizer`` class.
"""
self._requestor = requestor
self.client_id = client_id
self.redirect_uri = redirect_uri
def _post(self, url, success_status=codes["ok"], **data):
response = self._requestor.request(
"post", url, auth=self._auth(), data=sorted(data.items())
)
if response.status_code != success_status:
raise ResponseException(response)
return response
def authorize_url(self, duration, scopes, state, implicit=False):
"""Return the URL used out-of-band to grant access to your application.
:param duration: Either ``permanent`` or ``temporary``. ``temporary``
authorizations generate access tokens that last only 1
hour. ``permanent`` authorizations additionally generate a refresh
token that can be indefinitely used to generate new hour-long
access tokens. Only ``temporary`` can be specified if ``implicit``
is set to ``True``.
:param scopes: A list of OAuth scopes to request authorization for.
:param state: A string that will be reflected in the callback to
``redirect_uri``. This value should be temporarily unique to the
client for whom the URL was generated for.
:param implicit: (optional) Use the implicit grant flow (default:
False). This flow is only available for UntrustedAuthenticators.
"""
if self.redirect_uri is None:
raise InvalidInvocation("redirect URI not provided")
if implicit and not isinstance(self, UntrustedAuthenticator):
raise InvalidInvocation(
"Only UntrustedAuthentictor instances can "
"use the implicit grant flow."
)
if implicit and duration != "temporary":
raise InvalidInvocation(
"The implicit grant flow only supports "
"temporary access tokens."
)
params = {
"client_id": self.client_id,
"duration": duration,
"redirect_uri": self.redirect_uri,
"response_type": "token" if implicit else "code",
"scope": " ".join(scopes),
"state": state,
}
url = self._requestor.reddit_url + const.AUTHORIZATION_PATH
request = Request("GET", url, params=params)
return request.prepare().url
|
praw-dev/prawcore | prawcore/auth.py | BaseAuthorizer.revoke | python | def revoke(self):
if self.access_token is None:
raise InvalidInvocation("no token available to revoke")
self._authenticator.revoke_token(self.access_token, "access_token")
self._clear_access_token() | Revoke the current Authorization. | train | https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/prawcore/auth.py#L184-L190 | [
"def _clear_access_token(self):\n self._expiration_timestamp = None\n self.access_token = None\n self.scopes = None\n"
] | class BaseAuthorizer(object):
"""Superclass for OAuth2 authorization tokens and scopes."""
def __init__(self, authenticator):
"""Represent a single authorization to Reddit's API.
:param authenticator: An instance of :class:`BaseAuthenticator`.
"""
self._authenticator = authenticator
self._clear_access_token()
self._validate_authenticator()
def _clear_access_token(self):
self._expiration_timestamp = None
self.access_token = None
self.scopes = None
def _request_token(self, **data):
url = (
self._authenticator._requestor.reddit_url + const.ACCESS_TOKEN_PATH
)
pre_request_time = time.time()
response = self._authenticator._post(url, **data)
payload = response.json()
if "error" in payload: # Why are these OKAY responses?
raise OAuthException(
response, payload["error"], payload.get("error_description")
)
self._expiration_timestamp = (
pre_request_time - 10 + payload["expires_in"]
)
self.access_token = payload["access_token"]
if "refresh_token" in payload:
self.refresh_token = payload["refresh_token"]
self.scopes = set(payload["scope"].split(" "))
def _validate_authenticator(self):
if not isinstance(self._authenticator, self.AUTHENTICATOR_CLASS):
raise InvalidInvocation(
"Must use a authenticator of type {}.".format(
self.AUTHENTICATOR_CLASS.__name__
)
)
def is_valid(self):
"""Return whether or not the Authorizer is ready to authorize requests.
A ``True`` return value does not guarantee that the access_token is
actually valid on the server side.
"""
return (
self.access_token is not None
and time.time() < self._expiration_timestamp
)
|
praw-dev/prawcore | prawcore/auth.py | Authorizer.authorize | python | def authorize(self, code):
if self._authenticator.redirect_uri is None:
raise InvalidInvocation("redirect URI not provided")
self._request_token(
code=code,
grant_type="authorization_code",
redirect_uri=self._authenticator.redirect_uri,
) | Obtain and set authorization tokens based on ``code``.
:param code: The code obtained by an out-of-band authorization request
to Reddit. | train | https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/prawcore/auth.py#L210-L223 | [
"def _request_token(self, **data):\n url = (\n self._authenticator._requestor.reddit_url + const.ACCESS_TOKEN_PATH\n )\n pre_request_time = time.time()\n response = self._authenticator._post(url, **data)\n payload = response.json()\n if \"error\" in payload: # Why are these OKAY responses?\n raise OAuthException(\n response, payload[\"error\"], payload.get(\"error_description\")\n )\n\n self._expiration_timestamp = (\n pre_request_time - 10 + payload[\"expires_in\"]\n )\n self.access_token = payload[\"access_token\"]\n if \"refresh_token\" in payload:\n self.refresh_token = payload[\"refresh_token\"]\n self.scopes = set(payload[\"scope\"].split(\" \"))\n"
] | class Authorizer(BaseAuthorizer):
"""Manages OAuth2 authorization tokens and scopes."""
AUTHENTICATOR_CLASS = BaseAuthenticator
def __init__(self, authenticator, refresh_token=None):
"""Represent a single authorization to Reddit's API.
:param authenticator: An instance of a subclass of
:class:`BaseAuthenticator`.
:param refresh_token: (Optional) Enables the ability to refresh the
authorization.
"""
super(Authorizer, self).__init__(authenticator)
self.refresh_token = refresh_token
def refresh(self):
"""Obtain a new access token from the refresh_token."""
if self.refresh_token is None:
raise InvalidInvocation("refresh token not provided")
self._request_token(
grant_type="refresh_token", refresh_token=self.refresh_token
)
def revoke(self, only_access=False):
"""Revoke the current Authorization.
:param only_access: (Optional) When explicitly set to True, do not
evict the refresh token if one is set.
Revoking a refresh token will in-turn revoke all access tokens
associated with that authorization.
"""
if only_access or self.refresh_token is None:
super(Authorizer, self).revoke()
else:
self._authenticator.revoke_token(
self.refresh_token, "refresh_token"
)
self._clear_access_token()
self.refresh_token = None
|
praw-dev/prawcore | prawcore/auth.py | Authorizer.refresh | python | def refresh(self):
if self.refresh_token is None:
raise InvalidInvocation("refresh token not provided")
self._request_token(
grant_type="refresh_token", refresh_token=self.refresh_token
) | Obtain a new access token from the refresh_token. | train | https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/prawcore/auth.py#L225-L231 | [
"def _request_token(self, **data):\n url = (\n self._authenticator._requestor.reddit_url + const.ACCESS_TOKEN_PATH\n )\n pre_request_time = time.time()\n response = self._authenticator._post(url, **data)\n payload = response.json()\n if \"error\" in payload: # Why are these OKAY responses?\n raise OAuthException(\n response, payload[\"error\"], payload.get(\"error_description\")\n )\n\n self._expiration_timestamp = (\n pre_request_time - 10 + payload[\"expires_in\"]\n )\n self.access_token = payload[\"access_token\"]\n if \"refresh_token\" in payload:\n self.refresh_token = payload[\"refresh_token\"]\n self.scopes = set(payload[\"scope\"].split(\" \"))\n"
] | class Authorizer(BaseAuthorizer):
"""Manages OAuth2 authorization tokens and scopes."""
AUTHENTICATOR_CLASS = BaseAuthenticator
def __init__(self, authenticator, refresh_token=None):
"""Represent a single authorization to Reddit's API.
:param authenticator: An instance of a subclass of
:class:`BaseAuthenticator`.
:param refresh_token: (Optional) Enables the ability to refresh the
authorization.
"""
super(Authorizer, self).__init__(authenticator)
self.refresh_token = refresh_token
def authorize(self, code):
"""Obtain and set authorization tokens based on ``code``.
:param code: The code obtained by an out-of-band authorization request
to Reddit.
"""
if self._authenticator.redirect_uri is None:
raise InvalidInvocation("redirect URI not provided")
self._request_token(
code=code,
grant_type="authorization_code",
redirect_uri=self._authenticator.redirect_uri,
)
def revoke(self, only_access=False):
"""Revoke the current Authorization.
:param only_access: (Optional) When explicitly set to True, do not
evict the refresh token if one is set.
Revoking a refresh token will in-turn revoke all access tokens
associated with that authorization.
"""
if only_access or self.refresh_token is None:
super(Authorizer, self).revoke()
else:
self._authenticator.revoke_token(
self.refresh_token, "refresh_token"
)
self._clear_access_token()
self.refresh_token = None
|
praw-dev/prawcore | prawcore/auth.py | Authorizer.revoke | python | def revoke(self, only_access=False):
if only_access or self.refresh_token is None:
super(Authorizer, self).revoke()
else:
self._authenticator.revoke_token(
self.refresh_token, "refresh_token"
)
self._clear_access_token()
self.refresh_token = None | Revoke the current Authorization.
:param only_access: (Optional) When explicitly set to True, do not
evict the refresh token if one is set.
Revoking a refresh token will in-turn revoke all access tokens
associated with that authorization. | train | https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/prawcore/auth.py#L233-L250 | [
"def _clear_access_token(self):\n self._expiration_timestamp = None\n self.access_token = None\n self.scopes = None\n",
"def revoke(self):\n \"\"\"Revoke the current Authorization.\"\"\"\n if self.access_token is None:\n raise InvalidInvocation(\"no token available to revoke\")\n\n self._authenticator.revoke_token(self.access_token, \"access_token\")\n self._clear_access_token()\n"
] | class Authorizer(BaseAuthorizer):
"""Manages OAuth2 authorization tokens and scopes."""
AUTHENTICATOR_CLASS = BaseAuthenticator
def __init__(self, authenticator, refresh_token=None):
"""Represent a single authorization to Reddit's API.
:param authenticator: An instance of a subclass of
:class:`BaseAuthenticator`.
:param refresh_token: (Optional) Enables the ability to refresh the
authorization.
"""
super(Authorizer, self).__init__(authenticator)
self.refresh_token = refresh_token
def authorize(self, code):
"""Obtain and set authorization tokens based on ``code``.
:param code: The code obtained by an out-of-band authorization request
to Reddit.
"""
if self._authenticator.redirect_uri is None:
raise InvalidInvocation("redirect URI not provided")
self._request_token(
code=code,
grant_type="authorization_code",
redirect_uri=self._authenticator.redirect_uri,
)
def refresh(self):
"""Obtain a new access token from the refresh_token."""
if self.refresh_token is None:
raise InvalidInvocation("refresh token not provided")
self._request_token(
grant_type="refresh_token", refresh_token=self.refresh_token
)
|
praw-dev/prawcore | prawcore/auth.py | DeviceIDAuthorizer.refresh | python | def refresh(self):
grant_type = "https://oauth.reddit.com/grants/installed_client"
self._request_token(grant_type=grant_type, device_id=self._device_id) | Obtain a new access token. | train | https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/prawcore/auth.py#L275-L278 | [
"def _request_token(self, **data):\n url = (\n self._authenticator._requestor.reddit_url + const.ACCESS_TOKEN_PATH\n )\n pre_request_time = time.time()\n response = self._authenticator._post(url, **data)\n payload = response.json()\n if \"error\" in payload: # Why are these OKAY responses?\n raise OAuthException(\n response, payload[\"error\"], payload.get(\"error_description\")\n )\n\n self._expiration_timestamp = (\n pre_request_time - 10 + payload[\"expires_in\"]\n )\n self.access_token = payload[\"access_token\"]\n if \"refresh_token\" in payload:\n self.refresh_token = payload[\"refresh_token\"]\n self.scopes = set(payload[\"scope\"].split(\" \"))\n"
] | class DeviceIDAuthorizer(BaseAuthorizer):
"""Manages app-only OAuth2 for 'installed' applications.
While the '*' scope will be available, some endpoints simply will not work
due to the lack of an associated Reddit account.
"""
AUTHENTICATOR_CLASS = UntrustedAuthenticator
def __init__(self, authenticator, device_id="DO_NOT_TRACK_THIS_DEVICE"):
"""Represent an app-only OAuth2 authorization for 'installed' apps.
:param authenticator: An instance of :class:`UntrustedAuthenticator`.
:param device_id: (optional) A unique ID (20-30 character ASCII string)
(default DO_NOT_TRACK_THIS_DEVICE). For more information about this
parameter, see:
https://github.com/reddit/reddit/wiki/OAuth2#application-only-oauth
"""
super(DeviceIDAuthorizer, self).__init__(authenticator)
self._device_id = device_id
|
praw-dev/prawcore | prawcore/auth.py | ScriptAuthorizer.refresh | python | def refresh(self):
self._request_token(
grant_type="password",
username=self._username,
password=self._password,
) | Obtain a new personal-use script type access token. | train | https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/prawcore/auth.py#L347-L353 | [
"def _request_token(self, **data):\n url = (\n self._authenticator._requestor.reddit_url + const.ACCESS_TOKEN_PATH\n )\n pre_request_time = time.time()\n response = self._authenticator._post(url, **data)\n payload = response.json()\n if \"error\" in payload: # Why are these OKAY responses?\n raise OAuthException(\n response, payload[\"error\"], payload.get(\"error_description\")\n )\n\n self._expiration_timestamp = (\n pre_request_time - 10 + payload[\"expires_in\"]\n )\n self.access_token = payload[\"access_token\"]\n if \"refresh_token\" in payload:\n self.refresh_token = payload[\"refresh_token\"]\n self.scopes = set(payload[\"scope\"].split(\" \"))\n"
] | class ScriptAuthorizer(Authorizer):
"""Manages personal-use script type authorizations.
Only users who are listed as developers for the application will be
granted access tokens.
"""
AUTHENTICATOR_CLASS = TrustedAuthenticator
def __init__(self, authenticator, username, password):
"""Represent a single personal-use authorization to Reddit's API.
:param authenticator: An instance of :class:`TrustedAuthenticator`.
:param username: The Reddit username of one of the application's
developers.
:param password: The password associated with ``username``.
"""
super(ScriptAuthorizer, self).__init__(authenticator)
self._username = username
self._password = password
|
praw-dev/prawcore | prawcore/requestor.py | Requestor.request | python | def request(self, *args, **kwargs):
try:
return self._http.request(*args, timeout=TIMEOUT, **kwargs)
except Exception as exc:
raise RequestException(exc, args, kwargs) | Issue the HTTP request capturing any errors that may occur. | train | https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/prawcore/requestor.py#L50-L55 | null | class Requestor(object):
"""Requestor provides an interface to HTTP requests."""
def __getattr__(self, attribute):
"""Pass all undefined attributes to the _http attribute."""
if attribute.startswith("__"):
raise AttributeError
return getattr(self._http, attribute)
def __init__(
self,
user_agent,
oauth_url="https://oauth.reddit.com",
reddit_url="https://www.reddit.com",
session=None,
):
"""Create an instance of the Requestor class.
:param user_agent: The user-agent for your application. Please follow
reddit's user-agent guidlines:
https://github.com/reddit/reddit/wiki/API#rules
:param oauth_url: (Optional) The URL used to make OAuth requests to the
reddit site. (Default: https://oauth.reddit.com)
:param reddit_url: (Optional) The URL used when obtaining access
tokens. (Default: https://www.reddit.com)
:param session: (Optional) A session to handle requests, compatible
with requests.Session(). (Default: None)
"""
if user_agent is None or len(user_agent) < 7:
raise InvalidInvocation("user_agent is not descriptive")
self._http = session or requests.Session()
self._http.headers["User-Agent"] = "{} prawcore/{}".format(
user_agent, __version__
)
self.oauth_url = oauth_url
self.reddit_url = reddit_url
def close(self):
"""Call close on the underlying session."""
return self._http.close()
|
praw-dev/prawcore | prawcore/util.py | authorization_error_class | python | def authorization_error_class(response):
message = response.headers.get("www-authenticate")
if message:
error = message.replace('"', "").rsplit("=", 1)[1]
else:
error = response.status_code
return _auth_error_mapping[error](response) | Return an exception instance that maps to the OAuth Error.
:param response: The HTTP response containing a www-authenticate error. | train | https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/prawcore/util.py#L12-L23 | null | """Provide utility for the prawcore package."""
from .exceptions import Forbidden, InsufficientScope, InvalidToken
_auth_error_mapping = {
403: Forbidden,
"insufficient_scope": InsufficientScope,
"invalid_token": InvalidToken,
}
|
praw-dev/prawcore | prawcore/rate_limit.py | RateLimiter.call | python | def call(self, request_function, set_header_callback, *args, **kwargs):
self.delay()
kwargs["headers"] = set_header_callback()
response = request_function(*args, **kwargs)
self.update(response.headers)
return response | Rate limit the call to request_function.
:param request_function: A function call that returns an HTTP response
object.
:param set_header_callback: A callback function used to set the request
headers. This callback is called after any necessary sleep time
occurs.
:param *args: The positional arguments to ``request_function``.
:param **kwargs: The keyword arguments to ``request_function``. | train | https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/prawcore/rate_limit.py#L22-L38 | [
"def delay(self):\n \"\"\"Sleep for an amount of time to remain under the rate limit.\"\"\"\n if self.next_request_timestamp is None:\n return\n sleep_seconds = self.next_request_timestamp - time.time()\n if sleep_seconds <= 0:\n return\n message = \"Sleeping: {:0.2f} seconds prior to\" \" call\".format(\n sleep_seconds\n )\n log.debug(message)\n time.sleep(sleep_seconds)\n",
"def update(self, response_headers):\n \"\"\"Update the state of the rate limiter based on the response headers.\n\n This method should only be called following a HTTP request to reddit.\n\n Response headers that do not contain x-ratelimit fields will be treated\n as a single request. This behavior is to error on the safe-side as such\n responses should trigger exceptions that indicate invalid behavior.\n\n \"\"\"\n if \"x-ratelimit-remaining\" not in response_headers:\n if self.remaining is not None:\n self.remaining -= 1\n self.used += 1\n return\n\n now = time.time()\n prev_remaining = self.remaining\n\n seconds_to_reset = int(response_headers[\"x-ratelimit-reset\"])\n self.remaining = float(response_headers[\"x-ratelimit-remaining\"])\n self.used = int(response_headers[\"x-ratelimit-used\"])\n self.reset_timestamp = now + seconds_to_reset\n\n if self.remaining <= 0:\n self.next_request_timestamp = self.reset_timestamp\n return\n\n if prev_remaining is not None and prev_remaining > self.remaining:\n estimated_clients = prev_remaining - self.remaining\n else:\n estimated_clients = 1.0\n\n self.next_request_timestamp = min(\n self.reset_timestamp,\n now + (estimated_clients * seconds_to_reset / self.remaining),\n )\n"
] | class RateLimiter(object):
"""Facilitates the rate limiting of requests to reddit.
Rate limits are controlled based on feedback from requests to reddit.
"""
def __init__(self):
"""Create an instance of the RateLimit class."""
self.remaining = None
self.next_request_timestamp = None
self.reset_timestamp = None
self.used = None
def delay(self):
"""Sleep for an amount of time to remain under the rate limit."""
if self.next_request_timestamp is None:
return
sleep_seconds = self.next_request_timestamp - time.time()
if sleep_seconds <= 0:
return
message = "Sleeping: {:0.2f} seconds prior to" " call".format(
sleep_seconds
)
log.debug(message)
time.sleep(sleep_seconds)
def update(self, response_headers):
"""Update the state of the rate limiter based on the response headers.
This method should only be called following a HTTP request to reddit.
Response headers that do not contain x-ratelimit fields will be treated
as a single request. This behavior is to error on the safe-side as such
responses should trigger exceptions that indicate invalid behavior.
"""
if "x-ratelimit-remaining" not in response_headers:
if self.remaining is not None:
self.remaining -= 1
self.used += 1
return
now = time.time()
prev_remaining = self.remaining
seconds_to_reset = int(response_headers["x-ratelimit-reset"])
self.remaining = float(response_headers["x-ratelimit-remaining"])
self.used = int(response_headers["x-ratelimit-used"])
self.reset_timestamp = now + seconds_to_reset
if self.remaining <= 0:
self.next_request_timestamp = self.reset_timestamp
return
if prev_remaining is not None and prev_remaining > self.remaining:
estimated_clients = prev_remaining - self.remaining
else:
estimated_clients = 1.0
self.next_request_timestamp = min(
self.reset_timestamp,
now + (estimated_clients * seconds_to_reset / self.remaining),
)
|
praw-dev/prawcore | prawcore/rate_limit.py | RateLimiter.delay | python | def delay(self):
if self.next_request_timestamp is None:
return
sleep_seconds = self.next_request_timestamp - time.time()
if sleep_seconds <= 0:
return
message = "Sleeping: {:0.2f} seconds prior to" " call".format(
sleep_seconds
)
log.debug(message)
time.sleep(sleep_seconds) | Sleep for an amount of time to remain under the rate limit. | train | https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/prawcore/rate_limit.py#L40-L51 | null | class RateLimiter(object):
"""Facilitates the rate limiting of requests to reddit.
Rate limits are controlled based on feedback from requests to reddit.
"""
def __init__(self):
"""Create an instance of the RateLimit class."""
self.remaining = None
self.next_request_timestamp = None
self.reset_timestamp = None
self.used = None
def call(self, request_function, set_header_callback, *args, **kwargs):
"""Rate limit the call to request_function.
:param request_function: A function call that returns an HTTP response
object.
:param set_header_callback: A callback function used to set the request
headers. This callback is called after any necessary sleep time
occurs.
:param *args: The positional arguments to ``request_function``.
:param **kwargs: The keyword arguments to ``request_function``.
"""
self.delay()
kwargs["headers"] = set_header_callback()
response = request_function(*args, **kwargs)
self.update(response.headers)
return response
def update(self, response_headers):
"""Update the state of the rate limiter based on the response headers.
This method should only be called following a HTTP request to reddit.
Response headers that do not contain x-ratelimit fields will be treated
as a single request. This behavior is to error on the safe-side as such
responses should trigger exceptions that indicate invalid behavior.
"""
if "x-ratelimit-remaining" not in response_headers:
if self.remaining is not None:
self.remaining -= 1
self.used += 1
return
now = time.time()
prev_remaining = self.remaining
seconds_to_reset = int(response_headers["x-ratelimit-reset"])
self.remaining = float(response_headers["x-ratelimit-remaining"])
self.used = int(response_headers["x-ratelimit-used"])
self.reset_timestamp = now + seconds_to_reset
if self.remaining <= 0:
self.next_request_timestamp = self.reset_timestamp
return
if prev_remaining is not None and prev_remaining > self.remaining:
estimated_clients = prev_remaining - self.remaining
else:
estimated_clients = 1.0
self.next_request_timestamp = min(
self.reset_timestamp,
now + (estimated_clients * seconds_to_reset / self.remaining),
)
|
praw-dev/prawcore | prawcore/rate_limit.py | RateLimiter.update | python | def update(self, response_headers):
if "x-ratelimit-remaining" not in response_headers:
if self.remaining is not None:
self.remaining -= 1
self.used += 1
return
now = time.time()
prev_remaining = self.remaining
seconds_to_reset = int(response_headers["x-ratelimit-reset"])
self.remaining = float(response_headers["x-ratelimit-remaining"])
self.used = int(response_headers["x-ratelimit-used"])
self.reset_timestamp = now + seconds_to_reset
if self.remaining <= 0:
self.next_request_timestamp = self.reset_timestamp
return
if prev_remaining is not None and prev_remaining > self.remaining:
estimated_clients = prev_remaining - self.remaining
else:
estimated_clients = 1.0
self.next_request_timestamp = min(
self.reset_timestamp,
now + (estimated_clients * seconds_to_reset / self.remaining),
) | Update the state of the rate limiter based on the response headers.
This method should only be called following a HTTP request to reddit.
Response headers that do not contain x-ratelimit fields will be treated
as a single request. This behavior is to error on the safe-side as such
responses should trigger exceptions that indicate invalid behavior. | train | https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/prawcore/rate_limit.py#L53-L89 | null | class RateLimiter(object):
"""Facilitates the rate limiting of requests to reddit.
Rate limits are controlled based on feedback from requests to reddit.
"""
def __init__(self):
"""Create an instance of the RateLimit class."""
self.remaining = None
self.next_request_timestamp = None
self.reset_timestamp = None
self.used = None
def call(self, request_function, set_header_callback, *args, **kwargs):
"""Rate limit the call to request_function.
:param request_function: A function call that returns an HTTP response
object.
:param set_header_callback: A callback function used to set the request
headers. This callback is called after any necessary sleep time
occurs.
:param *args: The positional arguments to ``request_function``.
:param **kwargs: The keyword arguments to ``request_function``.
"""
self.delay()
kwargs["headers"] = set_header_callback()
response = request_function(*args, **kwargs)
self.update(response.headers)
return response
def delay(self):
"""Sleep for an amount of time to remain under the rate limit."""
if self.next_request_timestamp is None:
return
sleep_seconds = self.next_request_timestamp - time.time()
if sleep_seconds <= 0:
return
message = "Sleeping: {:0.2f} seconds prior to" " call".format(
sleep_seconds
)
log.debug(message)
time.sleep(sleep_seconds)
|
praw-dev/prawcore | prawcore/sessions.py | Session.request | python | def request(
self, method, path, data=None, files=None, json=None, params=None
):
params = deepcopy(params) or {}
params["raw_json"] = 1
if isinstance(data, dict):
data = deepcopy(data)
data["api_type"] = "json"
data = sorted(data.items())
url = urljoin(self._requestor.oauth_url, path)
return self._request_with_retries(
data=data,
files=files,
json=json,
method=method,
params=params,
url=url,
) | Return the json content from the resource at ``path``.
:param method: The request verb. E.g., get, post, put.
:param path: The path of the request. This path will be combined with
the ``oauth_url`` of the Requestor.
:param data: Dictionary, bytes, or file-like object to send in the body
of the request.
:param files: Dictionary, mapping ``filename`` to file-like object.
:param json: Object to be serialized to JSON in the body of the
request.
:param params: The query parameters to send with the request.
Automatically refreshes the access token if it becomes invalid and a
refresh token is available. Raises InvalidInvocation in such a case if
a refresh token is not available. | train | https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/prawcore/sessions.py#L226-L260 | [
"def _request_with_retries(\n self, data, files, json, method, params, url, retries=3\n):\n self._retry_sleep(retries)\n self._log_request(data, method, params, url)\n response, saved_exception = self._make_request(\n data, files, json, method, params, retries, url\n )\n\n do_retry = False\n if (\n response is not None\n and response.status_code == codes[\"unauthorized\"]\n ):\n self._authorizer._clear_access_token()\n if hasattr(self._authorizer, \"refresh\"):\n do_retry = True\n\n if retries > 1 and (\n do_retry\n or response is None\n or response.status_code in self.RETRY_STATUSES\n ):\n return self._do_retry(\n data,\n files,\n json,\n method,\n params,\n response,\n retries,\n saved_exception,\n url,\n )\n elif response.status_code in self.STATUS_EXCEPTIONS:\n raise self.STATUS_EXCEPTIONS[response.status_code](response)\n elif response.status_code == codes[\"no_content\"]:\n return\n assert (\n response.status_code in self.SUCCESS_STATUSES\n ), \"Unexpected status code: {}\".format(response.status_code)\n if response.headers.get(\"content-length\") == \"0\":\n return \"\"\n try:\n return response.json()\n except ValueError:\n raise BadJSON(response)\n"
] | class Session(object):
"""The low-level connection interface to reddit's API."""
RETRY_EXCEPTIONS = (ChunkedEncodingError, ConnectionError, ReadTimeout)
RETRY_STATUSES = {
520,
522,
codes["bad_gateway"],
codes["gateway_timeout"],
codes["internal_server_error"],
codes["service_unavailable"],
}
STATUS_EXCEPTIONS = {
codes["bad_gateway"]: ServerError,
codes["bad_request"]: BadRequest,
codes["conflict"]: Conflict,
codes["found"]: Redirect,
codes["forbidden"]: authorization_error_class,
codes["gateway_timeout"]: ServerError,
codes["internal_server_error"]: ServerError,
codes["media_type"]: SpecialError,
codes["not_found"]: NotFound,
codes["request_entity_too_large"]: TooLarge,
codes["service_unavailable"]: ServerError,
codes["unauthorized"]: authorization_error_class,
codes["unavailable_for_legal_reasons"]: UnavailableForLegalReasons,
# CloudFlare status (not named in requests)
520: ServerError,
522: ServerError,
}
SUCCESS_STATUSES = {codes["created"], codes["ok"]}
@staticmethod
def _log_request(data, method, params, url):
log.debug("Fetching: {} {}".format(method, url))
log.debug("Data: {}".format(data))
log.debug("Params: {}".format(params))
@staticmethod
def _retry_sleep(retries):
if retries < 3:
base = 0 if retries == 2 else 2
sleep_seconds = base + 2 * random.random()
message = "Sleeping: {:0.2f} seconds prior to" " retry".format(
sleep_seconds
)
log.debug(message)
time.sleep(sleep_seconds)
def __init__(self, authorizer):
"""Preprare the connection to reddit's API.
:param authorizer: An instance of :class:`Authorizer`.
"""
if not isinstance(authorizer, BaseAuthorizer):
raise InvalidInvocation(
"invalid Authorizer: {}".format(authorizer)
)
self._authorizer = authorizer
self._rate_limiter = RateLimiter()
def __enter__(self):
"""Allow this object to be used as a context manager."""
return self
def __exit__(self, *_args):
"""Allow this object to be used as a context manager."""
self.close()
def _do_retry(
self,
data,
files,
json,
method,
params,
response,
retries,
saved_exception,
url,
):
if saved_exception:
status = repr(saved_exception)
else:
status = response.status_code
log.warning(
"Retrying due to {} status: {} {}".format(status, method, url)
)
return self._request_with_retries(
data=data,
files=files,
json=json,
method=method,
params=params,
url=url,
retries=retries - 1,
)
def _make_request(self, data, files, json, method, params, retries, url):
try:
response = self._rate_limiter.call(
self._requestor.request,
self._set_header_callback,
method,
url,
allow_redirects=False,
data=data,
files=files,
json=json,
params=params,
)
log.debug(
"Response: {} ({} bytes)".format(
response.status_code,
response.headers.get("content-length"),
)
)
return response, None
except RequestException as exception:
if retries <= 1 or not isinstance(
exception.original_exception, self.RETRY_EXCEPTIONS
):
raise
return None, exception.original_exception
def _request_with_retries(
self, data, files, json, method, params, url, retries=3
):
self._retry_sleep(retries)
self._log_request(data, method, params, url)
response, saved_exception = self._make_request(
data, files, json, method, params, retries, url
)
do_retry = False
if (
response is not None
and response.status_code == codes["unauthorized"]
):
self._authorizer._clear_access_token()
if hasattr(self._authorizer, "refresh"):
do_retry = True
if retries > 1 and (
do_retry
or response is None
or response.status_code in self.RETRY_STATUSES
):
return self._do_retry(
data,
files,
json,
method,
params,
response,
retries,
saved_exception,
url,
)
elif response.status_code in self.STATUS_EXCEPTIONS:
raise self.STATUS_EXCEPTIONS[response.status_code](response)
elif response.status_code == codes["no_content"]:
return
assert (
response.status_code in self.SUCCESS_STATUSES
), "Unexpected status code: {}".format(response.status_code)
if response.headers.get("content-length") == "0":
return ""
try:
return response.json()
except ValueError:
raise BadJSON(response)
def _set_header_callback(self):
if not self._authorizer.is_valid() and hasattr(
self._authorizer, "refresh"
):
self._authorizer.refresh()
return {
"Authorization": "bearer {}".format(self._authorizer.access_token)
}
@property
def _requestor(self):
return self._authorizer._authenticator._requestor
def close(self):
"""Close the session and perform any clean up."""
self._requestor.close()
|
praw-dev/prawcore | examples/script_auth_friend_list.py | main | python | def main():
authenticator = prawcore.TrustedAuthenticator(
prawcore.Requestor("prawcore_script_auth_example"),
os.environ["PRAWCORE_CLIENT_ID"],
os.environ["PRAWCORE_CLIENT_SECRET"],
)
authorizer = prawcore.ScriptAuthorizer(
authenticator,
os.environ["PRAWCORE_USERNAME"],
os.environ["PRAWCORE_PASSWORD"],
)
authorizer.refresh()
with prawcore.session(authorizer) as session:
data = session.request("GET", "/api/v1/me/friends")
for friend in data["data"]["children"]:
print(friend["name"])
return 0 | Provide the program's entry point when directly executed. | train | https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/examples/script_auth_friend_list.py#L15-L35 | [
"def session(authorizer=None):\n \"\"\"Return a :class:`Session` instance.\n\n :param authorizer: An instance of :class:`Authorizer`.\n\n \"\"\"\n return Session(authorizer=authorizer)\n",
"def refresh(self):\n \"\"\"Obtain a new personal-use script type access token.\"\"\"\n self._request_token(\n grant_type=\"password\",\n username=self._username,\n password=self._password,\n )\n"
] | #!/usr/bin/env python
"""script_auth_friend_list.py outputs the authenticated user's list of friends.
This program demonstrates the use of ``prawcore.ScriptAuthorizer``, which
enables those listed as a developer of the application to authenticate using
their username and password.
"""
import os
import prawcore
import sys
if __name__ == "__main__":
sys.exit(main())
|
praw-dev/prawcore | examples/caching_requestor.py | main | python | def main():
if len(sys.argv) != 2:
print("Usage: {} USERNAME".format(sys.argv[0]))
return 1
caching_requestor = prawcore.Requestor(
"prawcore_device_id_auth_example", session=CachingSession()
)
authenticator = prawcore.TrustedAuthenticator(
caching_requestor,
os.environ["PRAWCORE_CLIENT_ID"],
os.environ["PRAWCORE_CLIENT_SECRET"],
)
authorizer = prawcore.ReadOnlyAuthorizer(authenticator)
authorizer.refresh()
user = sys.argv[1]
with prawcore.session(authorizer) as session:
data1 = session.request("GET", "/api/v1/user/{}/trophies".format(user))
with prawcore.session(authorizer) as session:
data2 = session.request("GET", "/api/v1/user/{}/trophies".format(user))
for trophy in data1["data"]["trophies"]:
description = trophy["data"]["description"]
print(
"Original:",
trophy["data"]["name"]
+ (" ({})".format(description) if description else ""),
)
for trophy in data2["data"]["trophies"]:
description = trophy["data"]["description"]
print(
"Cached:",
trophy["data"]["name"]
+ (" ({})".format(description) if description else ""),
)
print(
"----\nCached == Original:",
data2["data"]["trophies"] == data2["data"]["trophies"],
)
return 0 | Provide the program's entry point when directly executed. | train | https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/examples/caching_requestor.py#L39-L83 | [
"def session(authorizer=None):\n \"\"\"Return a :class:`Session` instance.\n\n :param authorizer: An instance of :class:`Authorizer`.\n\n \"\"\"\n return Session(authorizer=authorizer)\n",
"def refresh(self):\n \"\"\"Obtain a new ReadOnly access token.\"\"\"\n self._request_token(grant_type=\"client_credentials\")\n"
] | #!/usr/bin/env python
"""This example shows how simple in-memory caching can be used.
Demonstrates the use of custom sessions with ``Requestor``. It's an adaptation
of ``read_only_auth_trophies.py``.
"""
import requests
import prawcore
import os
import sys
class CachingSession(requests.Session):
"""Cache GETs in memory.
Toy example of custom session to showcase the ``session`` parameter of
``Requestor``.
"""
get_cache = {}
def request(self, method, url, params=None, **kwargs):
"""Perform a request, or return a cached response if available."""
params_key = tuple(params.items()) if params else ()
if method.upper() == "GET":
if (url, params_key) in self.get_cache:
print("Returning cached response for:", method, url, params)
return self.get_cache[(url, params_key)]
result = super().request(method, url, params, **kwargs)
if method.upper() == "GET":
self.get_cache[(url, params_key)] = result
print("Adding entry to the cache:", method, url, params)
return result
if __name__ == "__main__":
sys.exit(main())
|
praw-dev/prawcore | examples/caching_requestor.py | CachingSession.request | python | def request(self, method, url, params=None, **kwargs):
params_key = tuple(params.items()) if params else ()
if method.upper() == "GET":
if (url, params_key) in self.get_cache:
print("Returning cached response for:", method, url, params)
return self.get_cache[(url, params_key)]
result = super().request(method, url, params, **kwargs)
if method.upper() == "GET":
self.get_cache[(url, params_key)] = result
print("Adding entry to the cache:", method, url, params)
return result | Perform a request, or return a cached response if available. | train | https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/examples/caching_requestor.py#L25-L36 | null | class CachingSession(requests.Session):
"""Cache GETs in memory.
Toy example of custom session to showcase the ``session`` parameter of
``Requestor``.
"""
get_cache = {}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.