repository_name
stringclasses 316
values | func_path_in_repository
stringlengths 6
223
| func_name
stringlengths 1
134
| language
stringclasses 1
value | func_code_string
stringlengths 57
65.5k
| func_documentation_string
stringlengths 1
46.3k
| split_name
stringclasses 1
value | func_code_url
stringlengths 91
315
| called_functions
listlengths 1
156
⌀ | enclosing_scope
stringlengths 2
1.48M
|
|---|---|---|---|---|---|---|---|---|---|
aleontiev/dj
|
dj/commands/init.py
|
init
|
python
|
def init(name, runtime):
runtime = click.unstyle(runtime)
stdout.write(
style.format_command(
'Initializing',
'%s %s %s' % (name, style.gray('@'), style.green(runtime))
)
)
config = Config(os.getcwd())
config.set('runtime', runtime)
config.save()
generate.main(['init', name], standalone_mode=False)
run.main(['python', 'manage.py', 'migrate'])
|
Create a new Django app.
|
train
|
https://github.com/aleontiev/dj/blob/0612d442fdd8d472aea56466568b9857556ecb51/dj/commands/init.py#L18-L34
|
[
"def format_command(a, b='', prefix=''):\n return (\n white(prefix) +\n blue('%s: ' % a) +\n white(b)\n )\n",
"def green(message):\n return click.style(message, fg='green', bold=True)\n",
"def gray(message):\n return click.style(message, fg='white', bold=False)\n",
"def write(self, message, **kwargs):\n copy = self.kwargs.copy()\n copy.update(kwargs)\n click.echo(click.style(message, **copy))\n"
] |
from __future__ import absolute_import
import os
import click
from dj.config import Config
from dj.utils import style
from .generate import generate
from .base import stdout
from .run import run
@click.command()
@click.argument('name')
@click.option(
'--runtime',
prompt=style.prompt('Python version'),
default=style.default(Config.defaults['runtime'])
)
|
aleontiev/dj
|
dj/commands/add.py
|
add
|
python
|
def add(addon, dev, interactive):
application = get_current_application()
application.add(
addon,
dev=dev,
interactive=interactive
)
|
Add a dependency.
Examples:
$ django add dynamic-rest==1.5.0
+ dynamic-rest == 1.5.0
|
train
|
https://github.com/aleontiev/dj/blob/0612d442fdd8d472aea56466568b9857556ecb51/dj/commands/add.py#L14-L28
|
[
"def get_current_application():\n global current_application\n if not current_application:\n current_application = Application()\n return current_application\n",
"def add(self, addon, dev=False, interactive=True):\n \"\"\"Add a new dependency and install it.\"\"\"\n dependencies = self.get_dependency_manager(dev=dev)\n other_dependencies = self.get_dependency_manager(dev=not dev)\n existing = dependencies.get(addon)\n self.stdout.write(style.format_command('Adding', addon))\n dependencies.add(addon)\n try:\n # try running the build\n self.build()\n self.refresh()\n\n # remove version of this in other requirements file\n other_dependencies.remove(addon, warn=False)\n\n # run new addon constructor\n constructor_name = '%s.init' % Dependency(addon).module_name\n constructor = self.blueprints.get(constructor_name)\n\n if constructor:\n context = constructor.load_context().main(\n [], standalone_mode=False\n )\n self.generate(constructor, context, interactive=interactive)\n except Exception as e:\n # restore original settings\n self.stdout.write(style.red(str(e)))\n self.stdout.write(\n style.yellow('Could not find %s' % addon)\n )\n dependencies.remove(addon)\n if existing:\n dependencies.add(existing)\n return\n"
] |
from __future__ import absolute_import
import click
from dj.application import get_current_application
@click.argument('addon')
@click.option('--dev', is_flag=True)
@click.option(
'--interactive/--not-interactive',
is_flag=True,
default=True
)
@click.command()
|
aleontiev/dj
|
dj/blueprints/command/context.py
|
get_context
|
python
|
def get_context(name, doc):
name = inflection.underscore(name)
return {
'name': name,
'doc': doc or name
}
|
Generate a command with given name.
The command can be run immediately after generation.
For example:
dj generate command bar
dj run manage.py bar
|
train
|
https://github.com/aleontiev/dj/blob/0612d442fdd8d472aea56466568b9857556ecb51/dj/blueprints/command/context.py#L8-L22
| null |
import click
import inflection
@click.command()
@click.argument('name')
@click.option('--doc')
|
aleontiev/dj
|
dj/utils/system.py
|
execute
|
python
|
def execute(
command,
abort=True,
capture=False,
verbose=False,
echo=False,
stream=None,
):
stream = stream or sys.stdout
if echo:
out = stream
out.write(u'$ %s' % command)
# Capture stdout and stderr in the same stream
command = u'%s 2>&1' % command
if verbose:
out = stream
err = stream
else:
out = subprocess.PIPE
err = subprocess.PIPE
process = subprocess.Popen(
command,
shell=True,
stdout=out,
stderr=err,
)
# propagate SIGTERM to all child processes within
# the process group. this prevents subprocesses from
# being orphaned when the current process is terminated
signal.signal(
signal.SIGTERM,
make_terminate_handler(process)
)
# Wait for the process to complete
stdout, _ = process.communicate()
stdout = stdout.strip() if stdout else ''
if not isinstance(stdout, unicode):
stdout = stdout.decode('utf-8')
if abort and process.returncode != 0:
message = (
u'Error #%d running "%s"%s' % (
process.returncode,
command,
':\n====================\n'
'%s\n'
'====================\n' % (
stdout
) if stdout else ''
)
)
raise Exception(message)
if capture:
return stdout
else:
return process
|
Run a command locally.
Arguments:
command: a command to execute.
abort: If True, a non-zero return code will trigger an exception.
capture: If True, returns the output of the command.
If False, returns a subprocess result.
echo: if True, prints the command before executing it.
verbose: If True, prints the output of the command.
stream: If set, stdout/stderr will be redirected to the given stream.
Ignored if `capture` is True.
|
train
|
https://github.com/aleontiev/dj/blob/0612d442fdd8d472aea56466568b9857556ecb51/dj/utils/system.py#L76-L148
|
[
"def make_terminate_handler(process, signal=signal.SIGTERM):\n def inner(*args):\n try:\n os.killpg(os.getpgid(process.pid), signal)\n except OSError:\n pass\n return inner\n"
] |
import os
import click
import sys
import subprocess
import signal
def get_platform_os():
if not exists('uname'):
return 'Windows'
return execute(
'uname -s',
capture=True
)
def get(
directory,
filter=None,
depth=0,
include_files=False,
include_dirs=False
):
if isinstance(filter, basestring):
flt = lambda x: x == filter
if not callable(filter):
# if filter is None/unsupported type, allow all
flt = lambda x: True
else:
flt = filter
for root, dirs, files in os.walk(directory):
search = []
if include_files:
search.extend(files)
if include_dirs:
search.extend(dirs)
for file in search:
file = os.path.join(root, file)
if flt(file):
yield file
depth -= 1
if depth == 0:
break
def get_files(directory, filter=None, depth=0):
return get(directory, filter, depth, include_files=True)
def get_directories(directory, filter=None, depth=0):
return get(directory, filter, depth, include_dirs=True)
def get_last_touched(file):
return os.path.getmtime(file) if os.path.exists(file) else None
def touch(file):
with open(file, 'a'):
os.utime(file, None)
def make_terminate_handler(process, signal=signal.SIGTERM):
def inner(*args):
try:
os.killpg(os.getpgid(process.pid), signal)
except OSError:
pass
return inner
def find_nearest(directory, filename):
full_path = os.path.join(directory, filename)
exists = os.path.exists(full_path)
if exists:
return full_path
if directory == '/':
return None
return find_nearest(
os.path.abspath(os.path.join(directory, os.pardir)),
filename
)
def exists(program):
try:
execute("command -v %s" % program, verbose=False)
return True
except:
return False
class StyleStdout(object):
def __init__(self, **kwargs):
self.kwargs = kwargs
def write(self, message, **kwargs):
copy = self.kwargs.copy()
copy.update(kwargs)
click.echo(click.style(message, **copy))
stdout = StyleStdout(fg='white', bold=True)
|
ttinies/sc2players
|
sc2players/playerRecord.py
|
PlayerRecord.attrs
|
python
|
def attrs(self):
ret = dict(self.__dict__) # obtain copy of internal __dict__
del ret["_matches"] # match history is specifically distinguished from player information (and stored separately)
if self.type != c.COMPUTER: # difficulty only matters for computer playres
del ret["difficulty"]
return ret
|
provide a copy of this player's attributes as a dictionary
|
train
|
https://github.com/ttinies/sc2players/blob/fd9b37c268bf1005d9ef73a25e65ed97c8b7895f/sc2players/playerRecord.py#L97-L103
| null |
class PlayerRecord(object):
"""manage the out-of-game meta data of a given player"""
AVAILABLE_KEYS = [
"name",
"type",
"difficulty",
"initCmd",
"initOptions",
"raceDefault",
"rating",
]
############################################################################
def __init__(self, source=None, **override):
# define default values and their type
self.name = ""
self.type = c.PlayerDesigns(c.HUMAN)
self.difficulty = c.ComputerDifficulties(None) # only matters if type is a computer
self.initCmd = "" # only used if self.type is an AI or bot
self.initOptions = {}
self.rating = c.DEFAULT_RATING
self.created = time.time() # origination timestamp
self.raceDefault = c.RANDOM
self._matches = [] # match history
# initialize with new values
if isinstance(source, str): self.load(source) # assume a player file to load
elif isinstance(source, dict): self.update(source) # assume attribute dictionary
elif isinstance(source, PlayerRecord): self.update(source.__dict__) # copy constructor
self.update(override)
if not self.name:
raise ValueError("must define 'name' parameter as part of %s source settings"%(self.__class__.__name__))
if self.type in [c.BOT, c.AI] and not self.initCmd:
raise ValueError("must provide initCmd attribute when specifying type=%s"%self.type)
############################################################################
def __str__(self): return self.__repr__()
def __repr__(self):
if self.isComputer: diff = "-%s"%self.difficulty.type
elif self.rating: diff = "-%d"%self.rating
else: diff = ""
return "<%s %s %s%s>"%(self.__class__.__name__, self.name, self.type.type, diff)
############################################################################
def __call__(self, attrs, **kwargs):
"""update internals according to parameters"""
self.update(attrs) # allow a dictionary to be passed
self.update(kwargs)
return self
############################################################################
@property
def initOptStr(self):
return " ".join(["%s=%s"%(k,v) for k, v in self.initOptions.items()])
############################################################################
@property
def isAI(self): return self.type == c.AI
############################################################################
@property
def isBot(self): return self.type == c.BOT # an AI with pre-defined, scripted actions
############################################################################
@property
def isHuman(self): return self.type == c.HUMAN
############################################################################
@property
def isComputer(self): return self.type == c.COMPUTER
############################################################################
@property
def isMulti(self): return self.type == c.ARCHON
############################################################################
@property
def isStoredLocal(self):
"""determine whether this player can be run locally"""
raise NotImplementedError("TODO -- determine whether this player is an already known player")
############################################################################
@property
def filename(self):
"""return the absolute path to the object's filename"""
return os.path.join(c.PLAYERS_FOLDER, "player_%s.json"%(self.name))
############################################################################
@property
############################################################################
@property
def simpleAttrs(self):
"""provide a copy of this player's attributes as a dictionary, but with objects flattened into a string representation of the object"""
simpleAttrs = {}
for k,v in iteritems(self.attrs):
if k in ["_matches"]: continue # attributes to specifically ignore
try: simpleAttrs[k] = v.type
except: simpleAttrs[k] = v
return simpleAttrs
############################################################################
@property
def matches(self):
"""retrieve the match history for this player from the matchHistory repo and cache the result"""
return []
#raise NotImplementedError("must finish player history first")
#if not self._matches: # load match History applicable to this player
# self._matches = getPlayerHistory(self.name)
#return self._matches
############################################################################
def _validateAttrs(self, keys):
"""prove that all attributes are defined appropriately"""
badAttrs = []
for k in keys:
if k not in self.__dict__:
badAttrs.append("Attribute key '%s' is not a valid attribute"%(k))
badAttrsMsg = os.linesep.join(badAttrs)
if not keys: return # is iterable, but didn't contain any keys
if badAttrsMsg:
raise ValueError("Encountered invalid attributes. ALLOWED: %s%s%s"\
%(list(self.__dict__), os.linesep, badAttrsMsg))
############################################################################
@property
def control(self):
"""the type of control this player exhibits"""
if self.isComputer: value = c.COMPUTER
else: value = c.PARTICIPANT
return c.PlayerControls(value)
############################################################################
def load(self, playerName=None):
"""retrieve the PlayerRecord settings from saved disk file"""
if playerName: # switch the PlayerRecord this object describes
self.name = playerName # preset value to load self.filename
try:
with open(self.filename, "rb") as f:
data = f.read()
except Exception:
raise ValueError("invalid profile, '%s'. file does not exist: %s"%(self.name, self.filename))
self.update(json.loads(data))
self._matches = [] # mandate match history be recalculated for this newly loaded player
############################################################################
def save(self):
"""save PlayerRecord settings to disk"""
data = str.encode( json.dumps(self.simpleAttrs, indent=4, sort_keys=True) )
with open(self.filename, "wb") as f:
f.write(data)
############################################################################
def update(self, attrs):
"""update attributes initialized with the proper type"""
########################################################################
def convertStrToDict(strVal):
if isinstance(strVal, dict): return strVal
strVal = re.sub("[\{\}]+", "", str(strVal))
regexCol = re.compile(":")
terms = re.split("[,\s]+", strVal)
keyvals = [re.split(regexCol, t) for t in terms]
x = re.compile("['\"]")
ret = {}
boolTrue = re.compile("true" , flags=re.IGNORECASE)
boolFalse = re.compile("false", flags=re.IGNORECASE)
for k, v in keyvals:
k = re.sub(x, "", k)
v = re.sub(x, "", v)
if re.search(boolTrue, v): v = True
elif re.search(boolFalse, v): v = False
else:
if '.' in v:
try: v = float(v)
except: pass
else:
try: v = int(v)
except: pass
ret[k] = v
return ret
########################################################################
self._validateAttrs(attrs)
for k,v in iteritems(attrs):
typecast = type( getattr(self, k) )
if typecast==bool and v=="False": newval = False # "False" evalued as boolean is True because its length > 0
elif issubclass(typecast, c.RestrictedType): # let the RestrictedType handle the type setting, value matching
newval = typecast(v)
elif "<" in str(v) or v==None: newval = typecast(v)
elif k == "initCmd": newval = str(v) # specifically don't mangle the command as specified
elif k == "initOptions": newval = convertStrToDict(v)
else: newval = typecast(str(v).lower())
setattr(self, k, newval)
if self.isComputer: pass
elif "difficulty" in attrs and attrs["difficulty"]!=None: # the final state of this PlayerRecord cannot be a non-computer and specify a difficulty
raise ValueError("%s type %s=%s does not have a difficulty"%(
self.__class__.__name__, self.type.__class__.__name__, self.type.type))
else: self.difficulty = c.ComputerDifficulties(None)
############################################################################
def matchSubset(**kwargs):
"""extract matches from player's entire match history given matching criteria kwargs"""
ret = []
for m in self.matches:
allMatched = True
for k,v in iteritems(kwargs):
mVal = getattr(m, k)
try:
if v == mVal or v in mVal: continue # this check passed
except Exception: pass # if attempting to check __contains__ and exception is raised, it's assumed to be false
allMatched = False
break
if allMatched: ret.append(m)
return ret
############################################################################
def apmRecent(self, maxMatches=c.RECENT_MATCHES, **criteria):
"""collect recent match history's apm data to report player's calculated MMR"""
if not self.matches: return 0 # no apm information without match history
#try: maxMatches = criteria["maxMatches"]
#except: maxMatches = c.RECENT_MATCHES
apms = [m.apm(self) for m in self.recentMatches(maxMatches=maxMatches, **criteria)]
return sum(apms) / len(apms)
############################################################################
def apmAggregate(self, **criteria):
"""collect all match history's apm data to report player's calculated MMR"""
apms = [m.apm(self) for m in self.matchSubset(**criteria)]
if not apms: return 0 # no apm information without match history
return sum(apms) / len(apms)
############################################################################
def recentMatches(self, **criteria):
"""identify all recent matches for player given optional, additional criteria"""
if not self.matches: return [] # no match history
try: # maxMatches is a specially handled parameter (not true criteria)
maxMatches = criteria["maxMatches"]
del criteria["maxMatches"]
except AttributeError:
maxMatches = c.RECENT_MATCHES
alLMatches = self.matchSubset(**criteria)
matchTimes = [(m.endTime, m) for m in matches]
selMatches = sorted(matchTimes)[:maxMatches] # slice off X most recet matches
retMatches = [m for endTime,m in selMatches] # extract matches only
return retMatches
|
ttinies/sc2players
|
sc2players/playerRecord.py
|
PlayerRecord.simpleAttrs
|
python
|
def simpleAttrs(self):
simpleAttrs = {}
for k,v in iteritems(self.attrs):
if k in ["_matches"]: continue # attributes to specifically ignore
try: simpleAttrs[k] = v.type
except: simpleAttrs[k] = v
return simpleAttrs
|
provide a copy of this player's attributes as a dictionary, but with objects flattened into a string representation of the object
|
train
|
https://github.com/ttinies/sc2players/blob/fd9b37c268bf1005d9ef73a25e65ed97c8b7895f/sc2players/playerRecord.py#L106-L113
| null |
class PlayerRecord(object):
"""manage the out-of-game meta data of a given player"""
AVAILABLE_KEYS = [
"name",
"type",
"difficulty",
"initCmd",
"initOptions",
"raceDefault",
"rating",
]
############################################################################
def __init__(self, source=None, **override):
# define default values and their type
self.name = ""
self.type = c.PlayerDesigns(c.HUMAN)
self.difficulty = c.ComputerDifficulties(None) # only matters if type is a computer
self.initCmd = "" # only used if self.type is an AI or bot
self.initOptions = {}
self.rating = c.DEFAULT_RATING
self.created = time.time() # origination timestamp
self.raceDefault = c.RANDOM
self._matches = [] # match history
# initialize with new values
if isinstance(source, str): self.load(source) # assume a player file to load
elif isinstance(source, dict): self.update(source) # assume attribute dictionary
elif isinstance(source, PlayerRecord): self.update(source.__dict__) # copy constructor
self.update(override)
if not self.name:
raise ValueError("must define 'name' parameter as part of %s source settings"%(self.__class__.__name__))
if self.type in [c.BOT, c.AI] and not self.initCmd:
raise ValueError("must provide initCmd attribute when specifying type=%s"%self.type)
############################################################################
def __str__(self): return self.__repr__()
def __repr__(self):
if self.isComputer: diff = "-%s"%self.difficulty.type
elif self.rating: diff = "-%d"%self.rating
else: diff = ""
return "<%s %s %s%s>"%(self.__class__.__name__, self.name, self.type.type, diff)
############################################################################
def __call__(self, attrs, **kwargs):
"""update internals according to parameters"""
self.update(attrs) # allow a dictionary to be passed
self.update(kwargs)
return self
############################################################################
@property
def initOptStr(self):
return " ".join(["%s=%s"%(k,v) for k, v in self.initOptions.items()])
############################################################################
@property
def isAI(self): return self.type == c.AI
############################################################################
@property
def isBot(self): return self.type == c.BOT # an AI with pre-defined, scripted actions
############################################################################
@property
def isHuman(self): return self.type == c.HUMAN
############################################################################
@property
def isComputer(self): return self.type == c.COMPUTER
############################################################################
@property
def isMulti(self): return self.type == c.ARCHON
############################################################################
@property
def isStoredLocal(self):
"""determine whether this player can be run locally"""
raise NotImplementedError("TODO -- determine whether this player is an already known player")
############################################################################
@property
def filename(self):
"""return the absolute path to the object's filename"""
return os.path.join(c.PLAYERS_FOLDER, "player_%s.json"%(self.name))
############################################################################
@property
def attrs(self):
"""provide a copy of this player's attributes as a dictionary"""
ret = dict(self.__dict__) # obtain copy of internal __dict__
del ret["_matches"] # match history is specifically distinguished from player information (and stored separately)
if self.type != c.COMPUTER: # difficulty only matters for computer playres
del ret["difficulty"]
return ret
############################################################################
@property
############################################################################
@property
def matches(self):
"""retrieve the match history for this player from the matchHistory repo and cache the result"""
return []
#raise NotImplementedError("must finish player history first")
#if not self._matches: # load match History applicable to this player
# self._matches = getPlayerHistory(self.name)
#return self._matches
############################################################################
def _validateAttrs(self, keys):
"""prove that all attributes are defined appropriately"""
badAttrs = []
for k in keys:
if k not in self.__dict__:
badAttrs.append("Attribute key '%s' is not a valid attribute"%(k))
badAttrsMsg = os.linesep.join(badAttrs)
if not keys: return # is iterable, but didn't contain any keys
if badAttrsMsg:
raise ValueError("Encountered invalid attributes. ALLOWED: %s%s%s"\
%(list(self.__dict__), os.linesep, badAttrsMsg))
############################################################################
@property
def control(self):
"""the type of control this player exhibits"""
if self.isComputer: value = c.COMPUTER
else: value = c.PARTICIPANT
return c.PlayerControls(value)
############################################################################
def load(self, playerName=None):
"""retrieve the PlayerRecord settings from saved disk file"""
if playerName: # switch the PlayerRecord this object describes
self.name = playerName # preset value to load self.filename
try:
with open(self.filename, "rb") as f:
data = f.read()
except Exception:
raise ValueError("invalid profile, '%s'. file does not exist: %s"%(self.name, self.filename))
self.update(json.loads(data))
self._matches = [] # mandate match history be recalculated for this newly loaded player
############################################################################
def save(self):
"""save PlayerRecord settings to disk"""
data = str.encode( json.dumps(self.simpleAttrs, indent=4, sort_keys=True) )
with open(self.filename, "wb") as f:
f.write(data)
############################################################################
def update(self, attrs):
"""update attributes initialized with the proper type"""
########################################################################
def convertStrToDict(strVal):
if isinstance(strVal, dict): return strVal
strVal = re.sub("[\{\}]+", "", str(strVal))
regexCol = re.compile(":")
terms = re.split("[,\s]+", strVal)
keyvals = [re.split(regexCol, t) for t in terms]
x = re.compile("['\"]")
ret = {}
boolTrue = re.compile("true" , flags=re.IGNORECASE)
boolFalse = re.compile("false", flags=re.IGNORECASE)
for k, v in keyvals:
k = re.sub(x, "", k)
v = re.sub(x, "", v)
if re.search(boolTrue, v): v = True
elif re.search(boolFalse, v): v = False
else:
if '.' in v:
try: v = float(v)
except: pass
else:
try: v = int(v)
except: pass
ret[k] = v
return ret
########################################################################
self._validateAttrs(attrs)
for k,v in iteritems(attrs):
typecast = type( getattr(self, k) )
if typecast==bool and v=="False": newval = False # "False" evalued as boolean is True because its length > 0
elif issubclass(typecast, c.RestrictedType): # let the RestrictedType handle the type setting, value matching
newval = typecast(v)
elif "<" in str(v) or v==None: newval = typecast(v)
elif k == "initCmd": newval = str(v) # specifically don't mangle the command as specified
elif k == "initOptions": newval = convertStrToDict(v)
else: newval = typecast(str(v).lower())
setattr(self, k, newval)
if self.isComputer: pass
elif "difficulty" in attrs and attrs["difficulty"]!=None: # the final state of this PlayerRecord cannot be a non-computer and specify a difficulty
raise ValueError("%s type %s=%s does not have a difficulty"%(
self.__class__.__name__, self.type.__class__.__name__, self.type.type))
else: self.difficulty = c.ComputerDifficulties(None)
############################################################################
def matchSubset(**kwargs):
"""extract matches from player's entire match history given matching criteria kwargs"""
ret = []
for m in self.matches:
allMatched = True
for k,v in iteritems(kwargs):
mVal = getattr(m, k)
try:
if v == mVal or v in mVal: continue # this check passed
except Exception: pass # if attempting to check __contains__ and exception is raised, it's assumed to be false
allMatched = False
break
if allMatched: ret.append(m)
return ret
############################################################################
def apmRecent(self, maxMatches=c.RECENT_MATCHES, **criteria):
"""collect recent match history's apm data to report player's calculated MMR"""
if not self.matches: return 0 # no apm information without match history
#try: maxMatches = criteria["maxMatches"]
#except: maxMatches = c.RECENT_MATCHES
apms = [m.apm(self) for m in self.recentMatches(maxMatches=maxMatches, **criteria)]
return sum(apms) / len(apms)
############################################################################
def apmAggregate(self, **criteria):
"""collect all match history's apm data to report player's calculated MMR"""
apms = [m.apm(self) for m in self.matchSubset(**criteria)]
if not apms: return 0 # no apm information without match history
return sum(apms) / len(apms)
############################################################################
def recentMatches(self, **criteria):
"""identify all recent matches for player given optional, additional criteria"""
if not self.matches: return [] # no match history
try: # maxMatches is a specially handled parameter (not true criteria)
maxMatches = criteria["maxMatches"]
del criteria["maxMatches"]
except AttributeError:
maxMatches = c.RECENT_MATCHES
alLMatches = self.matchSubset(**criteria)
matchTimes = [(m.endTime, m) for m in matches]
selMatches = sorted(matchTimes)[:maxMatches] # slice off X most recet matches
retMatches = [m for endTime,m in selMatches] # extract matches only
return retMatches
|
ttinies/sc2players
|
sc2players/playerRecord.py
|
PlayerRecord._validateAttrs
|
python
|
def _validateAttrs(self, keys):
badAttrs = []
for k in keys:
if k not in self.__dict__:
badAttrs.append("Attribute key '%s' is not a valid attribute"%(k))
badAttrsMsg = os.linesep.join(badAttrs)
if not keys: return # is iterable, but didn't contain any keys
if badAttrsMsg:
raise ValueError("Encountered invalid attributes. ALLOWED: %s%s%s"\
%(list(self.__dict__), os.linesep, badAttrsMsg))
|
prove that all attributes are defined appropriately
|
train
|
https://github.com/ttinies/sc2players/blob/fd9b37c268bf1005d9ef73a25e65ed97c8b7895f/sc2players/playerRecord.py#L124-L134
| null |
class PlayerRecord(object):
"""manage the out-of-game meta data of a given player"""
AVAILABLE_KEYS = [
"name",
"type",
"difficulty",
"initCmd",
"initOptions",
"raceDefault",
"rating",
]
############################################################################
def __init__(self, source=None, **override):
# define default values and their type
self.name = ""
self.type = c.PlayerDesigns(c.HUMAN)
self.difficulty = c.ComputerDifficulties(None) # only matters if type is a computer
self.initCmd = "" # only used if self.type is an AI or bot
self.initOptions = {}
self.rating = c.DEFAULT_RATING
self.created = time.time() # origination timestamp
self.raceDefault = c.RANDOM
self._matches = [] # match history
# initialize with new values
if isinstance(source, str): self.load(source) # assume a player file to load
elif isinstance(source, dict): self.update(source) # assume attribute dictionary
elif isinstance(source, PlayerRecord): self.update(source.__dict__) # copy constructor
self.update(override)
if not self.name:
raise ValueError("must define 'name' parameter as part of %s source settings"%(self.__class__.__name__))
if self.type in [c.BOT, c.AI] and not self.initCmd:
raise ValueError("must provide initCmd attribute when specifying type=%s"%self.type)
############################################################################
def __str__(self): return self.__repr__()
def __repr__(self):
if self.isComputer: diff = "-%s"%self.difficulty.type
elif self.rating: diff = "-%d"%self.rating
else: diff = ""
return "<%s %s %s%s>"%(self.__class__.__name__, self.name, self.type.type, diff)
############################################################################
def __call__(self, attrs, **kwargs):
"""update internals according to parameters"""
self.update(attrs) # allow a dictionary to be passed
self.update(kwargs)
return self
############################################################################
@property
def initOptStr(self):
return " ".join(["%s=%s"%(k,v) for k, v in self.initOptions.items()])
############################################################################
@property
def isAI(self): return self.type == c.AI
############################################################################
@property
def isBot(self): return self.type == c.BOT # an AI with pre-defined, scripted actions
############################################################################
@property
def isHuman(self): return self.type == c.HUMAN
############################################################################
@property
def isComputer(self): return self.type == c.COMPUTER
############################################################################
@property
def isMulti(self): return self.type == c.ARCHON
############################################################################
    @property
    def isStoredLocal(self):
        """determine whether this player can be run locally"""
        # NOTE(review): unimplemented stub -- accessing this property always raises
        raise NotImplementedError("TODO -- determine whether this player is an already known player")
############################################################################
    @property
    def filename(self):
        """return the absolute path to the object's filename"""
        # records live as player_<name>.json files under c.PLAYERS_FOLDER
        return os.path.join(c.PLAYERS_FOLDER, "player_%s.json"%(self.name))
############################################################################
    @property
    def attrs(self):
        """provide a copy of this player's attributes as a dictionary"""
        ret = dict(self.__dict__) # obtain copy of internal __dict__
        del ret["_matches"] # match history is specifically distinguished from player information (and stored separately)
        if self.type != c.COMPUTER: # difficulty only matters for computer players
            del ret["difficulty"]
        return ret
############################################################################
@property
def simpleAttrs(self):
"""provide a copy of this player's attributes as a dictionary, but with objects flattened into a string representation of the object"""
simpleAttrs = {}
for k,v in iteritems(self.attrs):
if k in ["_matches"]: continue # attributes to specifically ignore
try: simpleAttrs[k] = v.type
except: simpleAttrs[k] = v
return simpleAttrs
############################################################################
    @property
    def matches(self):
        """retrieve the match history for this player from the matchHistory repo and cache the result"""
        # NOTE(review): history lookup is stubbed out -- this currently ALWAYS
        # returns an empty list; the commented code below is the intended logic.
        return []
        #raise NotImplementedError("must finish player history first")
        #if not self._matches: # load match History applicable to this player
        #    self._matches = getPlayerHistory(self.name)
        #return self._matches
############################################################################
############################################################################
@property
def control(self):
"""the type of control this player exhibits"""
if self.isComputer: value = c.COMPUTER
else: value = c.PARTICIPANT
return c.PlayerControls(value)
############################################################################
def load(self, playerName=None):
"""retrieve the PlayerRecord settings from saved disk file"""
if playerName: # switch the PlayerRecord this object describes
self.name = playerName # preset value to load self.filename
try:
with open(self.filename, "rb") as f:
data = f.read()
except Exception:
raise ValueError("invalid profile, '%s'. file does not exist: %s"%(self.name, self.filename))
self.update(json.loads(data))
self._matches = [] # mandate match history be recalculated for this newly loaded player
############################################################################
def save(self):
"""save PlayerRecord settings to disk"""
data = str.encode( json.dumps(self.simpleAttrs, indent=4, sort_keys=True) )
with open(self.filename, "wb") as f:
f.write(data)
############################################################################
def update(self, attrs):
"""update attributes initialized with the proper type"""
########################################################################
def convertStrToDict(strVal):
if isinstance(strVal, dict): return strVal
strVal = re.sub("[\{\}]+", "", str(strVal))
regexCol = re.compile(":")
terms = re.split("[,\s]+", strVal)
keyvals = [re.split(regexCol, t) for t in terms]
x = re.compile("['\"]")
ret = {}
boolTrue = re.compile("true" , flags=re.IGNORECASE)
boolFalse = re.compile("false", flags=re.IGNORECASE)
for k, v in keyvals:
k = re.sub(x, "", k)
v = re.sub(x, "", v)
if re.search(boolTrue, v): v = True
elif re.search(boolFalse, v): v = False
else:
if '.' in v:
try: v = float(v)
except: pass
else:
try: v = int(v)
except: pass
ret[k] = v
return ret
########################################################################
self._validateAttrs(attrs)
for k,v in iteritems(attrs):
typecast = type( getattr(self, k) )
if typecast==bool and v=="False": newval = False # "False" evalued as boolean is True because its length > 0
elif issubclass(typecast, c.RestrictedType): # let the RestrictedType handle the type setting, value matching
newval = typecast(v)
elif "<" in str(v) or v==None: newval = typecast(v)
elif k == "initCmd": newval = str(v) # specifically don't mangle the command as specified
elif k == "initOptions": newval = convertStrToDict(v)
else: newval = typecast(str(v).lower())
setattr(self, k, newval)
if self.isComputer: pass
elif "difficulty" in attrs and attrs["difficulty"]!=None: # the final state of this PlayerRecord cannot be a non-computer and specify a difficulty
raise ValueError("%s type %s=%s does not have a difficulty"%(
self.__class__.__name__, self.type.__class__.__name__, self.type.type))
else: self.difficulty = c.ComputerDifficulties(None)
############################################################################
def matchSubset(**kwargs):
"""extract matches from player's entire match history given matching criteria kwargs"""
ret = []
for m in self.matches:
allMatched = True
for k,v in iteritems(kwargs):
mVal = getattr(m, k)
try:
if v == mVal or v in mVal: continue # this check passed
except Exception: pass # if attempting to check __contains__ and exception is raised, it's assumed to be false
allMatched = False
break
if allMatched: ret.append(m)
return ret
############################################################################
def apmRecent(self, maxMatches=c.RECENT_MATCHES, **criteria):
"""collect recent match history's apm data to report player's calculated MMR"""
if not self.matches: return 0 # no apm information without match history
#try: maxMatches = criteria["maxMatches"]
#except: maxMatches = c.RECENT_MATCHES
apms = [m.apm(self) for m in self.recentMatches(maxMatches=maxMatches, **criteria)]
return sum(apms) / len(apms)
############################################################################
def apmAggregate(self, **criteria):
"""collect all match history's apm data to report player's calculated MMR"""
apms = [m.apm(self) for m in self.matchSubset(**criteria)]
if not apms: return 0 # no apm information without match history
return sum(apms) / len(apms)
############################################################################
def recentMatches(self, **criteria):
"""identify all recent matches for player given optional, additional criteria"""
if not self.matches: return [] # no match history
try: # maxMatches is a specially handled parameter (not true criteria)
maxMatches = criteria["maxMatches"]
del criteria["maxMatches"]
except AttributeError:
maxMatches = c.RECENT_MATCHES
alLMatches = self.matchSubset(**criteria)
matchTimes = [(m.endTime, m) for m in matches]
selMatches = sorted(matchTimes)[:maxMatches] # slice off X most recet matches
retMatches = [m for endTime,m in selMatches] # extract matches only
return retMatches
|
ttinies/sc2players
|
sc2players/playerRecord.py
|
PlayerRecord.control
|
python
|
def control(self):
if self.isComputer: value = c.COMPUTER
else: value = c.PARTICIPANT
return c.PlayerControls(value)
|
the type of control this player exhibits
|
train
|
https://github.com/ttinies/sc2players/blob/fd9b37c268bf1005d9ef73a25e65ed97c8b7895f/sc2players/playerRecord.py#L137-L141
| null |
class PlayerRecord(object):
    """manage the out-of-game meta data of a given player

    A record can be built from a saved player name (loaded from disk), an
    attribute dictionary, another PlayerRecord (copy constructor) or keyword
    overrides.  Records persist as JSON files under c.PLAYERS_FOLDER.

    BUGFIXES: an orphaned @property decorator had attached itself to load()
    (breaking self.load(source) in __init__), the control property was missing,
    matchSubset() lacked self, and recentMatches() caught the wrong exception
    and iterated an undefined name.
    """
    AVAILABLE_KEYS = [
        "name",
        "type",
        "difficulty",
        "initCmd",
        "initOptions",
        "raceDefault",
        "rating",
    ]
    ############################################################################
    def __init__(self, source=None, **override):
        # define default values and their type
        self.name = ""
        self.type = c.PlayerDesigns(c.HUMAN)
        self.difficulty = c.ComputerDifficulties(None) # only matters if type is a computer
        self.initCmd = "" # only used if self.type is an AI or bot
        self.initOptions = {}
        self.rating = c.DEFAULT_RATING
        self.created = time.time() # origination timestamp
        self.raceDefault = c.RANDOM
        self._matches = [] # match history
        # initialize with new values
        if isinstance(source, str): self.load(source) # assume a player file to load
        elif isinstance(source, dict): self.update(source) # assume attribute dictionary
        elif isinstance(source, PlayerRecord): self.update(source.__dict__) # copy constructor
        self.update(override)
        if not self.name:
            raise ValueError("must define 'name' parameter as part of %s source settings"%(self.__class__.__name__))
        if self.type in [c.BOT, c.AI] and not self.initCmd:
            raise ValueError("must provide initCmd attribute when specifying type=%s"%self.type)
    ############################################################################
    def __str__(self): return self.__repr__()
    def __repr__(self):
        # suffix shows the difficulty for computer players, else a nonzero rating
        if self.isComputer: diff = "-%s"%self.difficulty.type
        elif self.rating: diff = "-%d"%self.rating
        else: diff = ""
        return "<%s %s %s%s>"%(self.__class__.__name__, self.name, self.type.type, diff)
    ############################################################################
    def __call__(self, attrs, **kwargs):
        """update internals according to parameters; returns self for chaining"""
        self.update(attrs) # allow a dictionary to be passed
        self.update(kwargs)
        return self
    ############################################################################
    @property
    def initOptStr(self):
        """initOptions flattened to a space-separated "key=value" string"""
        return " ".join(["%s=%s"%(k,v) for k, v in self.initOptions.items()])
    ############################################################################
    @property
    def isAI(self): return self.type == c.AI
    ############################################################################
    @property
    def isBot(self): return self.type == c.BOT # an AI with pre-defined, scripted actions
    ############################################################################
    @property
    def isHuman(self): return self.type == c.HUMAN
    ############################################################################
    @property
    def isComputer(self): return self.type == c.COMPUTER
    ############################################################################
    @property
    def isMulti(self): return self.type == c.ARCHON
    ############################################################################
    @property
    def isStoredLocal(self):
        """determine whether this player can be run locally"""
        raise NotImplementedError("TODO -- determine whether this player is an already known player")
    ############################################################################
    @property
    def filename(self):
        """return the absolute path to the object's filename"""
        return os.path.join(c.PLAYERS_FOLDER, "player_%s.json"%(self.name))
    ############################################################################
    @property
    def attrs(self):
        """provide a copy of this player's attributes as a dictionary"""
        ret = dict(self.__dict__) # obtain copy of internal __dict__
        del ret["_matches"] # match history is specifically distinguished from player information (and stored separately)
        if self.type != c.COMPUTER: # difficulty only matters for computer players
            del ret["difficulty"]
        return ret
    ############################################################################
    @property
    def simpleAttrs(self):
        """provide a copy of this player's attributes as a dictionary, but with objects flattened into a string representation of the object"""
        simpleAttrs = {}
        for k,v in iteritems(self.attrs):
            if k in ["_matches"]: continue # attributes to specifically ignore
            try: simpleAttrs[k] = v.type # flatten objects that expose a .type string
            except AttributeError: simpleAttrs[k] = v # plain values pass through unchanged
        return simpleAttrs
    ############################################################################
    @property
    def matches(self):
        """retrieve the match history for this player from the matchHistory repo and cache the result"""
        # NOTE(review): history lookup is stubbed out -- currently always empty
        return []
        #raise NotImplementedError("must finish player history first")
        #if not self._matches: # load match History applicable to this player
        #    self._matches = getPlayerHistory(self.name)
        #return self._matches
    ############################################################################
    def _validateAttrs(self, keys):
        """prove that all attributes are defined appropriately"""
        if not keys: return # is iterable, but didn't contain any keys
        badAttrs = []
        for k in keys:
            if k not in self.__dict__:
                badAttrs.append("Attribute key '%s' is not a valid attribute"%(k))
        badAttrsMsg = os.linesep.join(badAttrs)
        if badAttrsMsg:
            raise ValueError("Encountered invalid attributes. ALLOWED: %s%s%s"\
                %(list(self.__dict__), os.linesep, badAttrsMsg))
    ############################################################################
    @property
    def control(self):
        """the type of control this player exhibits"""
        if self.isComputer: value = c.COMPUTER
        else: value = c.PARTICIPANT
        return c.PlayerControls(value)
    ############################################################################
    def load(self, playerName=None):
        """retrieve the PlayerRecord settings from saved disk file

        NOTE: must stay a plain method (NOT a @property) -- __init__ calls it
        with an argument; property access would trigger a premature load.
        """
        if playerName: # switch the PlayerRecord this object describes
            self.name = playerName # preset value to load self.filename
        try:
            with open(self.filename, "rb") as f:
                data = f.read()
        except (IOError, OSError): # only file-access failures mean a bad profile
            raise ValueError("invalid profile, '%s'. file does not exist: %s"%(self.name, self.filename))
        self.update(json.loads(data))
        self._matches = [] # mandate match history be recalculated for this newly loaded player
    ############################################################################
    def save(self):
        """save PlayerRecord settings to disk"""
        data = str.encode( json.dumps(self.simpleAttrs, indent=4, sort_keys=True) )
        with open(self.filename, "wb") as f:
            f.write(data)
    ############################################################################
    def update(self, attrs):
        """update attributes initialized with the proper type"""
        ########################################################################
        def convertStrToDict(strVal):
            """best-effort parse of a "{k:v, ...}" style string into a dict"""
            if isinstance(strVal, dict): return strVal
            strVal = re.sub(r"[\{\}]+", "", str(strVal))
            regexCol = re.compile(":")
            terms = re.split(r"[,\s]+", strVal)
            keyvals = [re.split(regexCol, t) for t in terms]
            x = re.compile("['\"]")
            ret = {}
            boolTrue  = re.compile("true" , flags=re.IGNORECASE)
            boolFalse = re.compile("false", flags=re.IGNORECASE)
            for k, v in keyvals:
                k = re.sub(x, "", k)
                v = re.sub(x, "", v)
                if re.search(boolTrue, v): v = True
                elif re.search(boolFalse, v): v = False
                else: # coerce numeric-looking strings; leave anything else a str
                    if '.' in v:
                        try: v = float(v)
                        except ValueError: pass
                    else:
                        try: v = int(v)
                        except ValueError: pass
                ret[k] = v
            return ret
        ########################################################################
        self._validateAttrs(attrs)
        for k,v in iteritems(attrs):
            typecast = type( getattr(self, k) )
            if typecast==bool and v=="False": newval = False # "False" evaluated as boolean is True because its length > 0
            elif issubclass(typecast, c.RestrictedType): # let the RestrictedType handle the type setting, value matching
                newval = typecast(v)
            elif "<" in str(v) or v is None: newval = typecast(v)
            elif k == "initCmd": newval = str(v) # specifically don't mangle the command as specified
            elif k == "initOptions": newval = convertStrToDict(v)
            else: newval = typecast(str(v).lower())
            setattr(self, k, newval)
        if self.isComputer: pass
        elif "difficulty" in attrs and attrs["difficulty"]!=None: # the final state of this PlayerRecord cannot be a non-computer and specify a difficulty
            raise ValueError("%s type %s=%s does not have a difficulty"%(
                self.__class__.__name__, self.type.__class__.__name__, self.type.type))
        else: self.difficulty = c.ComputerDifficulties(None)
    ############################################################################
    def matchSubset(self, **kwargs):
        """extract matches from player's entire match history given matching criteria kwargs"""
        # BUGFIX: original signature omitted self, so self.matchSubset(...) raised TypeError
        ret = []
        for m in self.matches:
            allMatched = True
            for k,v in iteritems(kwargs):
                mVal = getattr(m, k)
                try:
                    if v == mVal or v in mVal: continue # this check passed
                except TypeError: pass # mVal isn't a container; treat the containment test as failed
                allMatched = False
                break
            if allMatched: ret.append(m)
        return ret
    ############################################################################
    def apmRecent(self, maxMatches=c.RECENT_MATCHES, **criteria):
        """collect recent match history's apm data to report player's calculated MMR"""
        apms = [m.apm(self) for m in self.recentMatches(maxMatches=maxMatches, **criteria)]
        if not apms: return 0 # guard the filtered list (criteria may exclude every match)
        return sum(apms) / len(apms)
    ############################################################################
    def apmAggregate(self, **criteria):
        """collect all match history's apm data to report player's calculated MMR"""
        apms = [m.apm(self) for m in self.matchSubset(**criteria)]
        if not apms: return 0 # no apm information without match history
        return sum(apms) / len(apms)
    ############################################################################
    def recentMatches(self, **criteria):
        """identify all recent matches for player given optional, additional criteria"""
        if not self.matches: return [] # no match history
        try: # maxMatches is a specially handled parameter (not true criteria)
            maxMatches = criteria["maxMatches"]
            del criteria["maxMatches"]
        except KeyError: # BUGFIX: dict access raises KeyError, not AttributeError
            maxMatches = c.RECENT_MATCHES
        allMatches = self.matchSubset(**criteria) # BUGFIX: result was assigned to a dead name
        matchTimes = [(m.endTime, m) for m in allMatches]
        selMatches = sorted(matchTimes)[:maxMatches] # slice off X most recent matches
        retMatches = [m for endTime,m in selMatches] # extract matches only
        return retMatches
|
ttinies/sc2players
|
sc2players/playerRecord.py
|
PlayerRecord.load
|
python
|
def load(self, playerName=None):
if playerName: # switch the PlayerRecord this object describes
self.name = playerName # preset value to load self.filename
try:
with open(self.filename, "rb") as f:
data = f.read()
except Exception:
raise ValueError("invalid profile, '%s'. file does not exist: %s"%(self.name, self.filename))
self.update(json.loads(data))
self._matches = [] # mandate match history be recalculated for this newly loaded player
|
retrieve the PlayerRecord settings from saved disk file
|
train
|
https://github.com/ttinies/sc2players/blob/fd9b37c268bf1005d9ef73a25e65ed97c8b7895f/sc2players/playerRecord.py#L143-L153
|
[
"def update(self, attrs):\n \"\"\"update attributes initialized with the proper type\"\"\"\n ########################################################################\n def convertStrToDict(strVal):\n if isinstance(strVal, dict): return strVal\n strVal = re.sub(\"[\\{\\}]+\", \"\", str(strVal))\n regexCol = re.compile(\":\")\n terms = re.split(\"[,\\s]+\", strVal)\n keyvals = [re.split(regexCol, t) for t in terms]\n x = re.compile(\"['\\\"]\")\n ret = {}\n boolTrue = re.compile(\"true\" , flags=re.IGNORECASE)\n boolFalse = re.compile(\"false\", flags=re.IGNORECASE)\n for k, v in keyvals:\n k = re.sub(x, \"\", k)\n v = re.sub(x, \"\", v)\n if re.search(boolTrue, v): v = True\n elif re.search(boolFalse, v): v = False\n else:\n if '.' in v:\n try: v = float(v)\n except: pass\n else:\n try: v = int(v)\n except: pass\n ret[k] = v\n return ret\n ########################################################################\n self._validateAttrs(attrs)\n for k,v in iteritems(attrs):\n typecast = type( getattr(self, k) )\n if typecast==bool and v==\"False\": newval = False # \"False\" evalued as boolean is True because its length > 0\n elif issubclass(typecast, c.RestrictedType): # let the RestrictedType handle the type setting, value matching\n newval = typecast(v)\n elif \"<\" in str(v) or v==None: newval = typecast(v)\n elif k == \"initCmd\": newval = str(v) # specifically don't mangle the command as specified\n elif k == \"initOptions\": newval = convertStrToDict(v)\n else: newval = typecast(str(v).lower())\n setattr(self, k, newval)\n if self.isComputer: pass\n elif \"difficulty\" in attrs and attrs[\"difficulty\"]!=None: # the final state of this PlayerRecord cannot be a non-computer and specify a difficulty\n raise ValueError(\"%s type %s=%s does not have a difficulty\"%(\n self.__class__.__name__, self.type.__class__.__name__, self.type.type))\n else: self.difficulty = c.ComputerDifficulties(None)\n"
] |
class PlayerRecord(object):
    """manage the out-of-game meta data of a given player

    A record can be built from a saved player name (loaded from disk), an
    attribute dictionary, another PlayerRecord (copy constructor) or keyword
    overrides.  Records persist as JSON files under c.PLAYERS_FOLDER.

    BUGFIXES: the load() method was missing although __init__ calls
    self.load(source), matchSubset() lacked self, and recentMatches() caught
    the wrong exception and iterated an undefined name.
    """
    AVAILABLE_KEYS = [
        "name",
        "type",
        "difficulty",
        "initCmd",
        "initOptions",
        "raceDefault",
        "rating",
    ]
    ############################################################################
    def __init__(self, source=None, **override):
        # define default values and their type
        self.name = ""
        self.type = c.PlayerDesigns(c.HUMAN)
        self.difficulty = c.ComputerDifficulties(None) # only matters if type is a computer
        self.initCmd = "" # only used if self.type is an AI or bot
        self.initOptions = {}
        self.rating = c.DEFAULT_RATING
        self.created = time.time() # origination timestamp
        self.raceDefault = c.RANDOM
        self._matches = [] # match history
        # initialize with new values
        if isinstance(source, str): self.load(source) # assume a player file to load
        elif isinstance(source, dict): self.update(source) # assume attribute dictionary
        elif isinstance(source, PlayerRecord): self.update(source.__dict__) # copy constructor
        self.update(override)
        if not self.name:
            raise ValueError("must define 'name' parameter as part of %s source settings"%(self.__class__.__name__))
        if self.type in [c.BOT, c.AI] and not self.initCmd:
            raise ValueError("must provide initCmd attribute when specifying type=%s"%self.type)
    ############################################################################
    def __str__(self): return self.__repr__()
    def __repr__(self):
        # suffix shows the difficulty for computer players, else a nonzero rating
        if self.isComputer: diff = "-%s"%self.difficulty.type
        elif self.rating: diff = "-%d"%self.rating
        else: diff = ""
        return "<%s %s %s%s>"%(self.__class__.__name__, self.name, self.type.type, diff)
    ############################################################################
    def __call__(self, attrs, **kwargs):
        """update internals according to parameters; returns self for chaining"""
        self.update(attrs) # allow a dictionary to be passed
        self.update(kwargs)
        return self
    ############################################################################
    @property
    def initOptStr(self):
        """initOptions flattened to a space-separated "key=value" string"""
        return " ".join(["%s=%s"%(k,v) for k, v in self.initOptions.items()])
    ############################################################################
    @property
    def isAI(self): return self.type == c.AI
    ############################################################################
    @property
    def isBot(self): return self.type == c.BOT # an AI with pre-defined, scripted actions
    ############################################################################
    @property
    def isHuman(self): return self.type == c.HUMAN
    ############################################################################
    @property
    def isComputer(self): return self.type == c.COMPUTER
    ############################################################################
    @property
    def isMulti(self): return self.type == c.ARCHON
    ############################################################################
    @property
    def isStoredLocal(self):
        """determine whether this player can be run locally"""
        raise NotImplementedError("TODO -- determine whether this player is an already known player")
    ############################################################################
    @property
    def filename(self):
        """return the absolute path to the object's filename"""
        return os.path.join(c.PLAYERS_FOLDER, "player_%s.json"%(self.name))
    ############################################################################
    @property
    def attrs(self):
        """provide a copy of this player's attributes as a dictionary"""
        ret = dict(self.__dict__) # obtain copy of internal __dict__
        del ret["_matches"] # match history is specifically distinguished from player information (and stored separately)
        if self.type != c.COMPUTER: # difficulty only matters for computer players
            del ret["difficulty"]
        return ret
    ############################################################################
    @property
    def simpleAttrs(self):
        """provide a copy of this player's attributes as a dictionary, but with objects flattened into a string representation of the object"""
        simpleAttrs = {}
        for k,v in iteritems(self.attrs):
            if k in ["_matches"]: continue # attributes to specifically ignore
            try: simpleAttrs[k] = v.type # flatten objects that expose a .type string
            except AttributeError: simpleAttrs[k] = v # plain values pass through unchanged
        return simpleAttrs
    ############################################################################
    @property
    def matches(self):
        """retrieve the match history for this player from the matchHistory repo and cache the result"""
        # NOTE(review): history lookup is stubbed out -- currently always empty
        return []
        #raise NotImplementedError("must finish player history first")
        #if not self._matches: # load match History applicable to this player
        #    self._matches = getPlayerHistory(self.name)
        #return self._matches
    ############################################################################
    def _validateAttrs(self, keys):
        """prove that all attributes are defined appropriately"""
        if not keys: return # is iterable, but didn't contain any keys
        badAttrs = []
        for k in keys:
            if k not in self.__dict__:
                badAttrs.append("Attribute key '%s' is not a valid attribute"%(k))
        badAttrsMsg = os.linesep.join(badAttrs)
        if badAttrsMsg:
            raise ValueError("Encountered invalid attributes. ALLOWED: %s%s%s"\
                %(list(self.__dict__), os.linesep, badAttrsMsg))
    ############################################################################
    @property
    def control(self):
        """the type of control this player exhibits"""
        if self.isComputer: value = c.COMPUTER
        else: value = c.PARTICIPANT
        return c.PlayerControls(value)
    ############################################################################
    def load(self, playerName=None):
        """retrieve the PlayerRecord settings from saved disk file

        BUGFIX: restored -- __init__ calls self.load(source) for str sources.
        """
        if playerName: # switch the PlayerRecord this object describes
            self.name = playerName # preset value to load self.filename
        try:
            with open(self.filename, "rb") as f:
                data = f.read()
        except (IOError, OSError): # only file-access failures mean a bad profile
            raise ValueError("invalid profile, '%s'. file does not exist: %s"%(self.name, self.filename))
        self.update(json.loads(data))
        self._matches = [] # mandate match history be recalculated for this newly loaded player
    ############################################################################
    def save(self):
        """save PlayerRecord settings to disk"""
        data = str.encode( json.dumps(self.simpleAttrs, indent=4, sort_keys=True) )
        with open(self.filename, "wb") as f:
            f.write(data)
    ############################################################################
    def update(self, attrs):
        """update attributes initialized with the proper type"""
        ########################################################################
        def convertStrToDict(strVal):
            """best-effort parse of a "{k:v, ...}" style string into a dict"""
            if isinstance(strVal, dict): return strVal
            strVal = re.sub(r"[\{\}]+", "", str(strVal))
            regexCol = re.compile(":")
            terms = re.split(r"[,\s]+", strVal)
            keyvals = [re.split(regexCol, t) for t in terms]
            x = re.compile("['\"]")
            ret = {}
            boolTrue  = re.compile("true" , flags=re.IGNORECASE)
            boolFalse = re.compile("false", flags=re.IGNORECASE)
            for k, v in keyvals:
                k = re.sub(x, "", k)
                v = re.sub(x, "", v)
                if re.search(boolTrue, v): v = True
                elif re.search(boolFalse, v): v = False
                else: # coerce numeric-looking strings; leave anything else a str
                    if '.' in v:
                        try: v = float(v)
                        except ValueError: pass
                    else:
                        try: v = int(v)
                        except ValueError: pass
                ret[k] = v
            return ret
        ########################################################################
        self._validateAttrs(attrs)
        for k,v in iteritems(attrs):
            typecast = type( getattr(self, k) )
            if typecast==bool and v=="False": newval = False # "False" evaluated as boolean is True because its length > 0
            elif issubclass(typecast, c.RestrictedType): # let the RestrictedType handle the type setting, value matching
                newval = typecast(v)
            elif "<" in str(v) or v is None: newval = typecast(v)
            elif k == "initCmd": newval = str(v) # specifically don't mangle the command as specified
            elif k == "initOptions": newval = convertStrToDict(v)
            else: newval = typecast(str(v).lower())
            setattr(self, k, newval)
        if self.isComputer: pass
        elif "difficulty" in attrs and attrs["difficulty"]!=None: # the final state of this PlayerRecord cannot be a non-computer and specify a difficulty
            raise ValueError("%s type %s=%s does not have a difficulty"%(
                self.__class__.__name__, self.type.__class__.__name__, self.type.type))
        else: self.difficulty = c.ComputerDifficulties(None)
    ############################################################################
    def matchSubset(self, **kwargs):
        """extract matches from player's entire match history given matching criteria kwargs"""
        # BUGFIX: original signature omitted self, so self.matchSubset(...) raised TypeError
        ret = []
        for m in self.matches:
            allMatched = True
            for k,v in iteritems(kwargs):
                mVal = getattr(m, k)
                try:
                    if v == mVal or v in mVal: continue # this check passed
                except TypeError: pass # mVal isn't a container; treat the containment test as failed
                allMatched = False
                break
            if allMatched: ret.append(m)
        return ret
    ############################################################################
    def apmRecent(self, maxMatches=c.RECENT_MATCHES, **criteria):
        """collect recent match history's apm data to report player's calculated MMR"""
        apms = [m.apm(self) for m in self.recentMatches(maxMatches=maxMatches, **criteria)]
        if not apms: return 0 # guard the filtered list (criteria may exclude every match)
        return sum(apms) / len(apms)
    ############################################################################
    def apmAggregate(self, **criteria):
        """collect all match history's apm data to report player's calculated MMR"""
        apms = [m.apm(self) for m in self.matchSubset(**criteria)]
        if not apms: return 0 # no apm information without match history
        return sum(apms) / len(apms)
    ############################################################################
    def recentMatches(self, **criteria):
        """identify all recent matches for player given optional, additional criteria"""
        if not self.matches: return [] # no match history
        try: # maxMatches is a specially handled parameter (not true criteria)
            maxMatches = criteria["maxMatches"]
            del criteria["maxMatches"]
        except KeyError: # BUGFIX: dict access raises KeyError, not AttributeError
            maxMatches = c.RECENT_MATCHES
        allMatches = self.matchSubset(**criteria) # BUGFIX: result was assigned to a dead name
        matchTimes = [(m.endTime, m) for m in allMatches]
        selMatches = sorted(matchTimes)[:maxMatches] # slice off X most recent matches
        retMatches = [m for endTime,m in selMatches] # extract matches only
        return retMatches
|
ttinies/sc2players
|
sc2players/playerRecord.py
|
PlayerRecord.save
|
python
|
def save(self):
data = str.encode( json.dumps(self.simpleAttrs, indent=4, sort_keys=True) )
with open(self.filename, "wb") as f:
f.write(data)
|
save PlayerRecord settings to disk
|
train
|
https://github.com/ttinies/sc2players/blob/fd9b37c268bf1005d9ef73a25e65ed97c8b7895f/sc2players/playerRecord.py#L155-L159
| null |
class PlayerRecord(object):
"""manage the out-of-game meta data of a given player"""
AVAILABLE_KEYS = [
"name",
"type",
"difficulty",
"initCmd",
"initOptions",
"raceDefault",
"rating",
]
############################################################################
def __init__(self, source=None, **override):
# define default values and their type
self.name = ""
self.type = c.PlayerDesigns(c.HUMAN)
self.difficulty = c.ComputerDifficulties(None) # only matters if type is a computer
self.initCmd = "" # only used if self.type is an AI or bot
self.initOptions = {}
self.rating = c.DEFAULT_RATING
self.created = time.time() # origination timestamp
self.raceDefault = c.RANDOM
self._matches = [] # match history
# initialize with new values
if isinstance(source, str): self.load(source) # assume a player file to load
elif isinstance(source, dict): self.update(source) # assume attribute dictionary
elif isinstance(source, PlayerRecord): self.update(source.__dict__) # copy constructor
self.update(override)
if not self.name:
raise ValueError("must define 'name' parameter as part of %s source settings"%(self.__class__.__name__))
if self.type in [c.BOT, c.AI] and not self.initCmd:
raise ValueError("must provide initCmd attribute when specifying type=%s"%self.type)
############################################################################
def __str__(self): return self.__repr__()
def __repr__(self):
if self.isComputer: diff = "-%s"%self.difficulty.type
elif self.rating: diff = "-%d"%self.rating
else: diff = ""
return "<%s %s %s%s>"%(self.__class__.__name__, self.name, self.type.type, diff)
############################################################################
def __call__(self, attrs, **kwargs):
"""update internals according to parameters"""
self.update(attrs) # allow a dictionary to be passed
self.update(kwargs)
return self
############################################################################
    @property
    def initOptStr(self):
        # Flatten initOptions into a "k1=v1 k2=v2" string (insertion order).
        return " ".join(["%s=%s"%(k,v) for k, v in self.initOptions.items()])
    ############################################################################
    # Convenience predicates over self.type (a c.PlayerDesigns value).
    @property
    def isAI(self): return self.type == c.AI
    ############################################################################
    @property
    def isBot(self): return self.type == c.BOT # an AI with pre-defined, scripted actions
    ############################################################################
    @property
    def isHuman(self): return self.type == c.HUMAN
    ############################################################################
    @property
    def isComputer(self): return self.type == c.COMPUTER
    ############################################################################
    @property
    def isMulti(self): return self.type == c.ARCHON
    ############################################################################
    @property
    def isStoredLocal(self):
        """determine whether this player can be run locally"""
        # Intentionally unimplemented; accessing this property always raises.
        raise NotImplementedError("TODO -- determine whether this player is an already known player")
    ############################################################################
    @property
    def filename(self):
        """return the absolute path to the object's filename"""
        return os.path.join(c.PLAYERS_FOLDER, "player_%s.json"%(self.name))
############################################################################
    @property
    def attrs(self):
        """provide a copy of this player's attributes as a dictionary"""
        ret = dict(self.__dict__) # obtain copy of internal __dict__
        del ret["_matches"] # match history is specifically distinguished from player information (and stored separately)
        if self.type != c.COMPUTER: # difficulty only matters for computer players
            del ret["difficulty"]
        return ret
############################################################################
@property
def simpleAttrs(self):
"""provide a copy of this player's attributes as a dictionary, but with objects flattened into a string representation of the object"""
simpleAttrs = {}
for k,v in iteritems(self.attrs):
if k in ["_matches"]: continue # attributes to specifically ignore
try: simpleAttrs[k] = v.type
except: simpleAttrs[k] = v
return simpleAttrs
############################################################################
    @property
    def matches(self):
        """retrieve the match history for this player from the matchHistory repo and cache the result"""
        # NOTE(review): currently a stub -- always reports an empty history.
        # The intended caching implementation is preserved below, disabled
        # until the player-history feature is finished.
        return []
        #raise NotImplementedError("must finish player history first")
        #if not self._matches: # load match History applicable to this player
        #    self._matches = getPlayerHistory(self.name)
        #return self._matches
############################################################################
def _validateAttrs(self, keys):
"""prove that all attributes are defined appropriately"""
badAttrs = []
for k in keys:
if k not in self.__dict__:
badAttrs.append("Attribute key '%s' is not a valid attribute"%(k))
badAttrsMsg = os.linesep.join(badAttrs)
if not keys: return # is iterable, but didn't contain any keys
if badAttrsMsg:
raise ValueError("Encountered invalid attributes. ALLOWED: %s%s%s"\
%(list(self.__dict__), os.linesep, badAttrsMsg))
############################################################################
@property
def control(self):
"""the type of control this player exhibits"""
if self.isComputer: value = c.COMPUTER
else: value = c.PARTICIPANT
return c.PlayerControls(value)
############################################################################
    def load(self, playerName=None):
        """retrieve the PlayerRecord settings from saved disk file

        playerName, when given, retargets this object before loading
        (self.filename is derived from self.name).  Raises ValueError if
        the player file cannot be read.
        """
        if playerName: # switch the PlayerRecord this object describes
            self.name = playerName # preset value to load self.filename
        try:
            with open(self.filename, "rb") as f:
                data = f.read()
        except Exception:
            raise ValueError("invalid profile, '%s'. file does not exist: %s"%(self.name, self.filename))
        self.update(json.loads(data))
        self._matches = [] # mandate match history be recalculated for this newly loaded player
############################################################################
############################################################################
    def update(self, attrs):
        """update attributes initialized with the proper type

        Each incoming value is coerced to the type of the attribute's
        current value; unknown keys raise via _validateAttrs.  Raises
        ValueError if a non-computer record ends up with a difficulty.
        """
        ########################################################################
        def convertStrToDict(strVal):
            # Parse "{k1: v1, k2: v2}"-style text into a dict, coercing
            # true/false and numeric-looking values along the way.
            if isinstance(strVal, dict): return strVal
            strVal = re.sub("[\{\}]+", "", str(strVal))
            regexCol = re.compile(":")
            terms = re.split("[,\s]+", strVal)
            keyvals = [re.split(regexCol, t) for t in terms]
            x = re.compile("['\"]") # strips quote characters from keys/values
            ret = {}
            boolTrue = re.compile("true" , flags=re.IGNORECASE)
            boolFalse = re.compile("false", flags=re.IGNORECASE)
            for k, v in keyvals:
                k = re.sub(x, "", k)
                v = re.sub(x, "", v)
                if re.search(boolTrue, v): v = True
                elif re.search(boolFalse, v): v = False
                else:
                    # best-effort numeric coercion; leave as str on failure
                    if '.' in v:
                        try: v = float(v)
                        except: pass
                    else:
                        try: v = int(v)
                        except: pass
                ret[k] = v
            return ret
        ########################################################################
        self._validateAttrs(attrs)
        for k,v in iteritems(attrs):
            # coerce to the type of the attribute's current (default) value
            typecast = type( getattr(self, k) )
            if typecast==bool and v=="False": newval = False # "False" evaluated as boolean is True because its length > 0
            elif issubclass(typecast, c.RestrictedType): # let the RestrictedType handle the type setting, value matching
                newval = typecast(v)
            elif "<" in str(v) or v==None: newval = typecast(v)
            elif k == "initCmd": newval = str(v) # specifically don't mangle the command as specified
            elif k == "initOptions": newval = convertStrToDict(v)
            else: newval = typecast(str(v).lower())
            setattr(self, k, newval)
        if self.isComputer: pass
        elif "difficulty" in attrs and attrs["difficulty"]!=None: # the final state of this PlayerRecord cannot be a non-computer and specify a difficulty
            raise ValueError("%s type %s=%s does not have a difficulty"%(
                self.__class__.__name__, self.type.__class__.__name__, self.type.type))
        else: self.difficulty = c.ComputerDifficulties(None)
############################################################################
def matchSubset(**kwargs):
"""extract matches from player's entire match history given matching criteria kwargs"""
ret = []
for m in self.matches:
allMatched = True
for k,v in iteritems(kwargs):
mVal = getattr(m, k)
try:
if v == mVal or v in mVal: continue # this check passed
except Exception: pass # if attempting to check __contains__ and exception is raised, it's assumed to be false
allMatched = False
break
if allMatched: ret.append(m)
return ret
############################################################################
def apmRecent(self, maxMatches=c.RECENT_MATCHES, **criteria):
"""collect recent match history's apm data to report player's calculated MMR"""
if not self.matches: return 0 # no apm information without match history
#try: maxMatches = criteria["maxMatches"]
#except: maxMatches = c.RECENT_MATCHES
apms = [m.apm(self) for m in self.recentMatches(maxMatches=maxMatches, **criteria)]
return sum(apms) / len(apms)
############################################################################
def apmAggregate(self, **criteria):
"""collect all match history's apm data to report player's calculated MMR"""
apms = [m.apm(self) for m in self.matchSubset(**criteria)]
if not apms: return 0 # no apm information without match history
return sum(apms) / len(apms)
############################################################################
def recentMatches(self, **criteria):
"""identify all recent matches for player given optional, additional criteria"""
if not self.matches: return [] # no match history
try: # maxMatches is a specially handled parameter (not true criteria)
maxMatches = criteria["maxMatches"]
del criteria["maxMatches"]
except AttributeError:
maxMatches = c.RECENT_MATCHES
alLMatches = self.matchSubset(**criteria)
matchTimes = [(m.endTime, m) for m in matches]
selMatches = sorted(matchTimes)[:maxMatches] # slice off X most recet matches
retMatches = [m for endTime,m in selMatches] # extract matches only
return retMatches
|
ttinies/sc2players
|
sc2players/playerRecord.py
|
PlayerRecord.update
|
python
|
def update(self, attrs):
        """update attributes initialized with the proper type

        Each incoming value is coerced to the type of the attribute's
        current value; unknown keys raise via _validateAttrs.
        """
        ########################################################################
        def convertStrToDict(strVal):
            # Parse "{k1: v1, k2: v2}"-style text into a dict, coercing
            # true/false and numeric-looking values along the way.
            if isinstance(strVal, dict): return strVal
            strVal = re.sub("[\{\}]+", "", str(strVal))
            regexCol = re.compile(":")
            terms = re.split("[,\s]+", strVal)
            keyvals = [re.split(regexCol, t) for t in terms]
            x = re.compile("['\"]") # strips quote characters from keys/values
            ret = {}
            boolTrue = re.compile("true" , flags=re.IGNORECASE)
            boolFalse = re.compile("false", flags=re.IGNORECASE)
            for k, v in keyvals:
                k = re.sub(x, "", k)
                v = re.sub(x, "", v)
                if re.search(boolTrue, v): v = True
                elif re.search(boolFalse, v): v = False
                else:
                    # best-effort numeric coercion; leave as str on failure
                    if '.' in v:
                        try: v = float(v)
                        except: pass
                    else:
                        try: v = int(v)
                        except: pass
                ret[k] = v
            return ret
        ########################################################################
        self._validateAttrs(attrs)
        for k,v in iteritems(attrs):
            # coerce to the type of the attribute's current (default) value
            typecast = type( getattr(self, k) )
            if typecast==bool and v=="False": newval = False # "False" evaluated as boolean is True because its length > 0
            elif issubclass(typecast, c.RestrictedType): # let the RestrictedType handle the type setting, value matching
                newval = typecast(v)
            elif "<" in str(v) or v==None: newval = typecast(v)
            elif k == "initCmd": newval = str(v) # specifically don't mangle the command as specified
            elif k == "initOptions": newval = convertStrToDict(v)
            else: newval = typecast(str(v).lower())
            setattr(self, k, newval)
        if self.isComputer: pass
        elif "difficulty" in attrs and attrs["difficulty"]!=None: # the final state of this PlayerRecord cannot be a non-computer and specify a difficulty
            raise ValueError("%s type %s=%s does not have a difficulty"%(
                self.__class__.__name__, self.type.__class__.__name__, self.type.type))
        else: self.difficulty = c.ComputerDifficulties(None)
|
update attributes initialized with the proper type
|
train
|
https://github.com/ttinies/sc2players/blob/fd9b37c268bf1005d9ef73a25e65ed97c8b7895f/sc2players/playerRecord.py#L161-L204
|
[
"def _validateAttrs(self, keys):\n \"\"\"prove that all attributes are defined appropriately\"\"\"\n badAttrs = []\n for k in keys:\n if k not in self.__dict__:\n badAttrs.append(\"Attribute key '%s' is not a valid attribute\"%(k))\n badAttrsMsg = os.linesep.join(badAttrs)\n if not keys: return # is iterable, but didn't contain any keys\n if badAttrsMsg:\n raise ValueError(\"Encountered invalid attributes. ALLOWED: %s%s%s\"\\\n %(list(self.__dict__), os.linesep, badAttrsMsg))\n"
] |
class PlayerRecord(object):
    """manage the out-of-game meta data of a given player

    NOTE(review): this excerpt references self.update(), which is not
    defined in this view -- confirm it exists at integration.
    """
    AVAILABLE_KEYS = [
        "name",
        "type",
        "difficulty",
        "initCmd",
        "initOptions",
        "raceDefault",
        "rating",
    ]
    ############################################################################
    def __init__(self, source=None, **override):
        """Initialize from a player name (load from disk), an attribute dict,
        another PlayerRecord (copy) or keyword overrides (applied last)."""
        # define default values and their type
        self.name = ""
        self.type = c.PlayerDesigns(c.HUMAN)
        self.difficulty = c.ComputerDifficulties(None) # only matters if type is a computer
        self.initCmd = "" # only used if self.type is an AI or bot
        self.initOptions = {}
        self.rating = c.DEFAULT_RATING
        self.created = time.time() # origination timestamp
        self.raceDefault = c.RANDOM
        self._matches = [] # match history
        # initialize with new values (dispatch on the type of `source`)
        if isinstance(source, str): self.load(source) # assume a player file to load
        elif isinstance(source, dict): self.update(source) # assume attribute dictionary
        elif isinstance(source, PlayerRecord): self.update(source.__dict__) # copy constructor
        self.update(override)
        if not self.name:
            raise ValueError("must define 'name' parameter as part of %s source settings"%(self.__class__.__name__))
        if self.type in [c.BOT, c.AI] and not self.initCmd:
            raise ValueError("must provide initCmd attribute when specifying type=%s"%self.type)
    ############################################################################
    def __str__(self): return self.__repr__()
    def __repr__(self):
        # Suffix encodes either computer difficulty or numeric rating.
        if self.isComputer: diff = "-%s"%self.difficulty.type
        elif self.rating: diff = "-%d"%self.rating
        else: diff = ""
        return "<%s %s %s%s>"%(self.__class__.__name__, self.name, self.type.type, diff)
    ############################################################################
    def __call__(self, attrs, **kwargs):
        """update internals according to parameters"""
        self.update(attrs) # allow a dictionary to be passed
        self.update(kwargs)
        return self
    ############################################################################
    @property
    def initOptStr(self):
        # Flatten initOptions into a "k1=v1 k2=v2" string.
        return " ".join(["%s=%s"%(k,v) for k, v in self.initOptions.items()])
    ############################################################################
    @property
    def isAI(self): return self.type == c.AI
    ############################################################################
    @property
    def isBot(self): return self.type == c.BOT # an AI with pre-defined, scripted actions
    ############################################################################
    @property
    def isHuman(self): return self.type == c.HUMAN
    ############################################################################
    @property
    def isComputer(self): return self.type == c.COMPUTER
    ############################################################################
    @property
    def isMulti(self): return self.type == c.ARCHON
    ############################################################################
    @property
    def isStoredLocal(self):
        """determine whether this player can be run locally"""
        raise NotImplementedError("TODO -- determine whether this player is an already known player")
    ############################################################################
    @property
    def filename(self):
        """return the absolute path to the object's filename"""
        return os.path.join(c.PLAYERS_FOLDER, "player_%s.json"%(self.name))
    ############################################################################
    @property
    def attrs(self):
        """provide a copy of this player's attributes as a dictionary"""
        ret = dict(self.__dict__) # obtain copy of internal __dict__
        del ret["_matches"] # match history is specifically distinguished from player information (and stored separately)
        if self.type != c.COMPUTER: # difficulty only matters for computer players
            del ret["difficulty"]
        return ret
    ############################################################################
    @property
    def simpleAttrs(self):
        """provide a copy of this player's attributes as a dictionary, but with objects flattened into a string representation of the object"""
        simpleAttrs = {}
        for k,v in iteritems(self.attrs):
            if k in ["_matches"]: continue # attributes to specifically ignore
            try:
                simpleAttrs[k] = v.type # RestrictedType wrappers expose .type
            except AttributeError:      # BUG FIX: was a bare `except:`
                simpleAttrs[k] = v
        return simpleAttrs
    ############################################################################
    @property
    def matches(self):
        """retrieve the match history for this player from the matchHistory repo and cache the result"""
        # NOTE(review): stub -- always reports empty history until the
        # player-history feature is finished.
        return []
        #raise NotImplementedError("must finish player history first")
        #if not self._matches: # load match History applicable to this player
        #    self._matches = getPlayerHistory(self.name)
        #return self._matches
    ############################################################################
    def _validateAttrs(self, keys):
        """prove that all attributes are defined appropriately"""
        badAttrs = []
        for k in keys:
            if k not in self.__dict__:
                badAttrs.append("Attribute key '%s' is not a valid attribute"%(k))
        badAttrsMsg = os.linesep.join(badAttrs)
        if not keys: return # is iterable, but didn't contain any keys
        if badAttrsMsg:
            raise ValueError("Encountered invalid attributes. ALLOWED: %s%s%s"\
                %(list(self.__dict__), os.linesep, badAttrsMsg))
    ############################################################################
    @property
    def control(self):
        """the type of control this player exhibits"""
        if self.isComputer: value = c.COMPUTER
        else: value = c.PARTICIPANT
        return c.PlayerControls(value)
    ############################################################################
    def load(self, playerName=None):
        """retrieve the PlayerRecord settings from saved disk file"""
        if playerName: # switch the PlayerRecord this object describes
            self.name = playerName # preset value to load self.filename
        try:
            with open(self.filename, "rb") as f:
                data = f.read()
        except Exception:
            raise ValueError("invalid profile, '%s'. file does not exist: %s"%(self.name, self.filename))
        self.update(json.loads(data))
        self._matches = [] # mandate match history be recalculated for this newly loaded player
    ############################################################################
    def save(self):
        """save PlayerRecord settings to disk"""
        data = str.encode( json.dumps(self.simpleAttrs, indent=4, sort_keys=True) )
        with open(self.filename, "wb") as f:
            f.write(data)
    ############################################################################
    def matchSubset(self, **kwargs):
        """extract matches from player's entire match history given matching criteria kwargs

        BUG FIX: original signature omitted `self` although the body reads
        self.matches, so any method call raised TypeError.
        """
        ret = []
        for m in self.matches:
            allMatched = True
            for k,v in iteritems(kwargs):
                mVal = getattr(m, k)
                try:
                    if v == mVal or v in mVal: continue # this check passed
                except Exception: pass # if attempting to check __contains__ and exception is raised, it's assumed to be false
                allMatched = False
                break
            if allMatched: ret.append(m)
        return ret
    ############################################################################
    def apmRecent(self, maxMatches=c.RECENT_MATCHES, **criteria):
        """collect recent match history's apm data to report player's calculated MMR"""
        if not self.matches: return 0 # no apm information without match history
        apms = [m.apm(self) for m in self.recentMatches(maxMatches=maxMatches, **criteria)]
        if not apms: return 0 # BUG FIX: criteria may filter everything -> avoid ZeroDivisionError
        return sum(apms) / len(apms)
    ############################################################################
    def apmAggregate(self, **criteria):
        """collect all match history's apm data to report player's calculated MMR"""
        apms = [m.apm(self) for m in self.matchSubset(**criteria)]
        if not apms: return 0 # no apm information without match history
        return sum(apms) / len(apms)
    ############################################################################
    def recentMatches(self, **criteria):
        """identify all recent matches for player given optional, additional criteria"""
        if not self.matches: return [] # no match history
        try: # maxMatches is a specially handled parameter (not true criteria)
            maxMatches = criteria.pop("maxMatches")
        except KeyError: # BUG FIX: dict indexing raises KeyError, not AttributeError
            maxMatches = c.RECENT_MATCHES
        allMatches = self.matchSubset(**criteria) # BUG FIX: was typo'd and unused (NameError below)
        matchTimes = [(m.endTime, m) for m in allMatches]
        # BUG FIX: ascending sort kept the OLDEST matches; take newest first.
        selMatches = sorted(matchTimes, key=lambda t: t[0], reverse=True)[:maxMatches]
        retMatches = [m for endTime,m in selMatches] # extract matches only
        return retMatches
|
ttinies/sc2players
|
sc2players/playerRecord.py
|
PlayerRecord.matchSubset
|
python
|
def matchSubset(self, **kwargs):
    """Extract matches from the player's entire match history whose
    attributes satisfy every key=value criterion in kwargs (by equality
    or containment).

    BUG FIX: the original signature omitted ``self`` even though the body
    reads ``self.matches``, so any call raised TypeError.
    """
    ret = []
    for m in self.matches:
        allMatched = True
        for k, v in kwargs.items(): # stdlib equivalent of six.iteritems
            mVal = getattr(m, k)
            try:
                if v == mVal or v in mVal:
                    continue # this criterion passed
            except Exception:
                pass # containment test unsupported -> treat as failed
            allMatched = False
            break
        if allMatched:
            ret.append(m)
    return ret
|
extract matches from player's entire match history given matching criteria kwargs
|
train
|
https://github.com/ttinies/sc2players/blob/fd9b37c268bf1005d9ef73a25e65ed97c8b7895f/sc2players/playerRecord.py#L206-L219
| null |
class PlayerRecord(object):
    """manage the out-of-game meta data of a given player

    NOTE(review): this excerpt references self.matchSubset(), which is not
    defined in this view -- confirm it exists at integration.
    """
    AVAILABLE_KEYS = [
        "name",
        "type",
        "difficulty",
        "initCmd",
        "initOptions",
        "raceDefault",
        "rating",
    ]
    ############################################################################
    def __init__(self, source=None, **override):
        """Initialize from a player name (load from disk), an attribute dict,
        another PlayerRecord (copy) or keyword overrides (applied last)."""
        # define default values and their type
        self.name = ""
        self.type = c.PlayerDesigns(c.HUMAN)
        self.difficulty = c.ComputerDifficulties(None) # only matters if type is a computer
        self.initCmd = "" # only used if self.type is an AI or bot
        self.initOptions = {}
        self.rating = c.DEFAULT_RATING
        self.created = time.time() # origination timestamp
        self.raceDefault = c.RANDOM
        self._matches = [] # match history
        # initialize with new values (dispatch on the type of `source`)
        if isinstance(source, str): self.load(source) # assume a player file to load
        elif isinstance(source, dict): self.update(source) # assume attribute dictionary
        elif isinstance(source, PlayerRecord): self.update(source.__dict__) # copy constructor
        self.update(override)
        if not self.name:
            raise ValueError("must define 'name' parameter as part of %s source settings"%(self.__class__.__name__))
        if self.type in [c.BOT, c.AI] and not self.initCmd:
            raise ValueError("must provide initCmd attribute when specifying type=%s"%self.type)
    ############################################################################
    def __str__(self): return self.__repr__()
    def __repr__(self):
        # Suffix encodes either computer difficulty or numeric rating.
        if self.isComputer: diff = "-%s"%self.difficulty.type
        elif self.rating: diff = "-%d"%self.rating
        else: diff = ""
        return "<%s %s %s%s>"%(self.__class__.__name__, self.name, self.type.type, diff)
    ############################################################################
    def __call__(self, attrs, **kwargs):
        """update internals according to parameters"""
        self.update(attrs) # allow a dictionary to be passed
        self.update(kwargs)
        return self
    ############################################################################
    @property
    def initOptStr(self):
        # Flatten initOptions into a "k1=v1 k2=v2" string.
        return " ".join(["%s=%s"%(k,v) for k, v in self.initOptions.items()])
    ############################################################################
    @property
    def isAI(self): return self.type == c.AI
    ############################################################################
    @property
    def isBot(self): return self.type == c.BOT # an AI with pre-defined, scripted actions
    ############################################################################
    @property
    def isHuman(self): return self.type == c.HUMAN
    ############################################################################
    @property
    def isComputer(self): return self.type == c.COMPUTER
    ############################################################################
    @property
    def isMulti(self): return self.type == c.ARCHON
    ############################################################################
    @property
    def isStoredLocal(self):
        """determine whether this player can be run locally"""
        raise NotImplementedError("TODO -- determine whether this player is an already known player")
    ############################################################################
    @property
    def filename(self):
        """return the absolute path to the object's filename"""
        return os.path.join(c.PLAYERS_FOLDER, "player_%s.json"%(self.name))
    ############################################################################
    @property
    def attrs(self):
        """provide a copy of this player's attributes as a dictionary"""
        ret = dict(self.__dict__) # obtain copy of internal __dict__
        del ret["_matches"] # match history is specifically distinguished from player information (and stored separately)
        if self.type != c.COMPUTER: # difficulty only matters for computer players
            del ret["difficulty"]
        return ret
    ############################################################################
    @property
    def simpleAttrs(self):
        """provide a copy of this player's attributes as a dictionary, but with objects flattened into a string representation of the object"""
        simpleAttrs = {}
        for k,v in iteritems(self.attrs):
            if k in ["_matches"]: continue # attributes to specifically ignore
            try:
                simpleAttrs[k] = v.type # RestrictedType wrappers expose .type
            except AttributeError:      # BUG FIX: was a bare `except:`
                simpleAttrs[k] = v
        return simpleAttrs
    ############################################################################
    @property
    def matches(self):
        """retrieve the match history for this player from the matchHistory repo and cache the result"""
        # NOTE(review): stub -- always reports empty history until the
        # player-history feature is finished.
        return []
        #raise NotImplementedError("must finish player history first")
        #if not self._matches: # load match History applicable to this player
        #    self._matches = getPlayerHistory(self.name)
        #return self._matches
    ############################################################################
    def _validateAttrs(self, keys):
        """prove that all attributes are defined appropriately"""
        badAttrs = []
        for k in keys:
            if k not in self.__dict__:
                badAttrs.append("Attribute key '%s' is not a valid attribute"%(k))
        badAttrsMsg = os.linesep.join(badAttrs)
        if not keys: return # is iterable, but didn't contain any keys
        if badAttrsMsg:
            raise ValueError("Encountered invalid attributes. ALLOWED: %s%s%s"\
                %(list(self.__dict__), os.linesep, badAttrsMsg))
    ############################################################################
    @property
    def control(self):
        """the type of control this player exhibits"""
        if self.isComputer: value = c.COMPUTER
        else: value = c.PARTICIPANT
        return c.PlayerControls(value)
    ############################################################################
    def load(self, playerName=None):
        """retrieve the PlayerRecord settings from saved disk file"""
        if playerName: # switch the PlayerRecord this object describes
            self.name = playerName # preset value to load self.filename
        try:
            with open(self.filename, "rb") as f:
                data = f.read()
        except Exception:
            raise ValueError("invalid profile, '%s'. file does not exist: %s"%(self.name, self.filename))
        self.update(json.loads(data))
        self._matches = [] # mandate match history be recalculated for this newly loaded player
    ############################################################################
    def save(self):
        """save PlayerRecord settings to disk"""
        data = str.encode( json.dumps(self.simpleAttrs, indent=4, sort_keys=True) )
        with open(self.filename, "wb") as f:
            f.write(data)
    ############################################################################
    def update(self, attrs):
        """update attributes initialized with the proper type"""
        ########################################################################
        def convertStrToDict(strVal):
            # Parse "{k1: v1, k2: v2}"-style text into a dict, coercing
            # true/false and numeric-looking values along the way.
            if isinstance(strVal, dict): return strVal
            strVal = re.sub("[\{\}]+", "", str(strVal))
            regexCol = re.compile(":")
            terms = re.split("[,\s]+", strVal)
            keyvals = [re.split(regexCol, t) for t in terms]
            x = re.compile("['\"]")
            ret = {}
            boolTrue = re.compile("true" , flags=re.IGNORECASE)
            boolFalse = re.compile("false", flags=re.IGNORECASE)
            for k, v in keyvals:
                k = re.sub(x, "", k)
                v = re.sub(x, "", v)
                if re.search(boolTrue, v): v = True
                elif re.search(boolFalse, v): v = False
                else:
                    # BUG FIX: bare excepts narrowed to ValueError (the only
                    # expected failure of numeric coercion on a str)
                    if '.' in v:
                        try: v = float(v)
                        except ValueError: pass
                    else:
                        try: v = int(v)
                        except ValueError: pass
                ret[k] = v
            return ret
        ########################################################################
        self._validateAttrs(attrs)
        for k,v in iteritems(attrs):
            typecast = type( getattr(self, k) )
            if typecast==bool and v=="False": newval = False # "False" evaluated as boolean is True because its length > 0
            elif issubclass(typecast, c.RestrictedType): # let the RestrictedType handle the type setting, value matching
                newval = typecast(v)
            elif "<" in str(v) or v==None: newval = typecast(v)
            elif k == "initCmd": newval = str(v) # specifically don't mangle the command as specified
            elif k == "initOptions": newval = convertStrToDict(v)
            else: newval = typecast(str(v).lower())
            setattr(self, k, newval)
        if self.isComputer: pass
        elif "difficulty" in attrs and attrs["difficulty"]!=None: # the final state of this PlayerRecord cannot be a non-computer and specify a difficulty
            raise ValueError("%s type %s=%s does not have a difficulty"%(
                self.__class__.__name__, self.type.__class__.__name__, self.type.type))
        else: self.difficulty = c.ComputerDifficulties(None)
    ############################################################################
    def apmRecent(self, maxMatches=c.RECENT_MATCHES, **criteria):
        """collect recent match history's apm data to report player's calculated MMR"""
        if not self.matches: return 0 # no apm information without match history
        apms = [m.apm(self) for m in self.recentMatches(maxMatches=maxMatches, **criteria)]
        if not apms: return 0 # BUG FIX: criteria may filter everything -> avoid ZeroDivisionError
        return sum(apms) / len(apms)
    ############################################################################
    def apmAggregate(self, **criteria):
        """collect all match history's apm data to report player's calculated MMR"""
        apms = [m.apm(self) for m in self.matchSubset(**criteria)]
        if not apms: return 0 # no apm information without match history
        return sum(apms) / len(apms)
    ############################################################################
    def recentMatches(self, **criteria):
        """identify all recent matches for player given optional, additional criteria"""
        if not self.matches: return [] # no match history
        try: # maxMatches is a specially handled parameter (not true criteria)
            maxMatches = criteria.pop("maxMatches")
        except KeyError: # BUG FIX: dict indexing raises KeyError, not AttributeError
            maxMatches = c.RECENT_MATCHES
        allMatches = self.matchSubset(**criteria) # BUG FIX: was typo'd and unused (NameError below)
        matchTimes = [(m.endTime, m) for m in allMatches]
        # BUG FIX: ascending sort kept the OLDEST matches; take newest first.
        selMatches = sorted(matchTimes, key=lambda t: t[0], reverse=True)[:maxMatches]
        retMatches = [m for endTime,m in selMatches] # extract matches only
        return retMatches
|
ttinies/sc2players
|
sc2players/playerRecord.py
|
PlayerRecord.apmRecent
|
python
|
def apmRecent(self, maxMatches=c.RECENT_MATCHES, **criteria):
    """Average actions-per-minute over the player's most recent matches."""
    if not self.matches:
        return 0 # no apm information without match history
    apms = [m.apm(self) for m in self.recentMatches(maxMatches=maxMatches, **criteria)]
    # BUG FIX: criteria can filter out every match even when history is
    # non-empty; guard against ZeroDivisionError.
    if not apms:
        return 0
    return sum(apms) / len(apms)
|
collect recent match history's apm data to report player's calculated MMR
|
train
|
https://github.com/ttinies/sc2players/blob/fd9b37c268bf1005d9ef73a25e65ed97c8b7895f/sc2players/playerRecord.py#L221-L227
|
[
"def recentMatches(self, **criteria):\n \"\"\"identify all recent matches for player given optional, additional criteria\"\"\"\n if not self.matches: return [] # no match history\n try: # maxMatches is a specially handled parameter (not true criteria)\n maxMatches = criteria[\"maxMatches\"]\n del criteria[\"maxMatches\"]\n except AttributeError:\n maxMatches = c.RECENT_MATCHES\n alLMatches = self.matchSubset(**criteria)\n matchTimes = [(m.endTime, m) for m in matches]\n selMatches = sorted(matchTimes)[:maxMatches] # slice off X most recet matches\n retMatches = [m for endTime,m in selMatches] # extract matches only\n return retMatches\n"
] |
class PlayerRecord(object):
"""manage the out-of-game meta data of a given player"""
AVAILABLE_KEYS = [
"name",
"type",
"difficulty",
"initCmd",
"initOptions",
"raceDefault",
"rating",
]
############################################################################
def __init__(self, source=None, **override):
# define default values and their type
self.name = ""
self.type = c.PlayerDesigns(c.HUMAN)
self.difficulty = c.ComputerDifficulties(None) # only matters if type is a computer
self.initCmd = "" # only used if self.type is an AI or bot
self.initOptions = {}
self.rating = c.DEFAULT_RATING
self.created = time.time() # origination timestamp
self.raceDefault = c.RANDOM
self._matches = [] # match history
# initialize with new values
if isinstance(source, str): self.load(source) # assume a player file to load
elif isinstance(source, dict): self.update(source) # assume attribute dictionary
elif isinstance(source, PlayerRecord): self.update(source.__dict__) # copy constructor
self.update(override)
if not self.name:
raise ValueError("must define 'name' parameter as part of %s source settings"%(self.__class__.__name__))
if self.type in [c.BOT, c.AI] and not self.initCmd:
raise ValueError("must provide initCmd attribute when specifying type=%s"%self.type)
############################################################################
def __str__(self): return self.__repr__()
def __repr__(self):
if self.isComputer: diff = "-%s"%self.difficulty.type
elif self.rating: diff = "-%d"%self.rating
else: diff = ""
return "<%s %s %s%s>"%(self.__class__.__name__, self.name, self.type.type, diff)
############################################################################
def __call__(self, attrs, **kwargs):
"""update internals according to parameters"""
self.update(attrs) # allow a dictionary to be passed
self.update(kwargs)
return self
############################################################################
@property
def initOptStr(self):
return " ".join(["%s=%s"%(k,v) for k, v in self.initOptions.items()])
############################################################################
@property
def isAI(self): return self.type == c.AI
############################################################################
@property
def isBot(self): return self.type == c.BOT # an AI with pre-defined, scripted actions
############################################################################
@property
def isHuman(self): return self.type == c.HUMAN
############################################################################
@property
def isComputer(self): return self.type == c.COMPUTER
############################################################################
@property
def isMulti(self): return self.type == c.ARCHON
############################################################################
@property
def isStoredLocal(self):
"""determine whether this player can be run locally"""
raise NotImplementedError("TODO -- determine whether this player is an already known player")
############################################################################
@property
def filename(self):
"""return the absolute path to the object's filename"""
return os.path.join(c.PLAYERS_FOLDER, "player_%s.json"%(self.name))
############################################################################
@property
def attrs(self):
"""provide a copy of this player's attributes as a dictionary"""
ret = dict(self.__dict__) # obtain copy of internal __dict__
del ret["_matches"] # match history is specifically distinguished from player information (and stored separately)
if self.type != c.COMPUTER: # difficulty only matters for computer playres
del ret["difficulty"]
return ret
############################################################################
@property
def simpleAttrs(self):
"""provide a copy of this player's attributes as a dictionary, but with objects flattened into a string representation of the object"""
simpleAttrs = {}
for k,v in iteritems(self.attrs):
if k in ["_matches"]: continue # attributes to specifically ignore
try: simpleAttrs[k] = v.type
except: simpleAttrs[k] = v
return simpleAttrs
############################################################################
@property
def matches(self):
"""retrieve the match history for this player from the matchHistory repo and cache the result"""
return []
#raise NotImplementedError("must finish player history first")
#if not self._matches: # load match History applicable to this player
# self._matches = getPlayerHistory(self.name)
#return self._matches
############################################################################
def _validateAttrs(self, keys):
"""prove that all attributes are defined appropriately"""
badAttrs = []
for k in keys:
if k not in self.__dict__:
badAttrs.append("Attribute key '%s' is not a valid attribute"%(k))
badAttrsMsg = os.linesep.join(badAttrs)
if not keys: return # is iterable, but didn't contain any keys
if badAttrsMsg:
raise ValueError("Encountered invalid attributes. ALLOWED: %s%s%s"\
%(list(self.__dict__), os.linesep, badAttrsMsg))
############################################################################
@property
def control(self):
"""the type of control this player exhibits"""
if self.isComputer: value = c.COMPUTER
else: value = c.PARTICIPANT
return c.PlayerControls(value)
############################################################################
    def load(self, playerName=None):
        """retrieve the PlayerRecord settings from saved disk file

        playerName -- optional name; when given, this object is repointed at
            that player's file before loading.
        Raises ValueError when the player's json file cannot be read.
        """
        if playerName: # switch the PlayerRecord this object describes
            self.name = playerName # preset value to load self.filename
        try:
            with open(self.filename, "rb") as f:
                data = f.read()
        except Exception: # any read failure is reported as an unknown profile
            raise ValueError("invalid profile, '%s'. file does not exist: %s"%(self.name, self.filename))
        self.update(json.loads(data))
        self._matches = [] # mandate match history be recalculated for this newly loaded player
############################################################################
    def save(self):
        """save PlayerRecord settings to disk

        Writes self.simpleAttrs (flattened attributes) as pretty-printed,
        key-sorted json to self.filename, overwriting any existing file.
        """
        data = str.encode( json.dumps(self.simpleAttrs, indent=4, sort_keys=True) )
        with open(self.filename, "wb") as f:
            f.write(data)
############################################################################
    def update(self, attrs):
        """update attributes initialized with the proper type

        attrs -- mapping of attribute name -> new value; each value is cast
            to the type of the attribute's current default.  Unknown keys
            raise via _validateAttrs.  Ends by enforcing that only COMPUTER
            players may carry a difficulty.
        """
        ########################################################################
        def convertStrToDict(strVal):
            # parse a "{k1:v1, k2:v2}"-style string back into a dict, casting
            # true/false/int/float values; dicts pass through unchanged
            if isinstance(strVal, dict): return strVal
            strVal = re.sub("[\{\}]+", "", str(strVal))
            regexCol = re.compile(":")
            terms = re.split("[,\s]+", strVal)
            keyvals = [re.split(regexCol, t) for t in terms]
            x = re.compile("['\"]")
            ret = {}
            boolTrue = re.compile("true" , flags=re.IGNORECASE)
            boolFalse = re.compile("false", flags=re.IGNORECASE)
            for k, v in keyvals:
                k = re.sub(x, "", k)
                v = re.sub(x, "", v)
                if re.search(boolTrue, v): v = True
                elif re.search(boolFalse, v): v = False
                else:
                    # numeric fallback: '.' selects float, otherwise try int
                    if '.' in v:
                        try: v = float(v)
                        except: pass
                    else:
                        try: v = int(v)
                        except: pass
                ret[k] = v
            return ret
        ########################################################################
        self._validateAttrs(attrs)
        for k,v in iteritems(attrs):
            # cast against the type of the attribute's current value
            typecast = type( getattr(self, k) )
            if typecast==bool and v=="False": newval = False # "False" evalued as boolean is True because its length > 0
            elif issubclass(typecast, c.RestrictedType): # let the RestrictedType handle the type setting, value matching
                newval = typecast(v)
            elif "<" in str(v) or v==None: newval = typecast(v)
            elif k == "initCmd": newval = str(v) # specifically don't mangle the command as specified
            elif k == "initOptions": newval = convertStrToDict(v)
            else: newval = typecast(str(v).lower())
            setattr(self, k, newval)
        if self.isComputer: pass
        elif "difficulty" in attrs and attrs["difficulty"]!=None: # the final state of this PlayerRecord cannot be a non-computer and specify a difficulty
            raise ValueError("%s type %s=%s does not have a difficulty"%(
                self.__class__.__name__, self.type.__class__.__name__, self.type.type))
        else: self.difficulty = c.ComputerDifficulties(None)
############################################################################
def matchSubset(**kwargs):
"""extract matches from player's entire match history given matching criteria kwargs"""
ret = []
for m in self.matches:
allMatched = True
for k,v in iteritems(kwargs):
mVal = getattr(m, k)
try:
if v == mVal or v in mVal: continue # this check passed
except Exception: pass # if attempting to check __contains__ and exception is raised, it's assumed to be false
allMatched = False
break
if allMatched: ret.append(m)
return ret
############################################################################
############################################################################
def apmAggregate(self, **criteria):
"""collect all match history's apm data to report player's calculated MMR"""
apms = [m.apm(self) for m in self.matchSubset(**criteria)]
if not apms: return 0 # no apm information without match history
return sum(apms) / len(apms)
############################################################################
def recentMatches(self, **criteria):
"""identify all recent matches for player given optional, additional criteria"""
if not self.matches: return [] # no match history
try: # maxMatches is a specially handled parameter (not true criteria)
maxMatches = criteria["maxMatches"]
del criteria["maxMatches"]
except AttributeError:
maxMatches = c.RECENT_MATCHES
alLMatches = self.matchSubset(**criteria)
matchTimes = [(m.endTime, m) for m in matches]
selMatches = sorted(matchTimes)[:maxMatches] # slice off X most recet matches
retMatches = [m for endTime,m in selMatches] # extract matches only
return retMatches
|
ttinies/sc2players
|
sc2players/playerRecord.py
|
PlayerRecord.apmAggregate
|
python
|
def apmAggregate(self, **criteria):
apms = [m.apm(self) for m in self.matchSubset(**criteria)]
if not apms: return 0 # no apm information without match history
return sum(apms) / len(apms)
|
collect all match history's apm data to report player's calculated MMR
|
train
|
https://github.com/ttinies/sc2players/blob/fd9b37c268bf1005d9ef73a25e65ed97c8b7895f/sc2players/playerRecord.py#L229-L233
|
[
"def matchSubset(**kwargs):\n \"\"\"extract matches from player's entire match history given matching criteria kwargs\"\"\"\n ret = []\n for m in self.matches:\n allMatched = True\n for k,v in iteritems(kwargs):\n mVal = getattr(m, k)\n try:\n if v == mVal or v in mVal: continue # this check passed\n except Exception: pass # if attempting to check __contains__ and exception is raised, it's assumed to be false\n allMatched = False\n break\n if allMatched: ret.append(m)\n return ret\n"
] |
class PlayerRecord(object):
"""manage the out-of-game meta data of a given player"""
AVAILABLE_KEYS = [
"name",
"type",
"difficulty",
"initCmd",
"initOptions",
"raceDefault",
"rating",
]
############################################################################
def __init__(self, source=None, **override):
# define default values and their type
self.name = ""
self.type = c.PlayerDesigns(c.HUMAN)
self.difficulty = c.ComputerDifficulties(None) # only matters if type is a computer
self.initCmd = "" # only used if self.type is an AI or bot
self.initOptions = {}
self.rating = c.DEFAULT_RATING
self.created = time.time() # origination timestamp
self.raceDefault = c.RANDOM
self._matches = [] # match history
# initialize with new values
if isinstance(source, str): self.load(source) # assume a player file to load
elif isinstance(source, dict): self.update(source) # assume attribute dictionary
elif isinstance(source, PlayerRecord): self.update(source.__dict__) # copy constructor
self.update(override)
if not self.name:
raise ValueError("must define 'name' parameter as part of %s source settings"%(self.__class__.__name__))
if self.type in [c.BOT, c.AI] and not self.initCmd:
raise ValueError("must provide initCmd attribute when specifying type=%s"%self.type)
############################################################################
def __str__(self): return self.__repr__()
def __repr__(self):
if self.isComputer: diff = "-%s"%self.difficulty.type
elif self.rating: diff = "-%d"%self.rating
else: diff = ""
return "<%s %s %s%s>"%(self.__class__.__name__, self.name, self.type.type, diff)
############################################################################
def __call__(self, attrs, **kwargs):
"""update internals according to parameters"""
self.update(attrs) # allow a dictionary to be passed
self.update(kwargs)
return self
############################################################################
@property
def initOptStr(self):
return " ".join(["%s=%s"%(k,v) for k, v in self.initOptions.items()])
############################################################################
@property
def isAI(self): return self.type == c.AI
############################################################################
@property
def isBot(self): return self.type == c.BOT # an AI with pre-defined, scripted actions
############################################################################
@property
def isHuman(self): return self.type == c.HUMAN
############################################################################
@property
def isComputer(self): return self.type == c.COMPUTER
############################################################################
@property
def isMulti(self): return self.type == c.ARCHON
############################################################################
@property
def isStoredLocal(self):
"""determine whether this player can be run locally"""
raise NotImplementedError("TODO -- determine whether this player is an already known player")
############################################################################
@property
def filename(self):
"""return the absolute path to the object's filename"""
return os.path.join(c.PLAYERS_FOLDER, "player_%s.json"%(self.name))
############################################################################
@property
def attrs(self):
"""provide a copy of this player's attributes as a dictionary"""
ret = dict(self.__dict__) # obtain copy of internal __dict__
del ret["_matches"] # match history is specifically distinguished from player information (and stored separately)
if self.type != c.COMPUTER: # difficulty only matters for computer playres
del ret["difficulty"]
return ret
############################################################################
@property
def simpleAttrs(self):
"""provide a copy of this player's attributes as a dictionary, but with objects flattened into a string representation of the object"""
simpleAttrs = {}
for k,v in iteritems(self.attrs):
if k in ["_matches"]: continue # attributes to specifically ignore
try: simpleAttrs[k] = v.type
except: simpleAttrs[k] = v
return simpleAttrs
############################################################################
@property
def matches(self):
"""retrieve the match history for this player from the matchHistory repo and cache the result"""
return []
#raise NotImplementedError("must finish player history first")
#if not self._matches: # load match History applicable to this player
# self._matches = getPlayerHistory(self.name)
#return self._matches
############################################################################
def _validateAttrs(self, keys):
"""prove that all attributes are defined appropriately"""
badAttrs = []
for k in keys:
if k not in self.__dict__:
badAttrs.append("Attribute key '%s' is not a valid attribute"%(k))
badAttrsMsg = os.linesep.join(badAttrs)
if not keys: return # is iterable, but didn't contain any keys
if badAttrsMsg:
raise ValueError("Encountered invalid attributes. ALLOWED: %s%s%s"\
%(list(self.__dict__), os.linesep, badAttrsMsg))
############################################################################
@property
def control(self):
"""the type of control this player exhibits"""
if self.isComputer: value = c.COMPUTER
else: value = c.PARTICIPANT
return c.PlayerControls(value)
############################################################################
def load(self, playerName=None):
"""retrieve the PlayerRecord settings from saved disk file"""
if playerName: # switch the PlayerRecord this object describes
self.name = playerName # preset value to load self.filename
try:
with open(self.filename, "rb") as f:
data = f.read()
except Exception:
raise ValueError("invalid profile, '%s'. file does not exist: %s"%(self.name, self.filename))
self.update(json.loads(data))
self._matches = [] # mandate match history be recalculated for this newly loaded player
############################################################################
def save(self):
"""save PlayerRecord settings to disk"""
data = str.encode( json.dumps(self.simpleAttrs, indent=4, sort_keys=True) )
with open(self.filename, "wb") as f:
f.write(data)
############################################################################
def update(self, attrs):
"""update attributes initialized with the proper type"""
########################################################################
def convertStrToDict(strVal):
if isinstance(strVal, dict): return strVal
strVal = re.sub("[\{\}]+", "", str(strVal))
regexCol = re.compile(":")
terms = re.split("[,\s]+", strVal)
keyvals = [re.split(regexCol, t) for t in terms]
x = re.compile("['\"]")
ret = {}
boolTrue = re.compile("true" , flags=re.IGNORECASE)
boolFalse = re.compile("false", flags=re.IGNORECASE)
for k, v in keyvals:
k = re.sub(x, "", k)
v = re.sub(x, "", v)
if re.search(boolTrue, v): v = True
elif re.search(boolFalse, v): v = False
else:
if '.' in v:
try: v = float(v)
except: pass
else:
try: v = int(v)
except: pass
ret[k] = v
return ret
########################################################################
self._validateAttrs(attrs)
for k,v in iteritems(attrs):
typecast = type( getattr(self, k) )
if typecast==bool and v=="False": newval = False # "False" evalued as boolean is True because its length > 0
elif issubclass(typecast, c.RestrictedType): # let the RestrictedType handle the type setting, value matching
newval = typecast(v)
elif "<" in str(v) or v==None: newval = typecast(v)
elif k == "initCmd": newval = str(v) # specifically don't mangle the command as specified
elif k == "initOptions": newval = convertStrToDict(v)
else: newval = typecast(str(v).lower())
setattr(self, k, newval)
if self.isComputer: pass
elif "difficulty" in attrs and attrs["difficulty"]!=None: # the final state of this PlayerRecord cannot be a non-computer and specify a difficulty
raise ValueError("%s type %s=%s does not have a difficulty"%(
self.__class__.__name__, self.type.__class__.__name__, self.type.type))
else: self.difficulty = c.ComputerDifficulties(None)
############################################################################
def matchSubset(**kwargs):
"""extract matches from player's entire match history given matching criteria kwargs"""
ret = []
for m in self.matches:
allMatched = True
for k,v in iteritems(kwargs):
mVal = getattr(m, k)
try:
if v == mVal or v in mVal: continue # this check passed
except Exception: pass # if attempting to check __contains__ and exception is raised, it's assumed to be false
allMatched = False
break
if allMatched: ret.append(m)
return ret
############################################################################
def apmRecent(self, maxMatches=c.RECENT_MATCHES, **criteria):
"""collect recent match history's apm data to report player's calculated MMR"""
if not self.matches: return 0 # no apm information without match history
#try: maxMatches = criteria["maxMatches"]
#except: maxMatches = c.RECENT_MATCHES
apms = [m.apm(self) for m in self.recentMatches(maxMatches=maxMatches, **criteria)]
return sum(apms) / len(apms)
############################################################################
############################################################################
def recentMatches(self, **criteria):
"""identify all recent matches for player given optional, additional criteria"""
if not self.matches: return [] # no match history
try: # maxMatches is a specially handled parameter (not true criteria)
maxMatches = criteria["maxMatches"]
del criteria["maxMatches"]
except AttributeError:
maxMatches = c.RECENT_MATCHES
alLMatches = self.matchSubset(**criteria)
matchTimes = [(m.endTime, m) for m in matches]
selMatches = sorted(matchTimes)[:maxMatches] # slice off X most recet matches
retMatches = [m for endTime,m in selMatches] # extract matches only
return retMatches
|
ttinies/sc2players
|
sc2players/playerRecord.py
|
PlayerRecord.recentMatches
|
python
|
def recentMatches(self, **criteria):
if not self.matches: return [] # no match history
try: # maxMatches is a specially handled parameter (not true criteria)
maxMatches = criteria["maxMatches"]
del criteria["maxMatches"]
except AttributeError:
maxMatches = c.RECENT_MATCHES
alLMatches = self.matchSubset(**criteria)
matchTimes = [(m.endTime, m) for m in matches]
selMatches = sorted(matchTimes)[:maxMatches] # slice off X most recet matches
retMatches = [m for endTime,m in selMatches] # extract matches only
return retMatches
|
identify all recent matches for player given optional, additional criteria
|
train
|
https://github.com/ttinies/sc2players/blob/fd9b37c268bf1005d9ef73a25e65ed97c8b7895f/sc2players/playerRecord.py#L235-L247
|
[
"def matchSubset(**kwargs):\n \"\"\"extract matches from player's entire match history given matching criteria kwargs\"\"\"\n ret = []\n for m in self.matches:\n allMatched = True\n for k,v in iteritems(kwargs):\n mVal = getattr(m, k)\n try:\n if v == mVal or v in mVal: continue # this check passed\n except Exception: pass # if attempting to check __contains__ and exception is raised, it's assumed to be false\n allMatched = False\n break\n if allMatched: ret.append(m)\n return ret\n"
] |
class PlayerRecord(object):
"""manage the out-of-game meta data of a given player"""
AVAILABLE_KEYS = [
"name",
"type",
"difficulty",
"initCmd",
"initOptions",
"raceDefault",
"rating",
]
############################################################################
def __init__(self, source=None, **override):
# define default values and their type
self.name = ""
self.type = c.PlayerDesigns(c.HUMAN)
self.difficulty = c.ComputerDifficulties(None) # only matters if type is a computer
self.initCmd = "" # only used if self.type is an AI or bot
self.initOptions = {}
self.rating = c.DEFAULT_RATING
self.created = time.time() # origination timestamp
self.raceDefault = c.RANDOM
self._matches = [] # match history
# initialize with new values
if isinstance(source, str): self.load(source) # assume a player file to load
elif isinstance(source, dict): self.update(source) # assume attribute dictionary
elif isinstance(source, PlayerRecord): self.update(source.__dict__) # copy constructor
self.update(override)
if not self.name:
raise ValueError("must define 'name' parameter as part of %s source settings"%(self.__class__.__name__))
if self.type in [c.BOT, c.AI] and not self.initCmd:
raise ValueError("must provide initCmd attribute when specifying type=%s"%self.type)
############################################################################
def __str__(self): return self.__repr__()
def __repr__(self):
if self.isComputer: diff = "-%s"%self.difficulty.type
elif self.rating: diff = "-%d"%self.rating
else: diff = ""
return "<%s %s %s%s>"%(self.__class__.__name__, self.name, self.type.type, diff)
############################################################################
def __call__(self, attrs, **kwargs):
"""update internals according to parameters"""
self.update(attrs) # allow a dictionary to be passed
self.update(kwargs)
return self
############################################################################
@property
def initOptStr(self):
return " ".join(["%s=%s"%(k,v) for k, v in self.initOptions.items()])
############################################################################
@property
def isAI(self): return self.type == c.AI
############################################################################
@property
def isBot(self): return self.type == c.BOT # an AI with pre-defined, scripted actions
############################################################################
@property
def isHuman(self): return self.type == c.HUMAN
############################################################################
@property
def isComputer(self): return self.type == c.COMPUTER
############################################################################
@property
def isMulti(self): return self.type == c.ARCHON
############################################################################
@property
def isStoredLocal(self):
"""determine whether this player can be run locally"""
raise NotImplementedError("TODO -- determine whether this player is an already known player")
############################################################################
@property
def filename(self):
"""return the absolute path to the object's filename"""
return os.path.join(c.PLAYERS_FOLDER, "player_%s.json"%(self.name))
############################################################################
@property
def attrs(self):
"""provide a copy of this player's attributes as a dictionary"""
ret = dict(self.__dict__) # obtain copy of internal __dict__
del ret["_matches"] # match history is specifically distinguished from player information (and stored separately)
if self.type != c.COMPUTER: # difficulty only matters for computer playres
del ret["difficulty"]
return ret
############################################################################
@property
def simpleAttrs(self):
"""provide a copy of this player's attributes as a dictionary, but with objects flattened into a string representation of the object"""
simpleAttrs = {}
for k,v in iteritems(self.attrs):
if k in ["_matches"]: continue # attributes to specifically ignore
try: simpleAttrs[k] = v.type
except: simpleAttrs[k] = v
return simpleAttrs
############################################################################
@property
def matches(self):
"""retrieve the match history for this player from the matchHistory repo and cache the result"""
return []
#raise NotImplementedError("must finish player history first")
#if not self._matches: # load match History applicable to this player
# self._matches = getPlayerHistory(self.name)
#return self._matches
############################################################################
def _validateAttrs(self, keys):
"""prove that all attributes are defined appropriately"""
badAttrs = []
for k in keys:
if k not in self.__dict__:
badAttrs.append("Attribute key '%s' is not a valid attribute"%(k))
badAttrsMsg = os.linesep.join(badAttrs)
if not keys: return # is iterable, but didn't contain any keys
if badAttrsMsg:
raise ValueError("Encountered invalid attributes. ALLOWED: %s%s%s"\
%(list(self.__dict__), os.linesep, badAttrsMsg))
############################################################################
@property
def control(self):
"""the type of control this player exhibits"""
if self.isComputer: value = c.COMPUTER
else: value = c.PARTICIPANT
return c.PlayerControls(value)
############################################################################
def load(self, playerName=None):
"""retrieve the PlayerRecord settings from saved disk file"""
if playerName: # switch the PlayerRecord this object describes
self.name = playerName # preset value to load self.filename
try:
with open(self.filename, "rb") as f:
data = f.read()
except Exception:
raise ValueError("invalid profile, '%s'. file does not exist: %s"%(self.name, self.filename))
self.update(json.loads(data))
self._matches = [] # mandate match history be recalculated for this newly loaded player
############################################################################
def save(self):
"""save PlayerRecord settings to disk"""
data = str.encode( json.dumps(self.simpleAttrs, indent=4, sort_keys=True) )
with open(self.filename, "wb") as f:
f.write(data)
############################################################################
def update(self, attrs):
"""update attributes initialized with the proper type"""
########################################################################
def convertStrToDict(strVal):
if isinstance(strVal, dict): return strVal
strVal = re.sub("[\{\}]+", "", str(strVal))
regexCol = re.compile(":")
terms = re.split("[,\s]+", strVal)
keyvals = [re.split(regexCol, t) for t in terms]
x = re.compile("['\"]")
ret = {}
boolTrue = re.compile("true" , flags=re.IGNORECASE)
boolFalse = re.compile("false", flags=re.IGNORECASE)
for k, v in keyvals:
k = re.sub(x, "", k)
v = re.sub(x, "", v)
if re.search(boolTrue, v): v = True
elif re.search(boolFalse, v): v = False
else:
if '.' in v:
try: v = float(v)
except: pass
else:
try: v = int(v)
except: pass
ret[k] = v
return ret
########################################################################
self._validateAttrs(attrs)
for k,v in iteritems(attrs):
typecast = type( getattr(self, k) )
if typecast==bool and v=="False": newval = False # "False" evalued as boolean is True because its length > 0
elif issubclass(typecast, c.RestrictedType): # let the RestrictedType handle the type setting, value matching
newval = typecast(v)
elif "<" in str(v) or v==None: newval = typecast(v)
elif k == "initCmd": newval = str(v) # specifically don't mangle the command as specified
elif k == "initOptions": newval = convertStrToDict(v)
else: newval = typecast(str(v).lower())
setattr(self, k, newval)
if self.isComputer: pass
elif "difficulty" in attrs and attrs["difficulty"]!=None: # the final state of this PlayerRecord cannot be a non-computer and specify a difficulty
raise ValueError("%s type %s=%s does not have a difficulty"%(
self.__class__.__name__, self.type.__class__.__name__, self.type.type))
else: self.difficulty = c.ComputerDifficulties(None)
############################################################################
def matchSubset(**kwargs):
"""extract matches from player's entire match history given matching criteria kwargs"""
ret = []
for m in self.matches:
allMatched = True
for k,v in iteritems(kwargs):
mVal = getattr(m, k)
try:
if v == mVal or v in mVal: continue # this check passed
except Exception: pass # if attempting to check __contains__ and exception is raised, it's assumed to be false
allMatched = False
break
if allMatched: ret.append(m)
return ret
############################################################################
def apmRecent(self, maxMatches=c.RECENT_MATCHES, **criteria):
"""collect recent match history's apm data to report player's calculated MMR"""
if not self.matches: return 0 # no apm information without match history
#try: maxMatches = criteria["maxMatches"]
#except: maxMatches = c.RECENT_MATCHES
apms = [m.apm(self) for m in self.recentMatches(maxMatches=maxMatches, **criteria)]
return sum(apms) / len(apms)
############################################################################
def apmAggregate(self, **criteria):
"""collect all match history's apm data to report player's calculated MMR"""
apms = [m.apm(self) for m in self.matchSubset(**criteria)]
if not apms: return 0 # no apm information without match history
return sum(apms) / len(apms)
############################################################################
|
ttinies/sc2players
|
sc2players/playerManagement.py
|
addPlayer
|
python
|
def addPlayer(settings):
_validate(settings)
player = PlayerRecord(settings)
player.save()
getKnownPlayers()[player.name] = player
return player
|
define a new PlayerRecord setting and save to disk file
|
train
|
https://github.com/ttinies/sc2players/blob/fd9b37c268bf1005d9ef73a25e65ed97c8b7895f/sc2players/playerManagement.py#L26-L32
|
[
"def getKnownPlayers(reset=False):\n \"\"\"identify all of the currently defined players\"\"\"\n global playerCache\n if not playerCache or reset:\n jsonFiles = os.path.join(c.PLAYERS_FOLDER, \"*.json\")\n for playerFilepath in glob.glob(jsonFiles):\n filename = os.path.basename(playerFilepath)\n name = re.sub(\"^player_\", \"\", filename)\n name = re.sub(\"\\.json$\", \"\", name)\n player = PlayerRecord(name)\n playerCache[player.name] = player\n return playerCache\n",
"def _validate(settings):\n if \"created\" in settings: raise ValueError(\"parameter 'created' is expected to be automatmically generated.\")\n if \"_matches\" in settings: raise ValueError(\"matches are declared after playing matches, not during init.\")\n",
"def save(self):\n \"\"\"save PlayerRecord settings to disk\"\"\"\n data = str.encode( json.dumps(self.simpleAttrs, indent=4, sort_keys=True) )\n with open(self.filename, \"wb\") as f:\n f.write(data)\n"
] |
"""
PURPOSE: manage records of all known players, both local and remote
"""
from __future__ import absolute_import
from __future__ import division # python 2/3 compatibility
from __future__ import print_function # python 2/3 compatibility
from six import iteritems, itervalues # python 2/3 compatibility
import glob
import os
import re
import time
from sc2players import constants as c
from sc2players.playerRecord import PlayerRecord
from sc2players.playerPreGame import PlayerPreGame
################################################################################
playerCache = {} # mapping of player names to PlayerRecord objects
################################################################################
################################################################################
def updatePlayer(name, settings):
    """update an existing PlayerRecord setting and save to disk file

    name     -- existing player name (or PlayerRecord)
    settings -- attribute dict applied on top of the current record
    Returns the updated PlayerRecord.
    BUGFIX: validate the new settings BEFORE deleting the existing record;
    the original deleted first, so an invalid settings dict destroyed the
    stored player file and then raised, losing the record.
    """
    _validate(settings)      # raises ValueError on illegal keys -- record untouched
    player = delPlayer(name) # remove the existing record (disk + cache)
    player.update(settings)
    player.save()
    getKnownPlayers()[player.name] = player
    return player
################################################################################
def getPlayer(name):
    """obtain a specific PlayerRecord settings file"""
    if isinstance(name, PlayerRecord):
        return name # already a record; nothing to look up
    known = getKnownPlayers()
    try:
        return known[name.lower()]
    except KeyError:
        raise ValueError("given player name '%s' is not a known player definition"%(name))
################################################################################
def delPlayer(name):
"""forget about a previously defined PlayerRecord setting by deleting its disk file"""
player = getPlayer(name)
try: os.remove(player.filename) # delete from disk
except IOError: pass # shouldn't happen, but don't crash if the disk data doesn't exist
try: del getKnownPlayers()[player.name] # forget object from cache
except: pass
return player # leave it to the caller to process further or allow deallocation
################################################################################
def buildPlayer(name, ptype, cmd='', options={}, difficulty=None, rating=None,
race=None, obs=False, pid=0, raceDefault=c.RANDOM):
newRating = rating or c.DEFAULT_RATING
if not isinstance(difficulty, c.ComputerDifficulties):
newDiff = c.ComputerDifficulties(difficulty)
else: newDiff = difficulty
ret = PlayerRecord(name=name, type=ptype, initCmd=cmd, initOptions=options,
difficulty=newDiff, rating=newRating, raceDefault=raceDefault)
if bool(race or obs or pid):
return PlayerPreGame(ret, selectedRace=race, observe=obs, playerID=pid)
else: return ret
################################################################################
def getKnownPlayers(reset=False):
"""identify all of the currently defined players"""
global playerCache
if not playerCache or reset:
jsonFiles = os.path.join(c.PLAYERS_FOLDER, "*.json")
for playerFilepath in glob.glob(jsonFiles):
filename = os.path.basename(playerFilepath)
name = re.sub("^player_", "", filename)
name = re.sub("\.json$", "", name)
player = PlayerRecord(name)
playerCache[player.name] = player
return playerCache
################################################################################
def getBlizzBotPlayers():
"""identify all of Blizzard's built-in bots"""
ret = {}
for pName,p in iteritems(getKnownPlayers()):
if p.isComputer:
ret[pName] = p
return ret
################################################################################
def getStaleRecords(limit=c.DEFAULT_TIME_LIMIT):
ret = []
now = time.time()
seconds = float(limit) * 24 * 60 * 60 # convert days to seconds
maxNoAct= min(seconds, c.NO_ACTIVITY_LIMIT * 24 * 60 * 60) # convert days to seconds
for player in itervalues(getKnownPlayers()):
if player.matches: # can only determine time since last match if matches exist
sinceLastMatch, match = sorted( # player's last match is the shortest time since now
[(now - m.endTime, m) for m in player.matches])[0]
if sinceLastMatch > seconds:
ret.append(player)
else: # if no matches, verify player's time since creation for sufficient time to play a match
sinceCreation = now - player.created
if sinceCreation > maxNoAct: # players created > 10 days ago without any recorded matches are identifed as stale
ret.append(player)
return ret
################################################################################
def removeStaleRecords(**kwargs):
"""identify all currently stale records and remove them"""
return [delPlayer(record) for record in getStaleRecords(**kwargs)]
################################################################################
def _validate(settings):
if "created" in settings: raise ValueError("parameter 'created' is expected to be automatmically generated.")
if "_matches" in settings: raise ValueError("matches are declared after playing matches, not during init.")
################################################################################
__all__ = ["addPlayer", "getPlayer", "delPlayer", "getKnownPlayers", "getBlizzBotPlayers",
"updatePlayer", "getStaleRecords", "removeStaleRecords"]
|
ttinies/sc2players
|
sc2players/playerManagement.py
|
updatePlayer
|
python
|
def updatePlayer(name, settings):
player = delPlayer(name) # remove the existing record
_validate(settings)
player.update(settings)
player.save()
getKnownPlayers()[player.name] = player
return player
|
update an existing PlayerRecord setting and save to disk file
|
train
|
https://github.com/ttinies/sc2players/blob/fd9b37c268bf1005d9ef73a25e65ed97c8b7895f/sc2players/playerManagement.py#L36-L43
|
[
"def delPlayer(name):\n \"\"\"forget about a previously defined PlayerRecord setting by deleting its disk file\"\"\"\n player = getPlayer(name)\n try: os.remove(player.filename) # delete from disk\n except IOError: pass # shouldn't happen, but don't crash if the disk data doesn't exist\n try: del getKnownPlayers()[player.name] # forget object from cache\n except: pass\n return player # leave it to the caller to process further or allow deallocation \n",
"def getKnownPlayers(reset=False):\n \"\"\"identify all of the currently defined players\"\"\"\n global playerCache\n if not playerCache or reset:\n jsonFiles = os.path.join(c.PLAYERS_FOLDER, \"*.json\")\n for playerFilepath in glob.glob(jsonFiles):\n filename = os.path.basename(playerFilepath)\n name = re.sub(\"^player_\", \"\", filename)\n name = re.sub(\"\\.json$\", \"\", name)\n player = PlayerRecord(name)\n playerCache[player.name] = player\n return playerCache\n",
"def _validate(settings):\n if \"created\" in settings: raise ValueError(\"parameter 'created' is expected to be automatmically generated.\")\n if \"_matches\" in settings: raise ValueError(\"matches are declared after playing matches, not during init.\")\n"
] |
"""
PURPOSE: manage records of all known players, both local and remote
"""
from __future__ import absolute_import
from __future__ import division # python 2/3 compatibility
from __future__ import print_function # python 2/3 compatibility
from six import iteritems, itervalues # python 2/3 compatibility
import glob
import os
import re
import time
from sc2players import constants as c
from sc2players.playerRecord import PlayerRecord
from sc2players.playerPreGame import PlayerPreGame
################################################################################
playerCache = {} # mapping of player names to PlayerRecord objects
################################################################################
def addPlayer(settings):
    """Define a new PlayerRecord setting and save it to its disk file.

    Raises ValueError (via _validate) if settings contains keys that are
    managed automatically rather than supplied by the caller.
    """
    _validate(settings)  # reject auto-generated keys before building the record
    record = PlayerRecord(settings)
    record.save()  # persist to disk immediately
    getKnownPlayers()[record.name] = record  # register in the module cache
    return record
################################################################################
################################################################################
def getPlayer(name):
    """Obtain a specific PlayerRecord settings file."""
    if isinstance(name, PlayerRecord):
        return name  # already a record object; nothing to look up
    players = getKnownPlayers()
    key = name.lower()  # cache keys are lowercase player names
    if key in players:
        return players[key]
    raise ValueError("given player name '%s' is not a known player definition"%(name))
################################################################################
def delPlayer(name):
    """Forget about a previously defined PlayerRecord setting by deleting its disk file.

    Returns the removed PlayerRecord so the caller can process it further
    or simply allow it to be deallocated.
    """
    player = getPlayer(name)
    try:
        os.remove(player.filename)  # delete from disk
    except OSError:
        # os.remove raises OSError (not IOError) when the file is missing;
        # don't crash if the disk data doesn't exist
        pass
    try:
        del getKnownPlayers()[player.name]  # forget object from cache
    except KeyError:
        pass  # not cached; nothing to forget
    return player
################################################################################
def buildPlayer(name, ptype, cmd='', options=None, difficulty=None, rating=None,
                race=None, obs=False, pid=0, raceDefault=c.RANDOM):
    """Construct a PlayerRecord from raw settings.

    If any pre-game attribute (race, obs, pid) is supplied, the record is
    wrapped in a PlayerPreGame before being returned.
    """
    if options is None:
        options = {}  # fresh dict per call; a {} default would be shared across calls
    newRating = rating or c.DEFAULT_RATING
    if isinstance(difficulty, c.ComputerDifficulties):
        newDiff = difficulty
    else:
        newDiff = c.ComputerDifficulties(difficulty)  # coerce raw value into the enum
    ret = PlayerRecord(name=name, type=ptype, initCmd=cmd, initOptions=options,
                      difficulty=newDiff, rating=newRating, raceDefault=raceDefault)
    if race or obs or pid:  # any pre-game attribute requested
        return PlayerPreGame(ret, selectedRace=race, observe=obs, playerID=pid)
    return ret
################################################################################
def getKnownPlayers(reset=False):
    """Identify all of the currently defined players.

    Populates (or, with reset=True, repopulates) the module-level cache by
    scanning the players folder for player_*.json files; returns the cache
    mapping lowercase player names to PlayerRecord objects.
    """
    global playerCache
    if not playerCache or reset:
        jsonFiles = os.path.join(c.PLAYERS_FOLDER, "*.json")
        for playerFilepath in glob.glob(jsonFiles):
            filename = os.path.basename(playerFilepath)
            # strip the on-disk "player_<name>.json" wrapping; raw strings keep
            # "\." a valid regex escape (plain "\." is an invalid str escape)
            name = re.sub(r"^player_", "", filename)
            name = re.sub(r"\.json$", "", name)
            player = PlayerRecord(name)
            playerCache[player.name] = player
    return playerCache
################################################################################
def getBlizzBotPlayers():
    """identify all of Blizzard's built-in bots"""
    # keep only the computer-controlled entries from the known-player cache
    return {name: player
            for name, player in iteritems(getKnownPlayers())
            if player.isComputer}
################################################################################
def getStaleRecords(limit=c.DEFAULT_TIME_LIMIT):
    """Collect PlayerRecord objects that appear inactive.

    A player is stale when its most recent match ended more than `limit`
    days ago, or -- when it has played no matches at all -- when it was
    created longer ago than the no-activity window (capped at `limit`).

    Args:
        limit: staleness threshold in days.

    Returns:
        list of stale PlayerRecord objects (not removed; see removeStaleRecords).
    """
    ret = []
    now = time.time()
    seconds = float(limit) * 24 * 60 * 60 # convert days to seconds
    maxNoAct= min(seconds, c.NO_ACTIVITY_LIMIT * 24 * 60 * 60) # convert days to seconds
    for player in itervalues(getKnownPlayers()):
        if player.matches: # can only determine time since last match if matches exist
            sinceLastMatch, match = sorted( # player's last match is the shortest time since now
                [(now - m.endTime, m) for m in player.matches])[0]
            if sinceLastMatch > seconds:
                ret.append(player)
        else: # if no matches, verify player's time since creation for sufficient time to play a match
            sinceCreation = now - player.created
            if sinceCreation > maxNoAct: # players created > 10 days ago without any recorded matches are identifed as stale
                ret.append(player)
    return ret
################################################################################
def removeStaleRecords(**kwargs):
    """identify all currently stale records and remove them"""
    removed = []
    for record in getStaleRecords(**kwargs):  # kwargs forwarded (e.g. limit=...)
        removed.append(delPlayer(record))
    return removed
################################################################################
def _validate(settings):
if "created" in settings: raise ValueError("parameter 'created' is expected to be automatmically generated.")
if "_matches" in settings: raise ValueError("matches are declared after playing matches, not during init.")
################################################################################
__all__ = ["addPlayer", "getPlayer", "delPlayer", "getKnownPlayers", "getBlizzBotPlayers",
"updatePlayer", "getStaleRecords", "removeStaleRecords"]
|
ttinies/sc2players
|
sc2players/playerManagement.py
|
getPlayer
|
python
|
def getPlayer(name):
if isinstance(name, PlayerRecord): return name
try: return getKnownPlayers()[name.lower()]
except KeyError:
raise ValueError("given player name '%s' is not a known player definition"%(name))
|
obtain a specific PlayerRecord settings file
|
train
|
https://github.com/ttinies/sc2players/blob/fd9b37c268bf1005d9ef73a25e65ed97c8b7895f/sc2players/playerManagement.py#L47-L52
|
[
"def getKnownPlayers(reset=False):\n \"\"\"identify all of the currently defined players\"\"\"\n global playerCache\n if not playerCache or reset:\n jsonFiles = os.path.join(c.PLAYERS_FOLDER, \"*.json\")\n for playerFilepath in glob.glob(jsonFiles):\n filename = os.path.basename(playerFilepath)\n name = re.sub(\"^player_\", \"\", filename)\n name = re.sub(\"\\.json$\", \"\", name)\n player = PlayerRecord(name)\n playerCache[player.name] = player\n return playerCache\n"
] |
"""
PURPOSE: manage records of all known players, both local and remote
"""
from __future__ import absolute_import
from __future__ import division # python 2/3 compatibility
from __future__ import print_function # python 2/3 compatibility
from six import iteritems, itervalues # python 2/3 compatibility
import glob
import os
import re
import time
from sc2players import constants as c
from sc2players.playerRecord import PlayerRecord
from sc2players.playerPreGame import PlayerPreGame
################################################################################
playerCache = {} # mapping of player names to PlayerRecord objects
################################################################################
def addPlayer(settings):
"""define a new PlayerRecord setting and save to disk file"""
_validate(settings)
player = PlayerRecord(settings)
player.save()
getKnownPlayers()[player.name] = player
return player
################################################################################
def updatePlayer(name, settings):
    """update an existing PlayerRecord setting and save to disk file"""
    # drop the existing record first, then rebuild it with the new settings
    existing = delPlayer(name)
    _validate(settings)  # reject auto-managed keys
    existing.update(settings)
    existing.save()  # persist the merged record back to disk
    getKnownPlayers()[existing.name] = existing  # re-register in the cache
    return existing
################################################################################
################################################################################
def delPlayer(name):
"""forget about a previously defined PlayerRecord setting by deleting its disk file"""
player = getPlayer(name)
try: os.remove(player.filename) # delete from disk
except IOError: pass # shouldn't happen, but don't crash if the disk data doesn't exist
try: del getKnownPlayers()[player.name] # forget object from cache
except: pass
return player # leave it to the caller to process further or allow deallocation
################################################################################
def buildPlayer(name, ptype, cmd='', options={}, difficulty=None, rating=None,
race=None, obs=False, pid=0, raceDefault=c.RANDOM):
newRating = rating or c.DEFAULT_RATING
if not isinstance(difficulty, c.ComputerDifficulties):
newDiff = c.ComputerDifficulties(difficulty)
else: newDiff = difficulty
ret = PlayerRecord(name=name, type=ptype, initCmd=cmd, initOptions=options,
difficulty=newDiff, rating=newRating, raceDefault=raceDefault)
if bool(race or obs or pid):
return PlayerPreGame(ret, selectedRace=race, observe=obs, playerID=pid)
else: return ret
################################################################################
def getKnownPlayers(reset=False):
"""identify all of the currently defined players"""
global playerCache
if not playerCache or reset:
jsonFiles = os.path.join(c.PLAYERS_FOLDER, "*.json")
for playerFilepath in glob.glob(jsonFiles):
filename = os.path.basename(playerFilepath)
name = re.sub("^player_", "", filename)
name = re.sub("\.json$", "", name)
player = PlayerRecord(name)
playerCache[player.name] = player
return playerCache
################################################################################
def getBlizzBotPlayers():
"""identify all of Blizzard's built-in bots"""
ret = {}
for pName,p in iteritems(getKnownPlayers()):
if p.isComputer:
ret[pName] = p
return ret
################################################################################
def getStaleRecords(limit=c.DEFAULT_TIME_LIMIT):
ret = []
now = time.time()
seconds = float(limit) * 24 * 60 * 60 # convert days to seconds
maxNoAct= min(seconds, c.NO_ACTIVITY_LIMIT * 24 * 60 * 60) # convert days to seconds
for player in itervalues(getKnownPlayers()):
if player.matches: # can only determine time since last match if matches exist
sinceLastMatch, match = sorted( # player's last match is the shortest time since now
[(now - m.endTime, m) for m in player.matches])[0]
if sinceLastMatch > seconds:
ret.append(player)
else: # if no matches, verify player's time since creation for sufficient time to play a match
sinceCreation = now - player.created
if sinceCreation > maxNoAct: # players created > 10 days ago without any recorded matches are identifed as stale
ret.append(player)
return ret
################################################################################
def removeStaleRecords(**kwargs):
"""identify all currently stale records and remove them"""
return [delPlayer(record) for record in getStaleRecords(**kwargs)]
################################################################################
def _validate(settings):
if "created" in settings: raise ValueError("parameter 'created' is expected to be automatmically generated.")
if "_matches" in settings: raise ValueError("matches are declared after playing matches, not during init.")
################################################################################
__all__ = ["addPlayer", "getPlayer", "delPlayer", "getKnownPlayers", "getBlizzBotPlayers",
"updatePlayer", "getStaleRecords", "removeStaleRecords"]
|
ttinies/sc2players
|
sc2players/playerManagement.py
|
delPlayer
|
python
|
def delPlayer(name):
player = getPlayer(name)
try: os.remove(player.filename) # delete from disk
except IOError: pass # shouldn't happen, but don't crash if the disk data doesn't exist
try: del getKnownPlayers()[player.name] # forget object from cache
except: pass
return player
|
forget about a previously defined PlayerRecord setting by deleting its disk file
|
train
|
https://github.com/ttinies/sc2players/blob/fd9b37c268bf1005d9ef73a25e65ed97c8b7895f/sc2players/playerManagement.py#L56-L63
|
[
"def getPlayer(name):\n \"\"\"obtain a specific PlayerRecord settings file\"\"\"\n if isinstance(name, PlayerRecord): return name\n try: return getKnownPlayers()[name.lower()]\n except KeyError:\n raise ValueError(\"given player name '%s' is not a known player definition\"%(name))\n",
"def getKnownPlayers(reset=False):\n \"\"\"identify all of the currently defined players\"\"\"\n global playerCache\n if not playerCache or reset:\n jsonFiles = os.path.join(c.PLAYERS_FOLDER, \"*.json\")\n for playerFilepath in glob.glob(jsonFiles):\n filename = os.path.basename(playerFilepath)\n name = re.sub(\"^player_\", \"\", filename)\n name = re.sub(\"\\.json$\", \"\", name)\n player = PlayerRecord(name)\n playerCache[player.name] = player\n return playerCache\n"
] |
"""
PURPOSE: manage records of all known players, both local and remote
"""
from __future__ import absolute_import
from __future__ import division # python 2/3 compatibility
from __future__ import print_function # python 2/3 compatibility
from six import iteritems, itervalues # python 2/3 compatibility
import glob
import os
import re
import time
from sc2players import constants as c
from sc2players.playerRecord import PlayerRecord
from sc2players.playerPreGame import PlayerPreGame
################################################################################
playerCache = {} # mapping of player names to PlayerRecord objects
################################################################################
def addPlayer(settings):
"""define a new PlayerRecord setting and save to disk file"""
_validate(settings)
player = PlayerRecord(settings)
player.save()
getKnownPlayers()[player.name] = player
return player
################################################################################
def updatePlayer(name, settings):
"""update an existing PlayerRecord setting and save to disk file"""
player = delPlayer(name) # remove the existing record
_validate(settings)
player.update(settings)
player.save()
getKnownPlayers()[player.name] = player
return player
################################################################################
def getPlayer(name):
"""obtain a specific PlayerRecord settings file"""
if isinstance(name, PlayerRecord): return name
try: return getKnownPlayers()[name.lower()]
except KeyError:
raise ValueError("given player name '%s' is not a known player definition"%(name))
################################################################################
# leave it to the caller to process further or allow deallocation
################################################################################
def buildPlayer(name, ptype, cmd='', options={}, difficulty=None, rating=None,
race=None, obs=False, pid=0, raceDefault=c.RANDOM):
newRating = rating or c.DEFAULT_RATING
if not isinstance(difficulty, c.ComputerDifficulties):
newDiff = c.ComputerDifficulties(difficulty)
else: newDiff = difficulty
ret = PlayerRecord(name=name, type=ptype, initCmd=cmd, initOptions=options,
difficulty=newDiff, rating=newRating, raceDefault=raceDefault)
if bool(race or obs or pid):
return PlayerPreGame(ret, selectedRace=race, observe=obs, playerID=pid)
else: return ret
################################################################################
def getKnownPlayers(reset=False):
"""identify all of the currently defined players"""
global playerCache
if not playerCache or reset:
jsonFiles = os.path.join(c.PLAYERS_FOLDER, "*.json")
for playerFilepath in glob.glob(jsonFiles):
filename = os.path.basename(playerFilepath)
name = re.sub("^player_", "", filename)
name = re.sub("\.json$", "", name)
player = PlayerRecord(name)
playerCache[player.name] = player
return playerCache
################################################################################
def getBlizzBotPlayers():
"""identify all of Blizzard's built-in bots"""
ret = {}
for pName,p in iteritems(getKnownPlayers()):
if p.isComputer:
ret[pName] = p
return ret
################################################################################
def getStaleRecords(limit=c.DEFAULT_TIME_LIMIT):
ret = []
now = time.time()
seconds = float(limit) * 24 * 60 * 60 # convert days to seconds
maxNoAct= min(seconds, c.NO_ACTIVITY_LIMIT * 24 * 60 * 60) # convert days to seconds
for player in itervalues(getKnownPlayers()):
if player.matches: # can only determine time since last match if matches exist
sinceLastMatch, match = sorted( # player's last match is the shortest time since now
[(now - m.endTime, m) for m in player.matches])[0]
if sinceLastMatch > seconds:
ret.append(player)
else: # if no matches, verify player's time since creation for sufficient time to play a match
sinceCreation = now - player.created
if sinceCreation > maxNoAct: # players created > 10 days ago without any recorded matches are identifed as stale
ret.append(player)
return ret
################################################################################
def removeStaleRecords(**kwargs):
"""identify all currently stale records and remove them"""
return [delPlayer(record) for record in getStaleRecords(**kwargs)]
################################################################################
def _validate(settings):
if "created" in settings: raise ValueError("parameter 'created' is expected to be automatmically generated.")
if "_matches" in settings: raise ValueError("matches are declared after playing matches, not during init.")
################################################################################
__all__ = ["addPlayer", "getPlayer", "delPlayer", "getKnownPlayers", "getBlizzBotPlayers",
"updatePlayer", "getStaleRecords", "removeStaleRecords"]
|
ttinies/sc2players
|
sc2players/playerManagement.py
|
getKnownPlayers
|
python
|
def getKnownPlayers(reset=False):
global playerCache
if not playerCache or reset:
jsonFiles = os.path.join(c.PLAYERS_FOLDER, "*.json")
for playerFilepath in glob.glob(jsonFiles):
filename = os.path.basename(playerFilepath)
name = re.sub("^player_", "", filename)
name = re.sub("\.json$", "", name)
player = PlayerRecord(name)
playerCache[player.name] = player
return playerCache
|
identify all of the currently defined players
|
train
|
https://github.com/ttinies/sc2players/blob/fd9b37c268bf1005d9ef73a25e65ed97c8b7895f/sc2players/playerManagement.py#L81-L92
| null |
"""
PURPOSE: manage records of all known players, both local and remote
"""
from __future__ import absolute_import
from __future__ import division # python 2/3 compatibility
from __future__ import print_function # python 2/3 compatibility
from six import iteritems, itervalues # python 2/3 compatibility
import glob
import os
import re
import time
from sc2players import constants as c
from sc2players.playerRecord import PlayerRecord
from sc2players.playerPreGame import PlayerPreGame
################################################################################
playerCache = {} # mapping of player names to PlayerRecord objects
################################################################################
def addPlayer(settings):
"""define a new PlayerRecord setting and save to disk file"""
_validate(settings)
player = PlayerRecord(settings)
player.save()
getKnownPlayers()[player.name] = player
return player
################################################################################
def updatePlayer(name, settings):
"""update an existing PlayerRecord setting and save to disk file"""
player = delPlayer(name) # remove the existing record
_validate(settings)
player.update(settings)
player.save()
getKnownPlayers()[player.name] = player
return player
################################################################################
def getPlayer(name):
"""obtain a specific PlayerRecord settings file"""
if isinstance(name, PlayerRecord): return name
try: return getKnownPlayers()[name.lower()]
except KeyError:
raise ValueError("given player name '%s' is not a known player definition"%(name))
################################################################################
def delPlayer(name):
"""forget about a previously defined PlayerRecord setting by deleting its disk file"""
player = getPlayer(name)
try: os.remove(player.filename) # delete from disk
except IOError: pass # shouldn't happen, but don't crash if the disk data doesn't exist
try: del getKnownPlayers()[player.name] # forget object from cache
except: pass
return player # leave it to the caller to process further or allow deallocation
################################################################################
def buildPlayer(name, ptype, cmd='', options={}, difficulty=None, rating=None,
race=None, obs=False, pid=0, raceDefault=c.RANDOM):
newRating = rating or c.DEFAULT_RATING
if not isinstance(difficulty, c.ComputerDifficulties):
newDiff = c.ComputerDifficulties(difficulty)
else: newDiff = difficulty
ret = PlayerRecord(name=name, type=ptype, initCmd=cmd, initOptions=options,
difficulty=newDiff, rating=newRating, raceDefault=raceDefault)
if bool(race or obs or pid):
return PlayerPreGame(ret, selectedRace=race, observe=obs, playerID=pid)
else: return ret
################################################################################
################################################################################
def getBlizzBotPlayers():
"""identify all of Blizzard's built-in bots"""
ret = {}
for pName,p in iteritems(getKnownPlayers()):
if p.isComputer:
ret[pName] = p
return ret
################################################################################
def getStaleRecords(limit=c.DEFAULT_TIME_LIMIT):
ret = []
now = time.time()
seconds = float(limit) * 24 * 60 * 60 # convert days to seconds
maxNoAct= min(seconds, c.NO_ACTIVITY_LIMIT * 24 * 60 * 60) # convert days to seconds
for player in itervalues(getKnownPlayers()):
if player.matches: # can only determine time since last match if matches exist
sinceLastMatch, match = sorted( # player's last match is the shortest time since now
[(now - m.endTime, m) for m in player.matches])[0]
if sinceLastMatch > seconds:
ret.append(player)
else: # if no matches, verify player's time since creation for sufficient time to play a match
sinceCreation = now - player.created
if sinceCreation > maxNoAct: # players created > 10 days ago without any recorded matches are identifed as stale
ret.append(player)
return ret
################################################################################
def removeStaleRecords(**kwargs):
"""identify all currently stale records and remove them"""
return [delPlayer(record) for record in getStaleRecords(**kwargs)]
################################################################################
def _validate(settings):
if "created" in settings: raise ValueError("parameter 'created' is expected to be automatmically generated.")
if "_matches" in settings: raise ValueError("matches are declared after playing matches, not during init.")
################################################################################
__all__ = ["addPlayer", "getPlayer", "delPlayer", "getKnownPlayers", "getBlizzBotPlayers",
"updatePlayer", "getStaleRecords", "removeStaleRecords"]
|
ttinies/sc2players
|
sc2players/playerManagement.py
|
getBlizzBotPlayers
|
python
|
def getBlizzBotPlayers():
ret = {}
for pName,p in iteritems(getKnownPlayers()):
if p.isComputer:
ret[pName] = p
return ret
|
identify all of Blizzard's built-in bots
|
train
|
https://github.com/ttinies/sc2players/blob/fd9b37c268bf1005d9ef73a25e65ed97c8b7895f/sc2players/playerManagement.py#L96-L102
|
[
"def getKnownPlayers(reset=False):\n \"\"\"identify all of the currently defined players\"\"\"\n global playerCache\n if not playerCache or reset:\n jsonFiles = os.path.join(c.PLAYERS_FOLDER, \"*.json\")\n for playerFilepath in glob.glob(jsonFiles):\n filename = os.path.basename(playerFilepath)\n name = re.sub(\"^player_\", \"\", filename)\n name = re.sub(\"\\.json$\", \"\", name)\n player = PlayerRecord(name)\n playerCache[player.name] = player\n return playerCache\n"
] |
"""
PURPOSE: manage records of all known players, both local and remote
"""
from __future__ import absolute_import
from __future__ import division # python 2/3 compatibility
from __future__ import print_function # python 2/3 compatibility
from six import iteritems, itervalues # python 2/3 compatibility
import glob
import os
import re
import time
from sc2players import constants as c
from sc2players.playerRecord import PlayerRecord
from sc2players.playerPreGame import PlayerPreGame
################################################################################
playerCache = {} # mapping of player names to PlayerRecord objects
################################################################################
def addPlayer(settings):
"""define a new PlayerRecord setting and save to disk file"""
_validate(settings)
player = PlayerRecord(settings)
player.save()
getKnownPlayers()[player.name] = player
return player
################################################################################
def updatePlayer(name, settings):
"""update an existing PlayerRecord setting and save to disk file"""
player = delPlayer(name) # remove the existing record
_validate(settings)
player.update(settings)
player.save()
getKnownPlayers()[player.name] = player
return player
################################################################################
def getPlayer(name):
"""obtain a specific PlayerRecord settings file"""
if isinstance(name, PlayerRecord): return name
try: return getKnownPlayers()[name.lower()]
except KeyError:
raise ValueError("given player name '%s' is not a known player definition"%(name))
################################################################################
def delPlayer(name):
"""forget about a previously defined PlayerRecord setting by deleting its disk file"""
player = getPlayer(name)
try: os.remove(player.filename) # delete from disk
except IOError: pass # shouldn't happen, but don't crash if the disk data doesn't exist
try: del getKnownPlayers()[player.name] # forget object from cache
except: pass
return player # leave it to the caller to process further or allow deallocation
################################################################################
def buildPlayer(name, ptype, cmd='', options={}, difficulty=None, rating=None,
race=None, obs=False, pid=0, raceDefault=c.RANDOM):
newRating = rating or c.DEFAULT_RATING
if not isinstance(difficulty, c.ComputerDifficulties):
newDiff = c.ComputerDifficulties(difficulty)
else: newDiff = difficulty
ret = PlayerRecord(name=name, type=ptype, initCmd=cmd, initOptions=options,
difficulty=newDiff, rating=newRating, raceDefault=raceDefault)
if bool(race or obs or pid):
return PlayerPreGame(ret, selectedRace=race, observe=obs, playerID=pid)
else: return ret
################################################################################
def getKnownPlayers(reset=False):
"""identify all of the currently defined players"""
global playerCache
if not playerCache or reset:
jsonFiles = os.path.join(c.PLAYERS_FOLDER, "*.json")
for playerFilepath in glob.glob(jsonFiles):
filename = os.path.basename(playerFilepath)
name = re.sub("^player_", "", filename)
name = re.sub("\.json$", "", name)
player = PlayerRecord(name)
playerCache[player.name] = player
return playerCache
################################################################################
################################################################################
def getStaleRecords(limit=c.DEFAULT_TIME_LIMIT):
    """Return the list of PlayerRecords considered stale.

    A player is stale when its most recent match ended more than `limit`
    days ago, or — if it has never played a match — when it was created
    longer ago than min(limit, c.NO_ACTIVITY_LIMIT) days.

    Args:
        limit: staleness threshold in days.
    Returns:
        list of stale PlayerRecord objects.
    """
    ret = []
    now = time.time()
    seconds = float(limit) * 24 * 60 * 60  # convert days to seconds
    maxNoAct= min(seconds, c.NO_ACTIVITY_LIMIT * 24 * 60 * 60)  # convert days to seconds
    for player in itervalues(getKnownPlayers()):
        if player.matches:  # can only determine time since last match if matches exist
            sinceLastMatch, match = sorted(  # player's last match is the shortest time since now
                [(now - m.endTime, m) for m in player.matches])[0]
            if sinceLastMatch > seconds:
                ret.append(player)
        else:  # if no matches, verify player's time since creation for sufficient time to play a match
            sinceCreation = now - player.created
            # players created longer ago than the no-activity window without
            # any recorded matches are identified as stale
            if sinceCreation > maxNoAct:
                ret.append(player)
    return ret
################################################################################
def removeStaleRecords(**kwargs):
    """Delete every player record that getStaleRecords() flags as stale.

    Keyword args are forwarded to getStaleRecords() (e.g. limit=...).
    Returns the list of PlayerRecords that were removed.
    """
    stale = getStaleRecords(**kwargs)
    removed = []
    for record in stale:
        removed.append(delPlayer(record))
    return removed
################################################################################
def _validate(settings):
if "created" in settings: raise ValueError("parameter 'created' is expected to be automatmically generated.")
if "_matches" in settings: raise ValueError("matches are declared after playing matches, not during init.")
################################################################################
# Public API of this module. Several names (addPlayer, updatePlayer,
# getBlizzBotPlayers) are defined earlier in the file, outside this excerpt.
__all__ = ["addPlayer", "getPlayer", "delPlayer", "getKnownPlayers", "getBlizzBotPlayers",
           "updatePlayer", "getStaleRecords", "removeStaleRecords"]
|
mattsolo1/hmmerclust
|
hmmerclust/hmmerclust.py
|
fetch_gbwithparts
|
python
|
def fetch_gbwithparts(list_of_NC_accessions, email, folder):
'''Download genbank files from NCBI using Biopython Entrez efetch.
Args:
list_of_NC_accessions (list): a list of strings, e.g ['NC_015758', 'NC_002695']
email (string): NCBI wants your email
folder (string): Where the gb files download to, generally './genomes/'
'''
from Bio import Entrez
from time import sleep
print 'downloading genomes... please wait'
for item in list_of_NC_accessions:
Entrez.email = email
handle = Entrez.efetch(db="nuccore",
id=item,
retmode='full',
rettype='gbwithparts')
data = handle.read()
if not os.path.exists(folder):
os.makedirs(folder)
with open('%s/%s.gb' % (folder,item), 'w') as textfile:
textfile.write(data)
print 'done downloading %s' % item
sleep(2)
|
Download genbank files from NCBI using Biopython Entrez efetch.
Args:
list_of_NC_accessions (list): a list of strings, e.g ['NC_015758', 'NC_002695']
email (string): NCBI wants your email
folder (string): Where the gb files download to, generally './genomes/'
|
train
|
https://github.com/mattsolo1/hmmerclust/blob/471596043a660097ed8b11430d42118a8fd25798/hmmerclust/hmmerclust.py#L40-L71
| null |
'''
The MIT License (MIT)
Copyright (c) 2015 Matthew Solomonson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
from Bio import SeqIO, SearchIO
import subprocess
from multiprocessing import cpu_count
from collections import OrderedDict
from pandas import DataFrame
import pprint
import matplotlib.pyplot as plt
import numpy as np
from matplotlib import pylab
from random import random
import os
# Number of logical CPUs; forwarded to `hmmsearch --cpu` so searches run in parallel.
processors = cpu_count()
def fetch_gbwithparts(list_of_NC_accessions, email, folder):
'''Download genbank files from NCBI using Biopython Entrez efetch.
Args:
list_of_NC_accessions (list): a list of strings, e.g ['NC_015758', 'NC_002695']
email (string): NCBI wants your email
folder (string): Where the gb files download to, generally './genomes/'
'''
from Bio import Entrez
from time import sleep
print 'downloading genomes... please wait'
for item in list_of_NC_accessions:
Entrez.email = email
handle = Entrez.efetch(db="nuccore",
id=item,
retmode='full',
rettype='gbwithparts')
data = handle.read()
if not os.path.exists(folder):
os.makedirs(folder)
with open('%s/%s.gb' % (folder,item), 'w') as textfile:
textfile.write(data)
print 'done downloading %s' % item
sleep(2)
class OrganismDB(object):
"""The database object of hmmerclust.
On initialization, takes a list of genbank accessions and the directory
in which they are stored, generates a list of Organisms, and creates a
combined_fasta text file which is later queried by HMMER.
Args
database_name (string): e.g. 'my_database'
genome_list (list): ['NC_015758.gb', 'NC_002695.gb']
genome_dir (string): path to genome location
freshfasta (bool): decide if a new combined_fasta file needs to be made
Attributes
database_name (string): see
genome_list (list)
genome_dir (string)
organisms (list): List of organism objects populated by the
make_organisms() function
df (DataFrame): why do we need this again?
rRNA16SDB (rRNA16SDB): an rRNA database object
"""
def __init__(self, database_name, genome_list, genome_dir, freshfasta=False):
self.database_name = database_name
self.genome_list = genome_list
self.genome_dir = genome_dir
if freshfasta==True:
self.generate_combined_fasta(self.genome_list, self.genome_dir)
self.organisms = []
self.df = None
self.make_organisms(self.genome_list, self.genome_dir)
self.rRNA16SDB = rRNA16SDB(self)
def generate_combined_fasta(self, genome_list, genome_dir):
'''Generate a combined fasta using the genbank files.
Args
genome_list (list)
genome_dir (string)
'''
fasta = []
for genome in genome_list:
full_path = genome_dir + genome
handle = open(full_path, "rU")
print 'making combined fasta for', genome
try:
seq_record = SeqIO.read(handle, 'genbank')
org_accession = seq_record.name
except AssertionError,e:
print str(e), genome
for feature in seq_record.features:
if feature.type == 'CDS':
try:
prot_accession = feature.qualifiers['protein_id'][0]
prot_translation = feature.qualifiers['translation'][0]
newfast = '>' + org_accession + ',' + prot_accession + \
'\n' + prot_translation + '\n'
#if newfast not in fasta:
fasta.append(newfast)
except AttributeError,e:
print "organism %s, protein %s did not have \
the right attributes" % (org_accession, prot_accession)
print str(e)
except KeyError,e:
print "organism %s, protein %s did not have \
the right key" % (org_accession, prot_accession)
print str(e)
handle.close()
print "%s proteins were added" % len(fasta)
set_fasta = set(fasta)
print "%s unique proteins were added -- dropping redundant ones" % len(set_fasta)
faastring = "".join(set_fasta)
write_fasta = open('combined_fasta', 'w')
write_fasta.write(faastring)
write_fasta.close()
return set_fasta
def make_organisms(self, genome_list, genome_dir):
'''Organism factory method.
Appends organisms to the organisms list.
Args
genome_list (list)
genome_dir (string)
'''
for genome in genome_list:
genome_path = genome_dir + genome
handle = open(genome_path, "rU")
print 'Adding organism attributes for', genome
try:
seq_record = SeqIO.read(handle, "genbank")
self.organisms.append(Organism(seq_record, genome_path, self))
del(seq_record)
except ValueError,e:
print genome, str(e)
except AssertionError,e:
print genome, str(e)
except UnboundLocalError,e:
print genome, str(e)
handle.close()
def add_protein_to_organisms(self, orgprot_list):
'''
Protein factory method.
Iterates through a list of SearchIO hit objects, matches
the accession against SeqRecord features for each organism.
If there is a match, the new Protein object is created and
stored in the protein list of that Organism.
Args
orgprot_list: a list Biopython SearchIO hit objects (I think).
'''
for org in self.organisms:
handle = open(org.genome_path, "rU")
print 'adding proteins to organism', org.accession
try:
seq_record = SeqIO.read(handle, "genbank")
feature_list = []
for id in orgprot_list:
org_id = id.split(',')[0]
prot_id = id.split(',')[1]
if org.accession == org_id:
for feature in seq_record.features:
if feature.type == 'CDS':
feat_prot_acc = feature.qualifiers['protein_id'][0]
if feat_prot_acc == prot_id:
#print 'appending', hit_prot_acc
org.proteins.append(Protein(feature))
del(seq_record)
except ValueError,e:
print 'error for ', org.accession, str(e)
except AssertionError,e:
print 'error for ', org.accession, str(e)
except UnboundLocalError,e:
print 'error for ', org.accession, str(e)
except KeyError,e:
print 'error for ', org.accession, str(e)
handle.close()
def add_hits_to_proteins(self, hmm_hit_list):
'''Add HMMER results to Protein objects'''
for org in self.organisms:
print "adding SearchIO hit objects for", org.accession
for hit in hmm_hit_list:
hit_org_id = hit.id.split(',')[0]
hit_prot_id = hit.id.split(',')[1]
if org.accession == hit_org_id:
for prot in org.proteins:
if prot.accession == hit_prot_id:
prot.hmm_hit_list.append(hit)
def cluster_number(self, data, maxgap):
'''General function that clusters numbers.
Args
data (list): list of integers.
maxgap (int): max gap between numbers in the cluster.
'''
data.sort()
groups = [[data[0]]]
for x in data[1:]:
if abs(x - groups[-1][-1]) <= maxgap:
groups[-1].append(x)
else:
groups.append([x])
return groups
def find_loci(self, cluster_size, maxgap, locusview=False, colordict=None):
'''
Finds the loci of a given cluster size & maximum gap between cluster members.
Args
cluster_size (int): minimum number of genes in the cluster.
maxgap (int): max basepair gap between genes in the cluster.
Kwargs
locusview (bool): whether or not a map is generated for the locus_parent_organism
colordict (list): pass a pre-made color scheme for identified proteins
'''
if colordict != None:
self.search.protein_arrow_color_dict = colordict
for organism in self.organisms:
print 'finding loci for', organism.name
#reset loci if there is something in there already
organism.loci = []
orghits = []
for protein in organism.proteins:
if len(protein.hmm_hit_list) > 0:
orghits.append((organism.accession, protein.accession,
protein.start_bp, protein.end_bp, protein))
bp_start_pooled = [hit[2] for hit in orghits]
try:
clustered_data = self.cluster_number(bp_start_pooled, maxgap)
significant_cluster_list = []
for cluster in clustered_data:
if len(cluster) > cluster_size:
significant_cluster_list.append(cluster)
#print significant_cluster_list
for cluster in significant_cluster_list:
proteins_in_locus = []
cluster.sort()
for bp_start in cluster:
for hit in orghits:
if bp_start == hit[2]:
proteins_in_locus.append(hit[4])
organism.loci.append(Locus(proteins_in_locus,
organism,
self.search.query_names,
locusview))
except IndexError,e:
print 'Index error', str(e), organism.name
print 'total of', str(len(organism.loci)), 'found for', organism.name
def clear_loci():
'''reset loci in the database.'''
for org in self.organisms:
org.loci=None
class Organism(object):
"""
Encapsulates data related to a single organism.
Args:
seq_record (SeqRecord): Biopython SeqRecord object
genome_path (str): The path to genome
OrganismDB (OrganismDB): the parent organism database
Attributes:
genome_path (str).
parent_db (OrganismDB).
various attributes related to the organism.
tree order (int): the position of organism sorted by a phylo tree.
proteins (list): list of Protein objects associated with the Organism.
loci (list): list of Locus objects associated with the Organism.
"""
def __init__(self, seq_record, genome_path, OrganismDB):
self.genome_path = genome_path
self.parent_db = OrganismDB
self.accesion_version = seq_record.id
self.accession = seq_record.name
self.description = seq_record.description
self.name = seq_record.annotations['source']
self.taxonomy = seq_record.annotations['taxonomy']
self.species = " ".join(self.name.split(" ")[0:2])
try:
self.kingdom = self.taxonomy[0]
self.phylum = self.taxonomy[1]
self.clazz = self.taxonomy[2]
self.order = self.taxonomy[3]
self.family = self.taxonomy[4]
self.genus = self.taxonomy[5]
except:
print 'Unable to parse taxonomy for', self.accession
self.taxonomy = None
self.kingdom = None
self.phylum = None
self.clazz = None
self.order = None
self.family = None
self.genus = None
self.rRNA16S_sequence = None
self.tree_order = 0
self.proteins = []
self.genome_length = len(seq_record.seq)
self.loci = []
class Protein(object):
    """Encapsulates data related to a single protein/gene.

    Args
      feature: a Biopython SeqFeature for a genbank CDS entry.

    Attributes
      accession / gi / product / translation: identity info from qualifiers.
      start_bp / end_bp / strand: genome location info.
      hmm_hit_list (list): complete list of SearchIO hit objects.
      hit_dataframe: per-HSP DataFrame built by parse_hmm_hit_list().
      hit_*_best: best-scoring hit values; the string 'non-hit' until
        HmmSearch.set_best_hit_values_for_proteins() fills them in.
      is_in_locus: the Locus the protein/gene belongs to (None if none).
    """
    def __init__(self, feature):
        self.accession = feature.qualifiers['protein_id'][0]
        self.gi = feature.qualifiers['db_xref'][0].split(':')[1]
        self.product = feature.qualifiers['product'][0]
        #self.note = feature.qualifiers['note']
        self.start_bp = feature.location.start.position
        self.end_bp = feature.location.end.position
        self.strand = feature.location.strand
        self.translation = feature.qualifiers['translation'][0]
        self.numb_residues = len(self.translation)
        self.hmm_hit_list = []
        self.hit_dataframe = None
        self.hit_name_best = 'non-hit'
        self.hit_evalue_best = 'non-hit'
        self.hit_bitscore_best = 'non-hit'
        self.hit_bias_best = 'non-hit'
        self.hit_start_best = 'non-hit'
        self.hit_end_best = 'non-hit'
        self.is_in_locus = None

    def __repr__(self):
        return "%s - %s" % (self.accession, self.product)

    def parse_hmm_hit_list(self, hmm_hit_list):
        '''Flatten SearchIO hits into a DataFrame indexed by query name.

        One row per HSP; the query name is the hit's query id up to the
        first underscore.

        Args
          hmm_hit_list: list of Bio.SearchIO hit objects.
        Returns
          pandas.DataFrame with columns bitscore/evalue/bias/hsp_start/
          hsp_end, indexed by 'name'.
        '''
        tuplist = []
        for hit in hmm_hit_list:
            for hsp in hit.hsps:
                # BUGFIX: dropped the pointless `tup =` binding — list.append
                # always returns None.
                tuplist.append((hit._query_id.split('_')[0],
                                hit.bitscore,
                                hit.evalue,
                                hsp.bias,
                                hsp.env_start,
                                hsp.env_end))
        cols = ['name', 'bitscore', 'evalue', 'bias', 'hsp_start', 'hsp_end']
        df = DataFrame(tuplist, columns=cols)
        df.set_index('name', inplace=True)
        return df
class rRNA16SDB:
    # Handles 16S rRNA data used to order organisms like a phylogenetic tree.
    def __init__(self, OrganismDB):
        # Extraction is done once offline; only the precomputed tree order
        # ('16S_aligned.csv') is loaded here.
        #self.write_16S_rRNA_fasta(OrganismDB.organisms)
        self.import_tree_order_from_file(OrganismDB, '16S_aligned.csv')

    def write_16S_rRNA_fasta(self, org_list):
        '''
        Writes a fasta file ('16S-rRNA.fasta') containing 16S rRNA sequences
        for a list of Organism objects.
        The first 16S sequence found in the seq record object is used,
        since it looks like there are duplicates.
        '''
        fasta = []
        for org in org_list:
            # NOTE(review): this handle is never closed — confirm the leak is
            # acceptable for the intended one-off offline use.
            handle = open(org.genome_path, "rU")
            seq_record = SeqIO.read(handle, "genbank")
            for feat in seq_record.features:
                if feat.type == 'rRNA':
                    if '16S ribosomal' in feat.qualifiers['product'][0]:
                        start = feat.location.start.position
                        end = feat.location.end.position
                        # sanity window for a plausible 16S gene length
                        if ((end - start) > 1400) & ((end - start) < 1700) :
                            print 'rRNA sequence extracted for', org.accession
                            fasta.append('>' + org.accession +
                                         '\n' +
                                         str(feat.extract(seq_record.seq)) +
                                         '\n')
                            org.rRNA16S_sequence = str(feat.extract(seq_record.seq))
                            break  # keep only the first qualifying 16S copy
        faastring = "".join(fasta)
        filename = '16S-rRNA.fasta'
        write_fasta = open(filename, 'w')
        write_fasta.write(faastring)
        write_fasta.close()

    def import_tree_order_from_file(self, MyOrganismDB, filename):
        '''
        Import the accession list that has been ordered by position
        in a phylogenetic tree. Get the index in the list, and
        add this to the Organism object. Later we can use this position
        to make a heatmap that matches up to a phylogenetic tree.
        '''
        tree_order = [acc.strip() for acc in open(filename)]
        #print tree_order
        for org in MyOrganismDB.organisms:
            for tree_accession in tree_order:
                #print tree_accession
                if org.accession == tree_accession:
                    org.tree_order = tree_order.index(tree_accession)
class HmmSearch:
    """
    Drives the HMMER pipeline end to end.

    Given alignment files (named so that the text before the first '_' is
    the query name), it first builds HMMs with hmmbuild and writes the .hmm
    files out, then runs hmmsearch against the combined fasta, parses the
    result files, and pushes the hits into the OrganismDB's Protein objects.
    """
    def __init__(self, OrganismDB, combined_fasta, freshbuild=True, freshsearch=True, ):
        # Inputs: every .txt file under ./alignments/ is a query alignment.
        self.alignment_dir = './alignments/'
        self.alignment_list = [x for x in os.listdir(self.alignment_dir) if '.txt' in x]
        self.query_names = []
        self.hmm_dir = './hmm/'
        if not os.path.exists(self.hmm_dir):
            os.makedirs(self.hmm_dir)
        self.combined_fasta = combined_fasta
        self.hhsearch_result_folder = './hhsearch_results/'
        if not os.path.exists(self.hhsearch_result_folder):
            os.makedirs(self.hhsearch_result_folder)
        self.hmm_result_list = []
        if freshbuild == True:
            self.run_hmmbuild()
        if freshsearch == True:
            self.run_hmmsearch()
        self.combined_hit_list = self.extract_hit_list_from_hmmsearch_results()
        # unique "org_accession,prot_accession" id strings over all hits
        self.orgprot_list = list(set([x.id for x in self.combined_hit_list]))
        OrganismDB.search = self  # backlink used by find_loci()/LocusView
        self.protein_arrow_color_dict = self.make_protein_arrow_color_dict(self.query_names)
        OrganismDB.add_protein_to_organisms(self.orgprot_list)
        OrganismDB.add_hits_to_proteins(self.combined_hit_list)
        self.parse_proteins(OrganismDB)
        self.set_best_hit_values_for_proteins(OrganismDB)

    def run_hmmbuild(self):
        '''
        Generate hmm with hmmbuild,
        output to file. Also stores query names.
        '''
        for alignment in self.alignment_list:
            print 'building Hmm for', alignment
            alignment_full_path = self.alignment_dir + alignment
            # query name = filename up to the first underscore
            query_name = alignment.split("_")[0]
            self.query_names.append(query_name)
            new_hmm= self.hmm_dir + query_name + ".hmm"
            hmmbuild_output = subprocess.call(["hmmbuild", new_hmm,
                                               alignment_full_path])
        print 'hhbuild complete for', self.query_names

    def run_hmmsearch(self):
        '''
        Run hmmsearch for every built query hmm against the combined fasta;
        raw output is written to <query>.out under the results folder.
        '''
        all_searches = []
        for name in self.query_names:
            print 'running HHsearch on', name
            hmm_full_path = self.hmm_dir + name + '.hmm'
            hmmsearch_output = subprocess.check_output(["hmmsearch",
                "--cpu", str(processors), hmm_full_path,
                self.combined_fasta])
            hmm_result_file_name = self.hhsearch_result_folder + name + ".out"
            self.hmm_result_list.append((name + ".out"))
            f = open(hmm_result_file_name, 'w')
            f.write(hmmsearch_output)
            f.close()

    def extract_hit_list_from_hmmsearch_results(self):
        '''
        Make a giant list of all the hit objects from
        our search
        '''
        combined_list_of_hits = []
        for result in self.hmm_result_list:
            fullpath = self.hhsearch_result_folder + result
            se = SearchIO.read(fullpath, 'hmmer3-text')
            sublist = []
            for hit in se:
                combined_list_of_hits.append(hit)
                sublist.append(hit.id)
            print 'extracted', str(len(sublist)), 'hits for', result
        return combined_list_of_hits

    def make_protein_arrow_color_dict(self, query_names):
        '''
        Generates a random color for all proteins in query_names,
        stores these in a dict.
        '''
        protein_arrow_color_dict = dict()
        for protein in self.query_names:
            # random RGB triple, consumed by LocusView's arrow drawing
            protein_arrow_color_dict[protein] = (random(), random(), random())
        return protein_arrow_color_dict

    def make_hsps(self, hit):
        # NOTE(review): appears to be dead code — the locals are computed but
        # never used or returned, and nothing visible calls make_hsps.
        hit_name = hit._query_id.split("_")[0]
        hit_evalue = hit.evalue
        hit_bitscore = hit.bitscore

    def parse_proteins(self,OrganismDB):
        '''
        Iterate through all the proteins in the DB,
        creates a hit_dataframe for each protein.
        '''
        for org in OrganismDB.organisms:
            for prot in org.proteins:
                if len(prot.hmm_hit_list) > 0:
                    try:
                        prot.hit_dataframe = prot.parse_hmm_hit_list(prot.hmm_hit_list)
                    except ValueError,e:
                        print 'error for', org.name, prot.accession, str(e)

    def set_best_hit_values_for_proteins(self, OrganismDB):
        '''
        Iterate through all proteins in the DB,
        drop duplicates in the hit_dataframe, then store the maximum
        hit information as protein attributes.
        '''
        for org in OrganismDB.organisms:
            print 'setting best hit values for', org.name
            for prot in org.proteins:
                if len(prot.hmm_hit_list) > 0:
                    try:
                        dd_df = prot.hit_dataframe.drop_duplicates(subset='bitscore')
                        try:
                            # NOTE(review): DataFrame.ix is deprecated/removed in
                            # modern pandas — this code targets an old pandas.
                            prot.hit_name_best = dd_df.bitscore.idxmax()
                            prot.hit_evalue_best = dd_df.ix[prot.hit_name_best].evalue
                            prot.hit_bitscore_best = dd_df.ix[prot.hit_name_best].bitscore
                            prot.hit_bias_best = dd_df.ix[prot.hit_name_best].bias
                            prot.hit_start_best = dd_df.ix[prot.hit_name_best].hsp_start
                            prot.hit_end_best = dd_df.ix[prot.hit_name_best].hsp_end
                        except:
                            print 'could not set best hit values for ', org.name
                    except AttributeError:
                        pass
class Locus:
    '''
    Accepts a list of Protein objects, typically clustered proteins
    generated by the find_loci() method. The first and last proteins in
    the locus are defined as its boundaries.
    Also links the Locus back into the OrganismDB (via each protein's
    is_in_locus attribute and the organism's proteins list).
    '''
    def __init__(self, list_of_protein_objects, organism, query_proteins, locusview):
        self.locus_hit_membership = list_of_protein_objects
        for prot in self.locus_hit_membership:
            prot.is_in_locus = self
        self.locus_number_of_hits = len(self.locus_hit_membership)
        # boundaries come from the first/last hit (list arrives sorted by start_bp)
        self.locus_min_hit_boundary = self.locus_hit_membership[0].start_bp
        self.locus_max_hit_boundary = self.locus_hit_membership[-1].end_bp
        self.locus_bp_size = int(self.locus_hit_membership[-1].end_bp) - \
            int(self.locus_hit_membership[0].start_bp)
        self.locus_total_membership = self.get_total_membership(organism)
        self.locus_number_in_total = len(self.locus_total_membership)
        self.query_proteins = query_proteins
        self.locus_parent_organism = organism
        #print organism.proteome.proteins
        if locusview == True:
            LocusView(self)
        self.write_out_locus_fasta()

    def get_total_membership(self, organism):
        # Re-read the genbank record and pull in every CDS that falls inside
        # the hit boundaries, so the locus also contains the non-hit genes
        # interleaved between hits.
        handle = open(organism.genome_path, "rU")
        total_membership_list = list(self.locus_hit_membership)
        try:
            seq_record = SeqIO.read(handle, "genbank")
            for feature in seq_record.features:
                if feature.type == 'CDS':
                    locus_hit_accs = [x.accession for x in self.locus_hit_membership]
                    if feature.qualifiers['protein_id'][0] not in locus_hit_accs:
                        featstart = feature.location.start.position
                        featend = feature.location.end.position
                        if ((featstart >= self.locus_min_hit_boundary) and
                                (featend <= self.locus_max_hit_boundary)):
                            newprot = (Protein(feature))
                            newprot.is_in_locus = self
                            total_membership_list.append(newprot)
                            organism.proteins.append(newprot)
            del(seq_record)
        except ValueError,e:
            print str(e), organism.name
        except AssertionError,e:
            print str(e), organism.name
        except UnboundLocalError,e:
            print str(e), organism.name
        handle.close()
        # de-duplicate, then order the members by genome position
        total_membership_list = list(set(total_membership_list))
        total_membership_list.sort(key=lambda x: x.start_bp)
        return total_membership_list

    def write_out_locus_fasta(self):
        # Dump the full locus (hits + interleaved genes) as a fasta under
        # ./locus_fastas/, one file per locus.
        fasta=[]
        for prot in self.locus_total_membership:
            fasta.append('>' + prot.accession +
                         "," + prot.hit_name_best +
                         "," + prot.product +
                         '\n' +prot.translation + '\n')
        faastring = "".join(fasta)
        if not os.path.exists('./locus_fastas/'):
            os.makedirs('./locus_fastas/')
        filename = ('./locus_fastas/' +
                    self.locus_parent_organism.accession +
                    str(self.locus_min_hit_boundary) + '.fasta')
        write_fasta = open(filename, 'w')
        write_fasta.write(faastring)
        write_fasta.close()
class LocusView:
    # Renders a matplotlib gene-arrow map of a Locus and prints a summary
    # table of the locus members and their best hits.
    def __init__(self, Locus, hit_detail_table=False, xlims=None):
        self.generate_locus_view(Locus, xlims)
        if hit_detail_table==True:
            self.show_locus_hit_details(Locus)

    def generate_locus_dataframe(self, Locus):
        # One row per locus member: accession, truncated product name, and
        # the best-hit stats ('-' for proteins without any HMMER hit).
        data_tuple_list = []
        print '\n'
        print '-'*70, '\n','-'*70
        print "Organism: ", Locus.locus_parent_organism.name
        print 'Locus id:', id(Locus)
        for protein in Locus.locus_total_membership:
            if len(protein.hmm_hit_list) > 0:
                #print protein.hmm_hit_list[0].__dict__
                protein_hit_query = protein.hit_name_best
                protein_hit_evalue = protein.hit_evalue_best
                protein_hit_bitscore = protein.hit_bitscore_best
                protein_hit_bias = protein.hit_bias_best
                #protein_hsps = protein.hmm_hit_list[0]._items
            else:
                protein_hit_query = '-'
                protein_hit_evalue = '-'
                protein_hit_bitscore = '-'
                protein_hit_bias = '-'
            data_tuple_list.append((protein.accession, protein.product[:22],
                protein_hit_query,protein_hit_evalue,protein_hit_bitscore,
                protein_hit_bias))
        cols = ['accession', 'name', 'query', 'evalue', 'bitscore', 'bias']
        df = DataFrame(data_tuple_list, columns=cols)
        return df[['accession', 'query', 'evalue', 'bitscore', 'bias', 'name']]

    def generate_locus_view(self, Locus, xlims):
        #set_option('expand_frame_repr', False)
        #figsize(20,5)
        df = self.generate_locus_dataframe(Locus)
        # x axis is basepairs relative to the locus's left hit boundary
        if xlims != None:
            xmin_value = xlims[0]
            xmax_value = xlims[1]
        else:
            xmin_value = Locus.locus_min_hit_boundary - Locus.locus_min_hit_boundary
            xmax_value = Locus.locus_max_hit_boundary - Locus.locus_min_hit_boundary
        ax = plt.axes()
        #plt.figure(num=None, figsize=(20,5))
        for n, protein in enumerate(Locus.locus_total_membership):
            colordict = Locus.locus_parent_organism.parent_db.search.protein_arrow_color_dict
            # forward-strand genes point right; reverse-strand genes point left
            if protein.strand == 1:
                arrow_start = protein.start_bp - Locus.locus_min_hit_boundary
                arrow_end = protein.end_bp - protein.start_bp
            else:
                arrow_start = protein.end_bp - Locus.locus_min_hit_boundary
                arrow_end = protein.start_bp - protein.end_bp
            if len(protein.hmm_hit_list) != 0:
                protname = protein.hit_name_best
                arrow_color = colordict[protname]
            else:
                arrow_color = '#EEE7EB'  # light grey for non-hit genes
            ax.arrow(arrow_start, 100, arrow_end, 0, head_width=30, width=20,
                     color=arrow_color, head_length=200, length_includes_head=True,
                     fill=True, ec="black", lw=1)
            if len(protein.hmm_hit_list) > 0:
                hitdf = protein.hit_dataframe.reset_index()
                # alternate HSP labels above/below the arrow row to reduce overlap
                ystart = 80 if (n % 2 == 0) else 120
                for i in range(0, len(hitdf)):
                    # residue coordinates are scaled x3 to approximate basepairs
                    if protein.strand == 1:
                        line_start = protein.start_bp - Locus.locus_min_hit_boundary + hitdf.ix[i].hsp_start*3
                        line_end = protein.end_bp - Locus.locus_min_hit_boundary - (protein.numb_residues - hitdf.ix[i].hsp_end)*3
                    else:
                        line_start = protein.start_bp - Locus.locus_min_hit_boundary + (protein.numb_residues - hitdf.ix[i].hsp_end)*3
                        line_end = protein.end_bp - Locus.locus_min_hit_boundary - hitdf.ix[i].hsp_start*3
                    plt.plot([line_start, line_end], [ystart, ystart], 'k-', lw=2)
                    xtextpos = line_start
                    if (n % 2 == 0):
                        ytextpos = ystart - 8
                    else:
                        ytextpos = ystart+2
                    label = str(i) + " " + hitdf.ix[i]['name']# + " " + str(hitdf.ix[i]['evalue'])[:3]
                    plt.annotate(label, xy=(xtextpos, ytextpos))
                    if (n % 2 == 0):
                        ystart -= 10
                    else:
                        ystart +=10
        plt.axis('on')
        pylab.ylim([0,200])
        pylab.xlim([xmin_value, xmax_value])
        #savefig('image.svg', dpi=300, format='pdf')
        plt.show()
        print df

    def show_locus_hit_details(self, Locus):
        # Per-hit HSP tables, best bitscore first.
        # NOTE(review): DataFrame.sort was removed in modern pandas
        # (sort_values) — this code targets an old pandas.
        for hit in Locus.locus_hit_membership:
            try:
                print hit.hit_dataframe.sort(columns='bitscore', ascending=False)
            except AttributeError,e:
                print str(e), 'attribute error for ', hit
class FinalDataFrame:
    '''
    Package all data into a pandas.DataFrame: one row per protein that has
    at least one HMMER hit, carrying organism, protein, best-hit, and locus
    columns.
    '''
    def __init__(self,OrganismDB):
        self.df = self.make_df(OrganismDB)

    def make_df(self, OrganismDB):
        # Flatten the object graph (organism -> protein -> best hit / locus)
        # into a list of ordered dicts, then into a DataFrame with a fixed
        # column order.
        list_of_hit_dicts = []
        for i in range(0, len(OrganismDB.organisms)):
            organism = OrganismDB.organisms[i]
            for j in range(0, len(organism.proteins)):
                protein = organism.proteins[j]
                if len(protein.hmm_hit_list) != 0:  # only proteins with hits
                    hit_dict = OrderedDict()
                    hit_dict['org_name'] = organism.name
                    hit_dict['org_acc'] = organism.accession
                    hit_dict['org_phylum'] = organism.phylum
                    hit_dict['org_class'] = organism.clazz
                    hit_dict['org_order'] = organism.order
                    hit_dict['org_family'] = organism.family
                    hit_dict['org_genus'] = organism.genus
                    hit_dict['org_species'] = organism.species
                    hit_dict['org_tree_order'] = organism.tree_order
                    hit_dict['org_genome_length'] = organism.genome_length
                    hit_dict['org_prot_count'] = len(organism.proteins)
                    hit_dict['org_numb_loci'] = len(organism.loci)
                    hit_dict['prot_acc'] = protein.accession
                    hit_dict['prot_gi'] = protein.gi
                    hit_dict['prot_product'] = protein.product
                    hit_dict['prot_translation'] = protein.translation
                    hit_dict['prot_numb_of_res'] = protein.numb_residues
                    hit_dict['hit_query'] = protein.hit_name_best
                    hit_dict['hit_evalue'] = protein.hit_evalue_best
                    hit_dict['hit_bitscore'] = protein.hit_bitscore_best
                    hit_dict['hit_bias'] = protein.hit_bias_best
                    hit_dict['locus_id'] = protein.is_in_locus
                    list_of_hit_dicts.append(hit_dict)
        df = DataFrame(list_of_hit_dicts)
        print df.index
        cols = ['org_name',
                'org_acc',
                'org_phylum',
                'org_class',
                'org_order',
                'org_family',
                'org_genus',
                'org_species',
                'org_tree_order',
                'org_genome_length',
                'org_prot_count',
                'org_numb_loci',
                'prot_acc',
                'prot_gi',
                'prot_product',
                'prot_translation',
                'prot_numb_of_res',
                'hit_query',
                'hit_evalue',
                'hit_bitscore',
                'hit_bias',
                'locus_id']
        df = df[cols]
        return df
class HeatMap:
    # Draws a presence/copy-number heatmap of hit queries (columns) per
    # organism or per locus (rows), ordered to match the phylogenetic tree.
    def __init__(self, DataFrame, by_locus=False, cols=None, subset=None, singleletters=None):
        self.unstacked_df = self.unstack_df(DataFrame, by_locus, cols, subset)
        self.heatmap = self.make_heatmap(self.unstacked_df, singleletters)

    def unstack_df(self, DataFrame, by_locus, cols, subset):
        # Pivot the flat results table: rows are (species[, locus], tree
        # position), columns are hit queries, cells are hit counts.
        if by_locus == True:
            colheads = ['org_species', 'locus_id', 'org_tree_order', 'hit_query']
        else:
            colheads = ['org_species', 'org_tree_order', 'hit_query']
        unstacked_df = DataFrame.groupby(colheads).size().unstack()
        if subset != None:
            unstacked_df = unstacked_df.dropna(subset=subset)
        # NOTE(review): sortlevel was removed in modern pandas (sort_index)
        # — this code targets an old pandas.
        unstacked_df = unstacked_df.fillna(0).sortlevel('org_tree_order', ascending=False)
        if cols != None:
            unstacked_df=unstacked_df[cols]
        return unstacked_df

    def make_heatmap(self, unstacked_df, singleletters):
        # figure height scales with the number of rows
        fig, ax = plt.subplots(num=None, figsize=(10,len(unstacked_df)/3), dpi=80, facecolor='w', edgecolor='k')
        #heatmap = ax.pcolor(unstacked_df, cmap=plt.cm.Reds, alpha=2, vmax = 5)
        #heatmap = ax.pcolor(unstacked_df, cmap=plt.cm.gist_ncar_r, alpha=20, vmax = 20)
        #heatmap = ax.pcolor(unstacked_df, cmap=plt.cm.YlGnBu, alpha=20, vmax = 2)
        heatmap = ax.pcolor(unstacked_df, cmap=plt.cm.jet, alpha=10, vmax = 5)
        # ax.set_title('140616 - Esp distribution in actinobacteria')
        #cb = plt.colorbar(heatmap)
        #cb.set_label('# of copies')
        species_names_only = ['%s locus:%s' % (x[0],str(x[1])[-12:]) for x in unstacked_df.index.values]
        ax.set_aspect('equal')
        ax.yaxis.set_ticks(range(0, len(unstacked_df.values)))
        ax.xaxis.set_ticks(range(0, len(unstacked_df.columns)))
        ax.set_xticklabels(unstacked_df.columns, rotation='90')
        ax.set_yticklabels(species_names_only)
        #ax.set_yticklabels(unstacked_df.index.values)
        ax.tick_params(axis='both', left='off', right='off', bottom='off', top='off')
        #ax.set_xticks(np.range(data.shape[0])+0.5, minor=False)
        #ax.set_yticks(np.range(data.shape[1])+0.5, minor=False)
        #ax.invert_yaxis()
        #ax.xaxis.tick_top()
        plt.grid(True, color='black', ls='-', linewidth=0.5)
        '''exerimental: displaying text on the heatmap'''
        if singleletters != None:
            for y in range(unstacked_df.values.shape[0]):
                for x in range(unstacked_df.values.shape[1]):
                    plt.text(x + 0.5, y + 0.5, '%.4s' % singleletters[(x)],
                             horizontalalignment='center',
                             verticalalignment='center',
                             )
        plt.savefig("out.svg")
        plt.show()
        #print species_names_only

    def make_heatmap_text(self, unstacked_df):
        # NOTE(review): near-duplicate of make_heatmap with a constant 'A'
        # cell label and no savefig — looks like an experiment kept around.
        fig, ax = plt.subplots(num=None, figsize=(10,len(unstacked_df)/3), dpi=80, facecolor='w', edgecolor='k')
        #heatmap = ax.pcolor(unstacked_df, cmap=plt.cm.Reds, alpha=2, vmax = 5)
        #heatmap = ax.pcolor(unstacked_df, cmap=plt.cm.gist_ncar_r, alpha=20, vmax = 20)
        #heatmap = ax.pcolor(unstacked_df, cmap=plt.cm.YlGnBu, alpha=20, vmax = 2)
        heatmap = ax.pcolor(unstacked_df, cmap=plt.cm.jet, alpha=10, vmax = 5)
        # ax.set_title('140616 - Esp distribution in actinobacteria')
        #cb = plt.colorbar(heatmap)
        #cb.set_label('# of copies')
        species_names_only = ['%s locus:%s' % (x[0],str(x[1])[-12:]) for x in unstacked_df.index.values]
        ax.set_aspect('equal')
        ax.yaxis.set_ticks(range(0, len(unstacked_df.values)))
        ax.xaxis.set_ticks(range(0, len(unstacked_df.columns)))
        ax.set_xticklabels(unstacked_df.columns, rotation='90')
        ax.set_yticklabels(species_names_only)
        #ax.set_yticklabels(unstacked_df.index.values)
        ax.tick_params(axis='both', left='off', right='off', bottom='off', top='off')
        #ax.set_xticks(np.range(data.shape[0])+0.5, minor=False)
        #ax.set_yticks(np.range(data.shape[1])+0.5, minor=False)
        #ax.invert_yaxis()
        #ax.xaxis.tick_top()
        plt.grid(True, color='black', ls='-', linewidth=0.5)
        '''exerimental: displaying text on the heatmap'''
        for y in range(unstacked_df.values.shape[0]):
            for x in range(unstacked_df.values.shape[1]):
                plt.text(x + 0.5, y + 0.5, '%.4s' % 'A',
                         horizontalalignment='center',
                         verticalalignment='center',
                         )
        plt.show()
        #print species_names_only
class RelatedProteinGroup:
    '''
    A group of related proteins to be used for generating alignments,
    phylogeny, etc.

    Input is a DataFrame of Protein hits (e.g. all hits of the same HMM
    query found in a cluster).  On initialization, one fasta file is
    written per unique hit query under ./group_fastas/, suitable for
    downstream alignment and tree building.
    '''

    def __init__(self, input_df):
        # Materialize the per-query fasta files immediately.
        self.make_related_protein_fasta_from_dataframe(input_df)

    def make_related_protein_fasta_from_dataframe(self, input_df):
        '''
        Write one fasta file per unique hit query in input_df.

        Args
            input_df (DataFrame): must have the columns hit_query,
                org_name, prot_acc and prot_translation.
        '''
        dirname = './group_fastas'
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        for hq in set(input_df.hit_query):
            subdf = input_df[input_df.hit_query == hq].reset_index()
            records = []
            # .iterrows() replaces the deprecated .ix positional indexer,
            # which was removed from modern pandas.
            for _, row in subdf.iterrows():
                # Spaces in organism names would break downstream fasta
                # parsers, so replace them with dashes in the header line.
                records.append('>' + row.org_name.replace(" ", "-") +
                               "," + row.hit_query +
                               "," + row.prot_acc +
                               '\n' + row.prot_translation + '\n')
            filename = './group_fastas/' + hq + '.fasta'
            # Context manager guarantees the handle is closed even if the
            # write raises (the original leaked the handle on error).
            with open(filename, 'w') as write_fasta:
                write_fasta.write("".join(records))
'''
def make_16S(OrganismDB):
for org in OrganismDB.genome_list:
hmmbuild_output = subprocess.call(["hmmbuild", './16S_rRNA/16S_rRNA.hmm',
'./16S_rRNA/16S_rRNA_alignment.fasta'])
hmmsearch_output = subprocess.check_output(["hmmsearch",
"--cpu", str(processors), './16S_rRNA/16S_rRNA.hmm',
'combined_fasta'])
f = open('./16S_rRNA/16S_rRNA_result.out', 'w')
f.write(hmmsearch_output)
f.close ()
'''
|
mattsolo1/hmmerclust
|
hmmerclust/hmmerclust.py
|
OrganismDB.generate_combined_fasta
|
python
|
def generate_combined_fasta(self, genome_list, genome_dir):
'''Generate a combined fasta using the genbank files.
Args
genome_list (list)
genome_dir (string)
'''
fasta = []
for genome in genome_list:
full_path = genome_dir + genome
handle = open(full_path, "rU")
print 'making combined fasta for', genome
try:
seq_record = SeqIO.read(handle, 'genbank')
org_accession = seq_record.name
except AssertionError,e:
print str(e), genome
for feature in seq_record.features:
if feature.type == 'CDS':
try:
prot_accession = feature.qualifiers['protein_id'][0]
prot_translation = feature.qualifiers['translation'][0]
newfast = '>' + org_accession + ',' + prot_accession + \
'\n' + prot_translation + '\n'
#if newfast not in fasta:
fasta.append(newfast)
except AttributeError,e:
print "organism %s, protein %s did not have \
the right attributes" % (org_accession, prot_accession)
print str(e)
except KeyError,e:
print "organism %s, protein %s did not have \
the right key" % (org_accession, prot_accession)
print str(e)
handle.close()
print "%s proteins were added" % len(fasta)
set_fasta = set(fasta)
print "%s unique proteins were added -- dropping redundant ones" % len(set_fasta)
faastring = "".join(set_fasta)
write_fasta = open('combined_fasta', 'w')
write_fasta.write(faastring)
write_fasta.close()
return set_fasta
|
Generate a combined fasta using the genbank files.
Args
genome_list (list)
genome_dir (string)
|
train
|
https://github.com/mattsolo1/hmmerclust/blob/471596043a660097ed8b11430d42118a8fd25798/hmmerclust/hmmerclust.py#L113-L171
| null |
class OrganismDB(object):
"""The database object of hmmerclust.
On initialization, takes a list of genbank accessions and the directory
in which they are stored, generates a list of Organisms, and creates a
combined_fasta text file which is later queried by HMMER.
Args
database_name (string): e.g. 'my_database'
genome_list (list): ['NC_015758.gb', 'NC_002695.gb']
genome_dir (string): path to genome location
freshfasta (bool): decide if a new combined_fasta file needs to be made
Attributes
database_name (string): see
genome_list (list)
genome_dir (string)
organisms (list): List of organism objects populated by the
make_organisms() function
df (DataFrame): why do we need this again?
rRNA16SDB (rRNA16SDB): an rRNA database object
"""
def __init__(self, database_name, genome_list, genome_dir, freshfasta=False):
self.database_name = database_name
self.genome_list = genome_list
self.genome_dir = genome_dir
if freshfasta==True:
self.generate_combined_fasta(self.genome_list, self.genome_dir)
self.organisms = []
self.df = None
self.make_organisms(self.genome_list, self.genome_dir)
self.rRNA16SDB = rRNA16SDB(self)
def generate_combined_fasta(self, genome_list, genome_dir):
'''Generate a combined fasta using the genbank files.
Args
genome_list (list)
genome_dir (string)
'''
fasta = []
for genome in genome_list:
full_path = genome_dir + genome
handle = open(full_path, "rU")
print 'making combined fasta for', genome
try:
seq_record = SeqIO.read(handle, 'genbank')
org_accession = seq_record.name
except AssertionError,e:
print str(e), genome
for feature in seq_record.features:
if feature.type == 'CDS':
try:
prot_accession = feature.qualifiers['protein_id'][0]
prot_translation = feature.qualifiers['translation'][0]
newfast = '>' + org_accession + ',' + prot_accession + \
'\n' + prot_translation + '\n'
#if newfast not in fasta:
fasta.append(newfast)
except AttributeError,e:
print "organism %s, protein %s did not have \
the right attributes" % (org_accession, prot_accession)
print str(e)
except KeyError,e:
print "organism %s, protein %s did not have \
the right key" % (org_accession, prot_accession)
print str(e)
handle.close()
print "%s proteins were added" % len(fasta)
set_fasta = set(fasta)
print "%s unique proteins were added -- dropping redundant ones" % len(set_fasta)
faastring = "".join(set_fasta)
write_fasta = open('combined_fasta', 'w')
write_fasta.write(faastring)
write_fasta.close()
return set_fasta
def make_organisms(self, genome_list, genome_dir):
'''Organism factory method.
Appends organisms to the organisms list.
Args
genome_list (list)
genome_dir (string)
'''
for genome in genome_list:
genome_path = genome_dir + genome
handle = open(genome_path, "rU")
print 'Adding organism attributes for', genome
try:
seq_record = SeqIO.read(handle, "genbank")
self.organisms.append(Organism(seq_record, genome_path, self))
del(seq_record)
except ValueError,e:
print genome, str(e)
except AssertionError,e:
print genome, str(e)
except UnboundLocalError,e:
print genome, str(e)
handle.close()
def add_protein_to_organisms(self, orgprot_list):
'''
Protein factory method.
Iterates through a list of SearchIO hit objects, matches
the accession against SeqRecord features for each organism.
If there is a match, the new Protein object is created and
stored in the protein list of that Organism.
Args
orgprot_list: a list Biopython SearchIO hit objects (I think).
'''
for org in self.organisms:
handle = open(org.genome_path, "rU")
print 'adding proteins to organism', org.accession
try:
seq_record = SeqIO.read(handle, "genbank")
feature_list = []
for id in orgprot_list:
org_id = id.split(',')[0]
prot_id = id.split(',')[1]
if org.accession == org_id:
for feature in seq_record.features:
if feature.type == 'CDS':
feat_prot_acc = feature.qualifiers['protein_id'][0]
if feat_prot_acc == prot_id:
#print 'appending', hit_prot_acc
org.proteins.append(Protein(feature))
del(seq_record)
except ValueError,e:
print 'error for ', org.accession, str(e)
except AssertionError,e:
print 'error for ', org.accession, str(e)
except UnboundLocalError,e:
print 'error for ', org.accession, str(e)
except KeyError,e:
print 'error for ', org.accession, str(e)
handle.close()
def add_hits_to_proteins(self, hmm_hit_list):
'''Add HMMER results to Protein objects'''
for org in self.organisms:
print "adding SearchIO hit objects for", org.accession
for hit in hmm_hit_list:
hit_org_id = hit.id.split(',')[0]
hit_prot_id = hit.id.split(',')[1]
if org.accession == hit_org_id:
for prot in org.proteins:
if prot.accession == hit_prot_id:
prot.hmm_hit_list.append(hit)
def cluster_number(self, data, maxgap):
'''General function that clusters numbers.
Args
data (list): list of integers.
maxgap (int): max gap between numbers in the cluster.
'''
data.sort()
groups = [[data[0]]]
for x in data[1:]:
if abs(x - groups[-1][-1]) <= maxgap:
groups[-1].append(x)
else:
groups.append([x])
return groups
def find_loci(self, cluster_size, maxgap, locusview=False, colordict=None):
'''
Finds the loci of a given cluster size & maximum gap between cluster members.
Args
cluster_size (int): minimum number of genes in the cluster.
maxgap (int): max basepair gap between genes in the cluster.
Kwargs
locusview (bool): whether or not a map is generated for the locus_parent_organism
colordict (list): pass a pre-made color scheme for identified proteins
'''
if colordict != None:
self.search.protein_arrow_color_dict = colordict
for organism in self.organisms:
print 'finding loci for', organism.name
#reset loci if there is something in there already
organism.loci = []
orghits = []
for protein in organism.proteins:
if len(protein.hmm_hit_list) > 0:
orghits.append((organism.accession, protein.accession,
protein.start_bp, protein.end_bp, protein))
bp_start_pooled = [hit[2] for hit in orghits]
try:
clustered_data = self.cluster_number(bp_start_pooled, maxgap)
significant_cluster_list = []
for cluster in clustered_data:
if len(cluster) > cluster_size:
significant_cluster_list.append(cluster)
#print significant_cluster_list
for cluster in significant_cluster_list:
proteins_in_locus = []
cluster.sort()
for bp_start in cluster:
for hit in orghits:
if bp_start == hit[2]:
proteins_in_locus.append(hit[4])
organism.loci.append(Locus(proteins_in_locus,
organism,
self.search.query_names,
locusview))
except IndexError,e:
print 'Index error', str(e), organism.name
print 'total of', str(len(organism.loci)), 'found for', organism.name
def clear_loci():
'''reset loci in the database.'''
for org in self.organisms:
org.loci=None
|
mattsolo1/hmmerclust
|
hmmerclust/hmmerclust.py
|
OrganismDB.make_organisms
|
python
|
def make_organisms(self, genome_list, genome_dir):
'''Organism factory method.
Appends organisms to the organisms list.
Args
genome_list (list)
genome_dir (string)
'''
for genome in genome_list:
genome_path = genome_dir + genome
handle = open(genome_path, "rU")
print 'Adding organism attributes for', genome
try:
seq_record = SeqIO.read(handle, "genbank")
self.organisms.append(Organism(seq_record, genome_path, self))
del(seq_record)
except ValueError,e:
print genome, str(e)
except AssertionError,e:
print genome, str(e)
except UnboundLocalError,e:
print genome, str(e)
handle.close()
|
Organism factory method.
Appends organisms to the organisms list.
Args
genome_list (list)
genome_dir (string)
|
train
|
https://github.com/mattsolo1/hmmerclust/blob/471596043a660097ed8b11430d42118a8fd25798/hmmerclust/hmmerclust.py#L173-L207
| null |
class OrganismDB(object):
"""The database object of hmmerclust.
On initialization, takes a list of genbank accessions and the directory
in which they are stored, generates a list of Organisms, and creates a
combined_fasta text file which is later queried by HMMER.
Args
database_name (string): e.g. 'my_database'
genome_list (list): ['NC_015758.gb', 'NC_002695.gb']
genome_dir (string): path to genome location
freshfasta (bool): decide if a new combined_fasta file needs to be made
Attributes
database_name (string): see
genome_list (list)
genome_dir (string)
organisms (list): List of organism objects populated by the
make_organisms() function
df (DataFrame): why do we need this again?
rRNA16SDB (rRNA16SDB): an rRNA database object
"""
def __init__(self, database_name, genome_list, genome_dir, freshfasta=False):
self.database_name = database_name
self.genome_list = genome_list
self.genome_dir = genome_dir
if freshfasta==True:
self.generate_combined_fasta(self.genome_list, self.genome_dir)
self.organisms = []
self.df = None
self.make_organisms(self.genome_list, self.genome_dir)
self.rRNA16SDB = rRNA16SDB(self)
def generate_combined_fasta(self, genome_list, genome_dir):
'''Generate a combined fasta using the genbank files.
Args
genome_list (list)
genome_dir (string)
'''
fasta = []
for genome in genome_list:
full_path = genome_dir + genome
handle = open(full_path, "rU")
print 'making combined fasta for', genome
try:
seq_record = SeqIO.read(handle, 'genbank')
org_accession = seq_record.name
except AssertionError,e:
print str(e), genome
for feature in seq_record.features:
if feature.type == 'CDS':
try:
prot_accession = feature.qualifiers['protein_id'][0]
prot_translation = feature.qualifiers['translation'][0]
newfast = '>' + org_accession + ',' + prot_accession + \
'\n' + prot_translation + '\n'
#if newfast not in fasta:
fasta.append(newfast)
except AttributeError,e:
print "organism %s, protein %s did not have \
the right attributes" % (org_accession, prot_accession)
print str(e)
except KeyError,e:
print "organism %s, protein %s did not have \
the right key" % (org_accession, prot_accession)
print str(e)
handle.close()
print "%s proteins were added" % len(fasta)
set_fasta = set(fasta)
print "%s unique proteins were added -- dropping redundant ones" % len(set_fasta)
faastring = "".join(set_fasta)
write_fasta = open('combined_fasta', 'w')
write_fasta.write(faastring)
write_fasta.close()
return set_fasta
def add_protein_to_organisms(self, orgprot_list):
'''
Protein factory method.
Iterates through a list of SearchIO hit objects, matches
the accession against SeqRecord features for each organism.
If there is a match, the new Protein object is created and
stored in the protein list of that Organism.
Args
orgprot_list: a list Biopython SearchIO hit objects (I think).
'''
for org in self.organisms:
handle = open(org.genome_path, "rU")
print 'adding proteins to organism', org.accession
try:
seq_record = SeqIO.read(handle, "genbank")
feature_list = []
for id in orgprot_list:
org_id = id.split(',')[0]
prot_id = id.split(',')[1]
if org.accession == org_id:
for feature in seq_record.features:
if feature.type == 'CDS':
feat_prot_acc = feature.qualifiers['protein_id'][0]
if feat_prot_acc == prot_id:
#print 'appending', hit_prot_acc
org.proteins.append(Protein(feature))
del(seq_record)
except ValueError,e:
print 'error for ', org.accession, str(e)
except AssertionError,e:
print 'error for ', org.accession, str(e)
except UnboundLocalError,e:
print 'error for ', org.accession, str(e)
except KeyError,e:
print 'error for ', org.accession, str(e)
handle.close()
def add_hits_to_proteins(self, hmm_hit_list):
'''Add HMMER results to Protein objects'''
for org in self.organisms:
print "adding SearchIO hit objects for", org.accession
for hit in hmm_hit_list:
hit_org_id = hit.id.split(',')[0]
hit_prot_id = hit.id.split(',')[1]
if org.accession == hit_org_id:
for prot in org.proteins:
if prot.accession == hit_prot_id:
prot.hmm_hit_list.append(hit)
def cluster_number(self, data, maxgap):
'''General function that clusters numbers.
Args
data (list): list of integers.
maxgap (int): max gap between numbers in the cluster.
'''
data.sort()
groups = [[data[0]]]
for x in data[1:]:
if abs(x - groups[-1][-1]) <= maxgap:
groups[-1].append(x)
else:
groups.append([x])
return groups
def find_loci(self, cluster_size, maxgap, locusview=False, colordict=None):
'''
Finds the loci of a given cluster size & maximum gap between cluster members.
Args
cluster_size (int): minimum number of genes in the cluster.
maxgap (int): max basepair gap between genes in the cluster.
Kwargs
locusview (bool): whether or not a map is generated for the locus_parent_organism
colordict (list): pass a pre-made color scheme for identified proteins
'''
if colordict != None:
self.search.protein_arrow_color_dict = colordict
for organism in self.organisms:
print 'finding loci for', organism.name
#reset loci if there is something in there already
organism.loci = []
orghits = []
for protein in organism.proteins:
if len(protein.hmm_hit_list) > 0:
orghits.append((organism.accession, protein.accession,
protein.start_bp, protein.end_bp, protein))
bp_start_pooled = [hit[2] for hit in orghits]
try:
clustered_data = self.cluster_number(bp_start_pooled, maxgap)
significant_cluster_list = []
for cluster in clustered_data:
if len(cluster) > cluster_size:
significant_cluster_list.append(cluster)
#print significant_cluster_list
for cluster in significant_cluster_list:
proteins_in_locus = []
cluster.sort()
for bp_start in cluster:
for hit in orghits:
if bp_start == hit[2]:
proteins_in_locus.append(hit[4])
organism.loci.append(Locus(proteins_in_locus,
organism,
self.search.query_names,
locusview))
except IndexError,e:
print 'Index error', str(e), organism.name
print 'total of', str(len(organism.loci)), 'found for', organism.name
def clear_loci():
'''reset loci in the database.'''
for org in self.organisms:
org.loci=None
|
mattsolo1/hmmerclust
|
hmmerclust/hmmerclust.py
|
OrganismDB.add_protein_to_organisms
|
python
|
def add_protein_to_organisms(self, orgprot_list):
'''
Protein factory method.
Iterates through a list of SearchIO hit objects, matches
the accession against SeqRecord features for each organism.
If there is a match, the new Protein object is created and
stored in the protein list of that Organism.
Args
orgprot_list: a list Biopython SearchIO hit objects (I think).
'''
for org in self.organisms:
handle = open(org.genome_path, "rU")
print 'adding proteins to organism', org.accession
try:
seq_record = SeqIO.read(handle, "genbank")
feature_list = []
for id in orgprot_list:
org_id = id.split(',')[0]
prot_id = id.split(',')[1]
if org.accession == org_id:
for feature in seq_record.features:
if feature.type == 'CDS':
feat_prot_acc = feature.qualifiers['protein_id'][0]
if feat_prot_acc == prot_id:
#print 'appending', hit_prot_acc
org.proteins.append(Protein(feature))
del(seq_record)
except ValueError,e:
print 'error for ', org.accession, str(e)
except AssertionError,e:
print 'error for ', org.accession, str(e)
except UnboundLocalError,e:
print 'error for ', org.accession, str(e)
except KeyError,e:
print 'error for ', org.accession, str(e)
handle.close()
|
Protein factory method.
Iterates through a list of SearchIO hit objects, matches
the accession against SeqRecord features for each organism.
If there is a match, the new Protein object is created and
stored in the protein list of that Organism.
Args
orgprot_list: a list Biopython SearchIO hit objects (I think).
|
train
|
https://github.com/mattsolo1/hmmerclust/blob/471596043a660097ed8b11430d42118a8fd25798/hmmerclust/hmmerclust.py#L209-L259
| null |
class OrganismDB(object):
"""The database object of hmmerclust.
On initialization, takes a list of genbank accessions and the directory
in which they are stored, generates a list of Organisms, and creates a
combined_fasta text file which is later queried by HMMER.
Args
database_name (string): e.g. 'my_database'
genome_list (list): ['NC_015758.gb', 'NC_002695.gb']
genome_dir (string): path to genome location
freshfasta (bool): decide if a new combined_fasta file needs to be made
Attributes
database_name (string): see
genome_list (list)
genome_dir (string)
organisms (list): List of organism objects populated by the
make_organisms() function
df (DataFrame): why do we need this again?
rRNA16SDB (rRNA16SDB): an rRNA database object
"""
def __init__(self, database_name, genome_list, genome_dir, freshfasta=False):
self.database_name = database_name
self.genome_list = genome_list
self.genome_dir = genome_dir
if freshfasta==True:
self.generate_combined_fasta(self.genome_list, self.genome_dir)
self.organisms = []
self.df = None
self.make_organisms(self.genome_list, self.genome_dir)
self.rRNA16SDB = rRNA16SDB(self)
def generate_combined_fasta(self, genome_list, genome_dir):
'''Generate a combined fasta using the genbank files.
Args
genome_list (list)
genome_dir (string)
'''
fasta = []
for genome in genome_list:
full_path = genome_dir + genome
handle = open(full_path, "rU")
print 'making combined fasta for', genome
try:
seq_record = SeqIO.read(handle, 'genbank')
org_accession = seq_record.name
except AssertionError,e:
print str(e), genome
for feature in seq_record.features:
if feature.type == 'CDS':
try:
prot_accession = feature.qualifiers['protein_id'][0]
prot_translation = feature.qualifiers['translation'][0]
newfast = '>' + org_accession + ',' + prot_accession + \
'\n' + prot_translation + '\n'
#if newfast not in fasta:
fasta.append(newfast)
except AttributeError,e:
print "organism %s, protein %s did not have \
the right attributes" % (org_accession, prot_accession)
print str(e)
except KeyError,e:
print "organism %s, protein %s did not have \
the right key" % (org_accession, prot_accession)
print str(e)
handle.close()
print "%s proteins were added" % len(fasta)
set_fasta = set(fasta)
print "%s unique proteins were added -- dropping redundant ones" % len(set_fasta)
faastring = "".join(set_fasta)
write_fasta = open('combined_fasta', 'w')
write_fasta.write(faastring)
write_fasta.close()
return set_fasta
def make_organisms(self, genome_list, genome_dir):
'''Organism factory method.
Appends organisms to the organisms list.
Args
genome_list (list)
genome_dir (string)
'''
for genome in genome_list:
genome_path = genome_dir + genome
handle = open(genome_path, "rU")
print 'Adding organism attributes for', genome
try:
seq_record = SeqIO.read(handle, "genbank")
self.organisms.append(Organism(seq_record, genome_path, self))
del(seq_record)
except ValueError,e:
print genome, str(e)
except AssertionError,e:
print genome, str(e)
except UnboundLocalError,e:
print genome, str(e)
handle.close()
def add_hits_to_proteins(self, hmm_hit_list):
'''Add HMMER results to Protein objects'''
for org in self.organisms:
print "adding SearchIO hit objects for", org.accession
for hit in hmm_hit_list:
hit_org_id = hit.id.split(',')[0]
hit_prot_id = hit.id.split(',')[1]
if org.accession == hit_org_id:
for prot in org.proteins:
if prot.accession == hit_prot_id:
prot.hmm_hit_list.append(hit)
def cluster_number(self, data, maxgap):
'''General function that clusters numbers.
Args
data (list): list of integers.
maxgap (int): max gap between numbers in the cluster.
'''
data.sort()
groups = [[data[0]]]
for x in data[1:]:
if abs(x - groups[-1][-1]) <= maxgap:
groups[-1].append(x)
else:
groups.append([x])
return groups
def find_loci(self, cluster_size, maxgap, locusview=False, colordict=None):
'''
Finds the loci of a given cluster size & maximum gap between cluster members.
Args
cluster_size (int): minimum number of genes in the cluster.
maxgap (int): max basepair gap between genes in the cluster.
Kwargs
locusview (bool): whether or not a map is generated for the locus_parent_organism
colordict (list): pass a pre-made color scheme for identified proteins
'''
if colordict != None:
self.search.protein_arrow_color_dict = colordict
for organism in self.organisms:
print 'finding loci for', organism.name
#reset loci if there is something in there already
organism.loci = []
orghits = []
for protein in organism.proteins:
if len(protein.hmm_hit_list) > 0:
orghits.append((organism.accession, protein.accession,
protein.start_bp, protein.end_bp, protein))
bp_start_pooled = [hit[2] for hit in orghits]
try:
clustered_data = self.cluster_number(bp_start_pooled, maxgap)
significant_cluster_list = []
for cluster in clustered_data:
if len(cluster) > cluster_size:
significant_cluster_list.append(cluster)
#print significant_cluster_list
for cluster in significant_cluster_list:
proteins_in_locus = []
cluster.sort()
for bp_start in cluster:
for hit in orghits:
if bp_start == hit[2]:
proteins_in_locus.append(hit[4])
organism.loci.append(Locus(proteins_in_locus,
organism,
self.search.query_names,
locusview))
except IndexError,e:
print 'Index error', str(e), organism.name
print 'total of', str(len(organism.loci)), 'found for', organism.name
def clear_loci():
'''reset loci in the database.'''
for org in self.organisms:
org.loci=None
|
mattsolo1/hmmerclust
|
hmmerclust/hmmerclust.py
|
OrganismDB.add_hits_to_proteins
|
python
|
def add_hits_to_proteins(self, hmm_hit_list):
'''Add HMMER results to Protein objects'''
for org in self.organisms:
print "adding SearchIO hit objects for", org.accession
for hit in hmm_hit_list:
hit_org_id = hit.id.split(',')[0]
hit_prot_id = hit.id.split(',')[1]
if org.accession == hit_org_id:
for prot in org.proteins:
if prot.accession == hit_prot_id:
prot.hmm_hit_list.append(hit)
|
Add HMMER results to Protein objects
|
train
|
https://github.com/mattsolo1/hmmerclust/blob/471596043a660097ed8b11430d42118a8fd25798/hmmerclust/hmmerclust.py#L261-L275
| null |
class OrganismDB(object):
"""The database object of hmmerclust.
On initialization, takes a list of genbank accessions and the directory
in which they are stored, generates a list of Organisms, and creates a
combined_fasta text file which is later queried by HMMER.
Args
database_name (string): e.g. 'my_database'
genome_list (list): ['NC_015758.gb', 'NC_002695.gb']
genome_dir (string): path to genome location
freshfasta (bool): decide if a new combined_fasta file needs to be made
Attributes
database_name (string): see
genome_list (list)
genome_dir (string)
organisms (list): List of organism objects populated by the
make_organisms() function
df (DataFrame): why do we need this again?
rRNA16SDB (rRNA16SDB): an rRNA database object
"""
def __init__(self, database_name, genome_list, genome_dir, freshfasta=False):
self.database_name = database_name
self.genome_list = genome_list
self.genome_dir = genome_dir
if freshfasta==True:
self.generate_combined_fasta(self.genome_list, self.genome_dir)
self.organisms = []
self.df = None
self.make_organisms(self.genome_list, self.genome_dir)
self.rRNA16SDB = rRNA16SDB(self)
def generate_combined_fasta(self, genome_list, genome_dir):
'''Generate a combined fasta using the genbank files.
Args
genome_list (list)
genome_dir (string)
'''
fasta = []
for genome in genome_list:
full_path = genome_dir + genome
handle = open(full_path, "rU")
print 'making combined fasta for', genome
try:
seq_record = SeqIO.read(handle, 'genbank')
org_accession = seq_record.name
except AssertionError,e:
print str(e), genome
for feature in seq_record.features:
if feature.type == 'CDS':
try:
prot_accession = feature.qualifiers['protein_id'][0]
prot_translation = feature.qualifiers['translation'][0]
newfast = '>' + org_accession + ',' + prot_accession + \
'\n' + prot_translation + '\n'
#if newfast not in fasta:
fasta.append(newfast)
except AttributeError,e:
print "organism %s, protein %s did not have \
the right attributes" % (org_accession, prot_accession)
print str(e)
except KeyError,e:
print "organism %s, protein %s did not have \
the right key" % (org_accession, prot_accession)
print str(e)
handle.close()
print "%s proteins were added" % len(fasta)
set_fasta = set(fasta)
print "%s unique proteins were added -- dropping redundant ones" % len(set_fasta)
faastring = "".join(set_fasta)
write_fasta = open('combined_fasta', 'w')
write_fasta.write(faastring)
write_fasta.close()
return set_fasta
def make_organisms(self, genome_list, genome_dir):
'''Organism factory method.
Appends organisms to the organisms list.
Args
genome_list (list)
genome_dir (string)
'''
for genome in genome_list:
genome_path = genome_dir + genome
handle = open(genome_path, "rU")
print 'Adding organism attributes for', genome
try:
seq_record = SeqIO.read(handle, "genbank")
self.organisms.append(Organism(seq_record, genome_path, self))
del(seq_record)
except ValueError,e:
print genome, str(e)
except AssertionError,e:
print genome, str(e)
except UnboundLocalError,e:
print genome, str(e)
handle.close()
def add_protein_to_organisms(self, orgprot_list):
'''
Protein factory method.
Iterates through a list of SearchIO hit objects, matches
the accession against SeqRecord features for each organism.
If there is a match, the new Protein object is created and
stored in the protein list of that Organism.
Args
orgprot_list: a list Biopython SearchIO hit objects (I think).
'''
for org in self.organisms:
handle = open(org.genome_path, "rU")
print 'adding proteins to organism', org.accession
try:
seq_record = SeqIO.read(handle, "genbank")
feature_list = []
for id in orgprot_list:
org_id = id.split(',')[0]
prot_id = id.split(',')[1]
if org.accession == org_id:
for feature in seq_record.features:
if feature.type == 'CDS':
feat_prot_acc = feature.qualifiers['protein_id'][0]
if feat_prot_acc == prot_id:
#print 'appending', hit_prot_acc
org.proteins.append(Protein(feature))
del(seq_record)
except ValueError,e:
print 'error for ', org.accession, str(e)
except AssertionError,e:
print 'error for ', org.accession, str(e)
except UnboundLocalError,e:
print 'error for ', org.accession, str(e)
except KeyError,e:
print 'error for ', org.accession, str(e)
handle.close()
def cluster_number(self, data, maxgap):
'''General function that clusters numbers.
Args
data (list): list of integers.
maxgap (int): max gap between numbers in the cluster.
'''
data.sort()
groups = [[data[0]]]
for x in data[1:]:
if abs(x - groups[-1][-1]) <= maxgap:
groups[-1].append(x)
else:
groups.append([x])
return groups
def find_loci(self, cluster_size, maxgap, locusview=False, colordict=None):
'''
Finds the loci of a given cluster size & maximum gap between cluster members.
Args
cluster_size (int): minimum number of genes in the cluster.
maxgap (int): max basepair gap between genes in the cluster.
Kwargs
locusview (bool): whether or not a map is generated for the locus_parent_organism
colordict (list): pass a pre-made color scheme for identified proteins
'''
if colordict != None:
self.search.protein_arrow_color_dict = colordict
for organism in self.organisms:
print 'finding loci for', organism.name
#reset loci if there is something in there already
organism.loci = []
orghits = []
for protein in organism.proteins:
if len(protein.hmm_hit_list) > 0:
orghits.append((organism.accession, protein.accession,
protein.start_bp, protein.end_bp, protein))
bp_start_pooled = [hit[2] for hit in orghits]
try:
clustered_data = self.cluster_number(bp_start_pooled, maxgap)
significant_cluster_list = []
for cluster in clustered_data:
if len(cluster) > cluster_size:
significant_cluster_list.append(cluster)
#print significant_cluster_list
for cluster in significant_cluster_list:
proteins_in_locus = []
cluster.sort()
for bp_start in cluster:
for hit in orghits:
if bp_start == hit[2]:
proteins_in_locus.append(hit[4])
organism.loci.append(Locus(proteins_in_locus,
organism,
self.search.query_names,
locusview))
except IndexError,e:
print 'Index error', str(e), organism.name
print 'total of', str(len(organism.loci)), 'found for', organism.name
def clear_loci():
'''reset loci in the database.'''
for org in self.organisms:
org.loci=None
|
mattsolo1/hmmerclust
|
hmmerclust/hmmerclust.py
|
OrganismDB.cluster_number
|
python
|
def cluster_number(self, data, maxgap):
'''General function that clusters numbers.
Args
data (list): list of integers.
maxgap (int): max gap between numbers in the cluster.
'''
data.sort()
groups = [[data[0]]]
for x in data[1:]:
if abs(x - groups[-1][-1]) <= maxgap:
groups[-1].append(x)
else:
groups.append([x])
return groups
|
General function that clusters numbers.
Args
data (list): list of integers.
maxgap (int): max gap between numbers in the cluster.
|
train
|
https://github.com/mattsolo1/hmmerclust/blob/471596043a660097ed8b11430d42118a8fd25798/hmmerclust/hmmerclust.py#L278-L294
| null |
class OrganismDB(object):
"""The database object of hmmerclust.
On initialization, takes a list of genbank accessions and the directory
in which they are stored, generates a list of Organisms, and creates a
combined_fasta text file which is later queried by HMMER.
Args
database_name (string): e.g. 'my_database'
genome_list (list): ['NC_015758.gb', 'NC_002695.gb']
genome_dir (string): path to genome location
freshfasta (bool): decide if a new combined_fasta file needs to be made
Attributes
database_name (string): see
genome_list (list)
genome_dir (string)
organisms (list): List of organism objects populated by the
make_organisms() function
df (DataFrame): why do we need this again?
rRNA16SDB (rRNA16SDB): an rRNA database object
"""
def __init__(self, database_name, genome_list, genome_dir, freshfasta=False):
self.database_name = database_name
self.genome_list = genome_list
self.genome_dir = genome_dir
if freshfasta==True:
self.generate_combined_fasta(self.genome_list, self.genome_dir)
self.organisms = []
self.df = None
self.make_organisms(self.genome_list, self.genome_dir)
self.rRNA16SDB = rRNA16SDB(self)
def generate_combined_fasta(self, genome_list, genome_dir):
'''Generate a combined fasta using the genbank files.
Args
genome_list (list)
genome_dir (string)
'''
fasta = []
for genome in genome_list:
full_path = genome_dir + genome
handle = open(full_path, "rU")
print 'making combined fasta for', genome
try:
seq_record = SeqIO.read(handle, 'genbank')
org_accession = seq_record.name
except AssertionError,e:
print str(e), genome
for feature in seq_record.features:
if feature.type == 'CDS':
try:
prot_accession = feature.qualifiers['protein_id'][0]
prot_translation = feature.qualifiers['translation'][0]
newfast = '>' + org_accession + ',' + prot_accession + \
'\n' + prot_translation + '\n'
#if newfast not in fasta:
fasta.append(newfast)
except AttributeError,e:
print "organism %s, protein %s did not have \
the right attributes" % (org_accession, prot_accession)
print str(e)
except KeyError,e:
print "organism %s, protein %s did not have \
the right key" % (org_accession, prot_accession)
print str(e)
handle.close()
print "%s proteins were added" % len(fasta)
set_fasta = set(fasta)
print "%s unique proteins were added -- dropping redundant ones" % len(set_fasta)
faastring = "".join(set_fasta)
write_fasta = open('combined_fasta', 'w')
write_fasta.write(faastring)
write_fasta.close()
return set_fasta
def make_organisms(self, genome_list, genome_dir):
'''Organism factory method.
Appends organisms to the organisms list.
Args
genome_list (list)
genome_dir (string)
'''
for genome in genome_list:
genome_path = genome_dir + genome
handle = open(genome_path, "rU")
print 'Adding organism attributes for', genome
try:
seq_record = SeqIO.read(handle, "genbank")
self.organisms.append(Organism(seq_record, genome_path, self))
del(seq_record)
except ValueError,e:
print genome, str(e)
except AssertionError,e:
print genome, str(e)
except UnboundLocalError,e:
print genome, str(e)
handle.close()
def add_protein_to_organisms(self, orgprot_list):
'''
Protein factory method.
Iterates through a list of SearchIO hit objects, matches
the accession against SeqRecord features for each organism.
If there is a match, the new Protein object is created and
stored in the protein list of that Organism.
Args
orgprot_list: a list Biopython SearchIO hit objects (I think).
'''
for org in self.organisms:
handle = open(org.genome_path, "rU")
print 'adding proteins to organism', org.accession
try:
seq_record = SeqIO.read(handle, "genbank")
feature_list = []
for id in orgprot_list:
org_id = id.split(',')[0]
prot_id = id.split(',')[1]
if org.accession == org_id:
for feature in seq_record.features:
if feature.type == 'CDS':
feat_prot_acc = feature.qualifiers['protein_id'][0]
if feat_prot_acc == prot_id:
#print 'appending', hit_prot_acc
org.proteins.append(Protein(feature))
del(seq_record)
except ValueError,e:
print 'error for ', org.accession, str(e)
except AssertionError,e:
print 'error for ', org.accession, str(e)
except UnboundLocalError,e:
print 'error for ', org.accession, str(e)
except KeyError,e:
print 'error for ', org.accession, str(e)
handle.close()
def add_hits_to_proteins(self, hmm_hit_list):
'''Add HMMER results to Protein objects'''
for org in self.organisms:
print "adding SearchIO hit objects for", org.accession
for hit in hmm_hit_list:
hit_org_id = hit.id.split(',')[0]
hit_prot_id = hit.id.split(',')[1]
if org.accession == hit_org_id:
for prot in org.proteins:
if prot.accession == hit_prot_id:
prot.hmm_hit_list.append(hit)
def find_loci(self, cluster_size, maxgap, locusview=False, colordict=None):
'''
Finds the loci of a given cluster size & maximum gap between cluster members.
Args
cluster_size (int): minimum number of genes in the cluster.
maxgap (int): max basepair gap between genes in the cluster.
Kwargs
locusview (bool): whether or not a map is generated for the locus_parent_organism
colordict (list): pass a pre-made color scheme for identified proteins
'''
if colordict != None:
self.search.protein_arrow_color_dict = colordict
for organism in self.organisms:
print 'finding loci for', organism.name
#reset loci if there is something in there already
organism.loci = []
orghits = []
for protein in organism.proteins:
if len(protein.hmm_hit_list) > 0:
orghits.append((organism.accession, protein.accession,
protein.start_bp, protein.end_bp, protein))
bp_start_pooled = [hit[2] for hit in orghits]
try:
clustered_data = self.cluster_number(bp_start_pooled, maxgap)
significant_cluster_list = []
for cluster in clustered_data:
if len(cluster) > cluster_size:
significant_cluster_list.append(cluster)
#print significant_cluster_list
for cluster in significant_cluster_list:
proteins_in_locus = []
cluster.sort()
for bp_start in cluster:
for hit in orghits:
if bp_start == hit[2]:
proteins_in_locus.append(hit[4])
organism.loci.append(Locus(proteins_in_locus,
organism,
self.search.query_names,
locusview))
except IndexError,e:
print 'Index error', str(e), organism.name
print 'total of', str(len(organism.loci)), 'found for', organism.name
def clear_loci():
'''reset loci in the database.'''
for org in self.organisms:
org.loci=None
|
mattsolo1/hmmerclust
|
hmmerclust/hmmerclust.py
|
OrganismDB.find_loci
|
python
|
def find_loci(self, cluster_size, maxgap, locusview=False, colordict=None):
'''
Finds the loci of a given cluster size & maximum gap between cluster members.
Args
cluster_size (int): minimum number of genes in the cluster.
maxgap (int): max basepair gap between genes in the cluster.
Kwargs
locusview (bool): whether or not a map is generated for the locus_parent_organism
colordict (list): pass a pre-made color scheme for identified proteins
'''
if colordict != None:
self.search.protein_arrow_color_dict = colordict
for organism in self.organisms:
print 'finding loci for', organism.name
#reset loci if there is something in there already
organism.loci = []
orghits = []
for protein in organism.proteins:
if len(protein.hmm_hit_list) > 0:
orghits.append((organism.accession, protein.accession,
protein.start_bp, protein.end_bp, protein))
bp_start_pooled = [hit[2] for hit in orghits]
try:
clustered_data = self.cluster_number(bp_start_pooled, maxgap)
significant_cluster_list = []
for cluster in clustered_data:
if len(cluster) > cluster_size:
significant_cluster_list.append(cluster)
#print significant_cluster_list
for cluster in significant_cluster_list:
proteins_in_locus = []
cluster.sort()
for bp_start in cluster:
for hit in orghits:
if bp_start == hit[2]:
proteins_in_locus.append(hit[4])
organism.loci.append(Locus(proteins_in_locus,
organism,
self.search.query_names,
locusview))
except IndexError,e:
print 'Index error', str(e), organism.name
print 'total of', str(len(organism.loci)), 'found for', organism.name
|
Finds the loci of a given cluster size & maximum gap between cluster members.
Args
cluster_size (int): minimum number of genes in the cluster.
maxgap (int): max basepair gap between genes in the cluster.
Kwargs
locusview (bool): whether or not a map is generated for the locus_parent_organism
colordict (list): pass a pre-made color scheme for identified proteins
|
train
|
https://github.com/mattsolo1/hmmerclust/blob/471596043a660097ed8b11430d42118a8fd25798/hmmerclust/hmmerclust.py#L296-L355
|
[
"def cluster_number(self, data, maxgap): \n '''General function that clusters numbers.\n\n Args\n data (list): list of integers.\n maxgap (int): max gap between numbers in the cluster.\n\n '''\n\n data.sort()\n groups = [[data[0]]]\n for x in data[1:]:\n if abs(x - groups[-1][-1]) <= maxgap:\n groups[-1].append(x)\n else:\n groups.append([x])\n return groups\n"
] |
class OrganismDB(object):
"""The database object of hmmerclust.
On initialization, takes a list of genbank accessions and the directory
in which they are stored, generates a list of Organisms, and creates a
combined_fasta text file which is later queried by HMMER.
Args
database_name (string): e.g. 'my_database'
genome_list (list): ['NC_015758.gb', 'NC_002695.gb']
genome_dir (string): path to genome location
freshfasta (bool): decide if a new combined_fasta file needs to be made
Attributes
database_name (string): see
genome_list (list)
genome_dir (string)
organisms (list): List of organism objects populated by the
make_organisms() function
df (DataFrame): why do we need this again?
rRNA16SDB (rRNA16SDB): an rRNA database object
"""
def __init__(self, database_name, genome_list, genome_dir, freshfasta=False):
self.database_name = database_name
self.genome_list = genome_list
self.genome_dir = genome_dir
if freshfasta==True:
self.generate_combined_fasta(self.genome_list, self.genome_dir)
self.organisms = []
self.df = None
self.make_organisms(self.genome_list, self.genome_dir)
self.rRNA16SDB = rRNA16SDB(self)
def generate_combined_fasta(self, genome_list, genome_dir):
'''Generate a combined fasta using the genbank files.
Args
genome_list (list)
genome_dir (string)
'''
fasta = []
for genome in genome_list:
full_path = genome_dir + genome
handle = open(full_path, "rU")
print 'making combined fasta for', genome
try:
seq_record = SeqIO.read(handle, 'genbank')
org_accession = seq_record.name
except AssertionError,e:
print str(e), genome
for feature in seq_record.features:
if feature.type == 'CDS':
try:
prot_accession = feature.qualifiers['protein_id'][0]
prot_translation = feature.qualifiers['translation'][0]
newfast = '>' + org_accession + ',' + prot_accession + \
'\n' + prot_translation + '\n'
#if newfast not in fasta:
fasta.append(newfast)
except AttributeError,e:
print "organism %s, protein %s did not have \
the right attributes" % (org_accession, prot_accession)
print str(e)
except KeyError,e:
print "organism %s, protein %s did not have \
the right key" % (org_accession, prot_accession)
print str(e)
handle.close()
print "%s proteins were added" % len(fasta)
set_fasta = set(fasta)
print "%s unique proteins were added -- dropping redundant ones" % len(set_fasta)
faastring = "".join(set_fasta)
write_fasta = open('combined_fasta', 'w')
write_fasta.write(faastring)
write_fasta.close()
return set_fasta
def make_organisms(self, genome_list, genome_dir):
'''Organism factory method.
Appends organisms to the organisms list.
Args
genome_list (list)
genome_dir (string)
'''
for genome in genome_list:
genome_path = genome_dir + genome
handle = open(genome_path, "rU")
print 'Adding organism attributes for', genome
try:
seq_record = SeqIO.read(handle, "genbank")
self.organisms.append(Organism(seq_record, genome_path, self))
del(seq_record)
except ValueError,e:
print genome, str(e)
except AssertionError,e:
print genome, str(e)
except UnboundLocalError,e:
print genome, str(e)
handle.close()
def add_protein_to_organisms(self, orgprot_list):
'''
Protein factory method.
Iterates through a list of SearchIO hit objects, matches
the accession against SeqRecord features for each organism.
If there is a match, the new Protein object is created and
stored in the protein list of that Organism.
Args
orgprot_list: a list Biopython SearchIO hit objects (I think).
'''
for org in self.organisms:
handle = open(org.genome_path, "rU")
print 'adding proteins to organism', org.accession
try:
seq_record = SeqIO.read(handle, "genbank")
feature_list = []
for id in orgprot_list:
org_id = id.split(',')[0]
prot_id = id.split(',')[1]
if org.accession == org_id:
for feature in seq_record.features:
if feature.type == 'CDS':
feat_prot_acc = feature.qualifiers['protein_id'][0]
if feat_prot_acc == prot_id:
#print 'appending', hit_prot_acc
org.proteins.append(Protein(feature))
del(seq_record)
except ValueError,e:
print 'error for ', org.accession, str(e)
except AssertionError,e:
print 'error for ', org.accession, str(e)
except UnboundLocalError,e:
print 'error for ', org.accession, str(e)
except KeyError,e:
print 'error for ', org.accession, str(e)
handle.close()
def add_hits_to_proteins(self, hmm_hit_list):
'''Add HMMER results to Protein objects'''
for org in self.organisms:
print "adding SearchIO hit objects for", org.accession
for hit in hmm_hit_list:
hit_org_id = hit.id.split(',')[0]
hit_prot_id = hit.id.split(',')[1]
if org.accession == hit_org_id:
for prot in org.proteins:
if prot.accession == hit_prot_id:
prot.hmm_hit_list.append(hit)
def cluster_number(self, data, maxgap):
'''General function that clusters numbers.
Args
data (list): list of integers.
maxgap (int): max gap between numbers in the cluster.
'''
data.sort()
groups = [[data[0]]]
for x in data[1:]:
if abs(x - groups[-1][-1]) <= maxgap:
groups[-1].append(x)
else:
groups.append([x])
return groups
def find_loci(self, cluster_size, maxgap, locusview=False, colordict=None):
'''
Finds the loci of a given cluster size & maximum gap between cluster members.
Args
cluster_size (int): minimum number of genes in the cluster.
maxgap (int): max basepair gap between genes in the cluster.
Kwargs
locusview (bool): whether or not a map is generated for the locus_parent_organism
colordict (list): pass a pre-made color scheme for identified proteins
'''
if colordict != None:
self.search.protein_arrow_color_dict = colordict
for organism in self.organisms:
print 'finding loci for', organism.name
#reset loci if there is something in there already
organism.loci = []
orghits = []
for protein in organism.proteins:
if len(protein.hmm_hit_list) > 0:
orghits.append((organism.accession, protein.accession,
protein.start_bp, protein.end_bp, protein))
bp_start_pooled = [hit[2] for hit in orghits]
try:
clustered_data = self.cluster_number(bp_start_pooled, maxgap)
significant_cluster_list = []
for cluster in clustered_data:
if len(cluster) > cluster_size:
significant_cluster_list.append(cluster)
#print significant_cluster_list
for cluster in significant_cluster_list:
proteins_in_locus = []
cluster.sort()
for bp_start in cluster:
for hit in orghits:
if bp_start == hit[2]:
proteins_in_locus.append(hit[4])
organism.loci.append(Locus(proteins_in_locus,
organism,
self.search.query_names,
locusview))
except IndexError,e:
print 'Index error', str(e), organism.name
print 'total of', str(len(organism.loci)), 'found for', organism.name
def clear_loci():
'''reset loci in the database.'''
for org in self.organisms:
org.loci=None
|
mattsolo1/hmmerclust
|
hmmerclust/hmmerclust.py
|
Protein.parse_hmm_hit_list
|
python
|
def parse_hmm_hit_list(self, hmm_hit_list):
'''
take a list of hmm hit results, take needed info,
'''
tuplist = []
for hit in hmm_hit_list:
for hsp in hit.hsps:
tup = tuplist.append((hit._query_id.split('_')[0],
hit.bitscore,
hit.evalue,
hsp.bias,
hsp.env_start,
hsp.env_end))
cols = ['name','bitscore','evalue', 'bias', 'hsp_start','hsp_end']
df = DataFrame(tuplist, columns=cols)
df.set_index('name', inplace=True)
return df
|
take a list of hmm hit results, take needed info,
|
train
|
https://github.com/mattsolo1/hmmerclust/blob/471596043a660097ed8b11430d42118a8fd25798/hmmerclust/hmmerclust.py#L462-L484
| null |
class Protein(object):
"""Encapsulates data related to a single protein/gene.
Args
feature(SeqIO feature object)
Attributes
accession info
genome location info
hmm_hit_list (list): Complete list of SearchIO hit objects.
data related to the best-scoring hit
is_in_locus (list): locus the protein/gene belongs to.
"""
def __init__(self, feature):
self.accession = feature.qualifiers['protein_id'][0]
self.gi = feature.qualifiers['db_xref'][0].split(':')[1]
self.product = feature.qualifiers['product'][0]
#self.note = feature.qualifiers['note']
self.start_bp = feature.location.start.position
self.end_bp = feature.location.end.position
self.strand = feature.location.strand
self.translation = feature.qualifiers['translation'][0]
self.numb_residues = len(self.translation)
self.hmm_hit_list = []
self.hit_dataframe = None
self.hit_name_best = 'non-hit'
self.hit_evalue_best = 'non-hit'
self.hit_bitscore_best = 'non-hit'
self.hit_bias_best = 'non-hit'
self.hit_start_best = 'non-hit'
self.hit_end_best = 'non-hit'
self.is_in_locus = None
def __repr__(self):
return "%s - %s" % (self.accession, self.product)
def parse_hmm_hit_list(self, hmm_hit_list):
'''
take a list of hmm hit results, take needed info,
'''
tuplist = []
for hit in hmm_hit_list:
for hsp in hit.hsps:
tup = tuplist.append((hit._query_id.split('_')[0],
hit.bitscore,
hit.evalue,
hsp.bias,
hsp.env_start,
hsp.env_end))
cols = ['name','bitscore','evalue', 'bias', 'hsp_start','hsp_end']
df = DataFrame(tuplist, columns=cols)
df.set_index('name', inplace=True)
return df
|
mattsolo1/hmmerclust
|
hmmerclust/hmmerclust.py
|
rRNA16SDB.write_16S_rRNA_fasta
|
python
|
def write_16S_rRNA_fasta(self, org_list):
'''
Writes a fasta file containing 16S rRNA sequences
for a list of Organism objects,
The first 16S sequence found in the seq record object is used,
since it looks like there are duplicates
'''
fasta = []
for org in org_list:
handle = open(org.genome_path, "rU")
seq_record = SeqIO.read(handle, "genbank")
for feat in seq_record.features:
if feat.type == 'rRNA':
if '16S ribosomal' in feat.qualifiers['product'][0]:
start = feat.location.start.position
end = feat.location.end.position
if ((end - start) > 1400) & ((end - start) < 1700) :
print 'rRNA sequence extracted for', org.accession
fasta.append('>' + org.accession +
'\n' +
str(feat.extract(seq_record.seq)) +
'\n')
org.rRNA16S_sequence = str(feat.extract(seq_record.seq))
break
faastring = "".join(fasta)
filename = '16S-rRNA.fasta'
write_fasta = open(filename, 'w')
write_fasta.write(faastring)
write_fasta.close()
|
Writes a fasta file containing 16S rRNA sequences
for a list of Organism objects,
The first 16S sequence found in the seq record object is used,
since it looks like there are duplicates
|
train
|
https://github.com/mattsolo1/hmmerclust/blob/471596043a660097ed8b11430d42118a8fd25798/hmmerclust/hmmerclust.py#L494-L536
| null |
class rRNA16SDB:
def __init__(self, OrganismDB):
#self.write_16S_rRNA_fasta(OrganismDB.organisms)
self.import_tree_order_from_file(OrganismDB, '16S_aligned.csv')
def write_16S_rRNA_fasta(self, org_list):
'''
Writes a fasta file containing 16S rRNA sequences
for a list of Organism objects,
The first 16S sequence found in the seq record object is used,
since it looks like there are duplicates
'''
fasta = []
for org in org_list:
handle = open(org.genome_path, "rU")
seq_record = SeqIO.read(handle, "genbank")
for feat in seq_record.features:
if feat.type == 'rRNA':
if '16S ribosomal' in feat.qualifiers['product'][0]:
start = feat.location.start.position
end = feat.location.end.position
if ((end - start) > 1400) & ((end - start) < 1700) :
print 'rRNA sequence extracted for', org.accession
fasta.append('>' + org.accession +
'\n' +
str(feat.extract(seq_record.seq)) +
'\n')
org.rRNA16S_sequence = str(feat.extract(seq_record.seq))
break
faastring = "".join(fasta)
filename = '16S-rRNA.fasta'
write_fasta = open(filename, 'w')
write_fasta.write(faastring)
write_fasta.close()
def import_tree_order_from_file(self, MyOrganismDB, filename):
'''
Import the accession list that has been ordered by position
in a phylogenetic tree. Get the index in the list, and
add this to the Organism object. Later we can use this position
to make a heatmap that matches up to a phylogenetic tree.
'''
tree_order = [acc.strip() for acc in open(filename)]
#print tree_order
for org in MyOrganismDB.organisms:
for tree_accession in tree_order:
#print tree_accession
if org.accession == tree_accession:
org.tree_order = tree_order.index(tree_accession)
|
mattsolo1/hmmerclust
|
hmmerclust/hmmerclust.py
|
rRNA16SDB.import_tree_order_from_file
|
python
|
def import_tree_order_from_file(self, MyOrganismDB, filename):
'''
Import the accession list that has been ordered by position
in a phylogenetic tree. Get the index in the list, and
add this to the Organism object. Later we can use this position
to make a heatmap that matches up to a phylogenetic tree.
'''
tree_order = [acc.strip() for acc in open(filename)]
#print tree_order
for org in MyOrganismDB.organisms:
for tree_accession in tree_order:
#print tree_accession
if org.accession == tree_accession:
org.tree_order = tree_order.index(tree_accession)
|
Import the accession list that has been ordered by position
in a phylogenetic tree. Get the index in the list, and
add this to the Organism object. Later we can use this position
to make a heatmap that matches up to a phylogenetic tree.
|
train
|
https://github.com/mattsolo1/hmmerclust/blob/471596043a660097ed8b11430d42118a8fd25798/hmmerclust/hmmerclust.py#L539-L555
| null |
class rRNA16SDB:
def __init__(self, OrganismDB):
#self.write_16S_rRNA_fasta(OrganismDB.organisms)
self.import_tree_order_from_file(OrganismDB, '16S_aligned.csv')
def write_16S_rRNA_fasta(self, org_list):
'''
Writes a fasta file containing 16S rRNA sequences
for a list of Organism objects,
The first 16S sequence found in the seq record object is used,
since it looks like there are duplicates
'''
fasta = []
for org in org_list:
handle = open(org.genome_path, "rU")
seq_record = SeqIO.read(handle, "genbank")
for feat in seq_record.features:
if feat.type == 'rRNA':
if '16S ribosomal' in feat.qualifiers['product'][0]:
start = feat.location.start.position
end = feat.location.end.position
if ((end - start) > 1400) & ((end - start) < 1700) :
print 'rRNA sequence extracted for', org.accession
fasta.append('>' + org.accession +
'\n' +
str(feat.extract(seq_record.seq)) +
'\n')
org.rRNA16S_sequence = str(feat.extract(seq_record.seq))
break
faastring = "".join(fasta)
filename = '16S-rRNA.fasta'
write_fasta = open(filename, 'w')
write_fasta.write(faastring)
write_fasta.close()
def import_tree_order_from_file(self, MyOrganismDB, filename):
'''
Import the accession list that has been ordered by position
in a phylogenetic tree. Get the index in the list, and
add this to the Organism object. Later we can use this position
to make a heatmap that matches up to a phylogenetic tree.
'''
tree_order = [acc.strip() for acc in open(filename)]
#print tree_order
for org in MyOrganismDB.organisms:
for tree_accession in tree_order:
#print tree_accession
if org.accession == tree_accession:
org.tree_order = tree_order.index(tree_accession)
|
mattsolo1/hmmerclust
|
hmmerclust/hmmerclust.py
|
HmmSearch.run_hmmbuild
|
python
|
def run_hmmbuild(self):
'''
Generate hmm with hhbuild,
output to file. Also stores query names.
'''
for alignment in self.alignment_list:
print 'building Hmm for', alignment
alignment_full_path = self.alignment_dir + alignment
query_name = alignment.split("_")[0]
self.query_names.append(query_name)
new_hmm= self.hmm_dir + query_name + ".hmm"
hmmbuild_output = subprocess.call(["hmmbuild", new_hmm,
alignment_full_path])
print 'hhbuild complete for', self.query_names
|
Generate hmm with hhbuild,
output to file. Also stores query names.
|
train
|
https://github.com/mattsolo1/hmmerclust/blob/471596043a660097ed8b11430d42118a8fd25798/hmmerclust/hmmerclust.py#L607-L627
| null |
class HmmSearch:
"""
Give alignment files, name them according to what the names
should be in the analysis.
First the hmm is built with Hmmbuild, and the hmm files output.
Then run Hmmsearch, parse the files, put each result in a list
"""
def __init__(self, OrganismDB, combined_fasta, freshbuild=True, freshsearch=True, ):
self.alignment_dir = './alignments/'
self.alignment_list = [x for x in os.listdir(self.alignment_dir) if '.txt' in x]
self.query_names = []
self.hmm_dir = './hmm/'
if not os.path.exists(self.hmm_dir):
os.makedirs(self.hmm_dir)
self.combined_fasta = combined_fasta
self.hhsearch_result_folder = './hhsearch_results/'
if not os.path.exists(self.hhsearch_result_folder):
os.makedirs(self.hhsearch_result_folder)
self.hmm_result_list=[]
if freshbuild == True:
self.run_hmmbuild()
if freshsearch == True:
self.run_hmmsearch()
self.combined_hit_list = self.extract_hit_list_from_hmmsearch_results()
self.orgprot_list = list(set([x.id for x in self.combined_hit_list]))
OrganismDB.search = self
self.protein_arrow_color_dict = self.make_protein_arrow_color_dict(self.query_names)
OrganismDB.add_protein_to_organisms(self.orgprot_list)
OrganismDB.add_hits_to_proteins(self.combined_hit_list)
self.parse_proteins(OrganismDB)
self.set_best_hit_values_for_proteins(OrganismDB)
def run_hmmsearch(self):
'''
'''
all_searches = []
for name in self.query_names:
print 'running HHsearch on', name
hmm_full_path = self.hmm_dir + name + '.hmm'
hmmsearch_output = subprocess.check_output(["hmmsearch",
"--cpu", str(processors), hmm_full_path,
self.combined_fasta])
hmm_result_file_name = self.hhsearch_result_folder + name + ".out"
self.hmm_result_list.append((name + ".out"))
f = open(hmm_result_file_name, 'w')
f.write(hmmsearch_output)
f.close()
def extract_hit_list_from_hmmsearch_results(self):
'''
Make a giant list of all the hit objects from
our search
'''
combined_list_of_hits = []
for result in self.hmm_result_list:
fullpath = self.hhsearch_result_folder + result
se = SearchIO.read(fullpath, 'hmmer3-text')
sublist = []
for hit in se:
combined_list_of_hits.append(hit)
sublist.append(hit.id)
print 'extracted', str(len(sublist)), 'hits for', result
return combined_list_of_hits
def make_protein_arrow_color_dict(self, query_names):
'''
Generates a random color for all proteins in query_names,
stores these in a dict.
'''
protein_arrow_color_dict = dict()
for protein in self.query_names:
protein_arrow_color_dict[protein] = (random(), random(), random())
return protein_arrow_color_dict
def make_hsps(self, hit):
hit_name = hit._query_id.split("_")[0]
hit_evalue = hit.evalue
hit_bitscore = hit.bitscore
def parse_proteins(self,OrganismDB):
'''
Iterate through all the proteins in the DB,
creates a hit_dataframe for each protein.
'''
for org in OrganismDB.organisms:
for prot in org.proteins:
if len(prot.hmm_hit_list) > 0:
try:
prot.hit_dataframe = prot.parse_hmm_hit_list(prot.hmm_hit_list)
except ValueError,e:
print 'error for', org.name, prot.accession, str(e)
def set_best_hit_values_for_proteins(self, OrganismDB):
'''
Iterate through all proteins in the DB,
drop duplicates in the hit_dataframe, then store the maximum
hit information as protein attributes.
'''
for org in OrganismDB.organisms:
print 'setting best hit values for', org.name
for prot in org.proteins:
if len(prot.hmm_hit_list) > 0:
try:
dd_df = prot.hit_dataframe.drop_duplicates(subset='bitscore')
try:
prot.hit_name_best = dd_df.bitscore.idxmax()
prot.hit_evalue_best = dd_df.ix[prot.hit_name_best].evalue
prot.hit_bitscore_best = dd_df.ix[prot.hit_name_best].bitscore
prot.hit_bias_best = dd_df.ix[prot.hit_name_best].bias
prot.hit_start_best = dd_df.ix[prot.hit_name_best].hsp_start
prot.hit_end_best = dd_df.ix[prot.hit_name_best].hsp_end
except:
print 'could not set best hit values for ', org.name
except AttributeError:
pass
|
mattsolo1/hmmerclust
|
hmmerclust/hmmerclust.py
|
HmmSearch.extract_hit_list_from_hmmsearch_results
|
python
|
def extract_hit_list_from_hmmsearch_results(self):
'''
Make a giant list of all the hit objects from
our search
'''
combined_list_of_hits = []
for result in self.hmm_result_list:
fullpath = self.hhsearch_result_folder + result
se = SearchIO.read(fullpath, 'hmmer3-text')
sublist = []
for hit in se:
combined_list_of_hits.append(hit)
sublist.append(hit.id)
print 'extracted', str(len(sublist)), 'hits for', result
return combined_list_of_hits
|
Make a giant list of all the hit objects from
our search
|
train
|
https://github.com/mattsolo1/hmmerclust/blob/471596043a660097ed8b11430d42118a8fd25798/hmmerclust/hmmerclust.py#L655-L676
| null |
class HmmSearch:
"""
Give alignment files, name them according to what the names
should be in the analysis.
First the hmm is built with Hmmbuild, and the hmm files output.
Then run Hmmsearch, parse the files, put each result in a list
"""
def __init__(self, OrganismDB, combined_fasta, freshbuild=True, freshsearch=True, ):
self.alignment_dir = './alignments/'
self.alignment_list = [x for x in os.listdir(self.alignment_dir) if '.txt' in x]
self.query_names = []
self.hmm_dir = './hmm/'
if not os.path.exists(self.hmm_dir):
os.makedirs(self.hmm_dir)
self.combined_fasta = combined_fasta
self.hhsearch_result_folder = './hhsearch_results/'
if not os.path.exists(self.hhsearch_result_folder):
os.makedirs(self.hhsearch_result_folder)
self.hmm_result_list=[]
if freshbuild == True:
self.run_hmmbuild()
if freshsearch == True:
self.run_hmmsearch()
self.combined_hit_list = self.extract_hit_list_from_hmmsearch_results()
self.orgprot_list = list(set([x.id for x in self.combined_hit_list]))
OrganismDB.search = self
self.protein_arrow_color_dict = self.make_protein_arrow_color_dict(self.query_names)
OrganismDB.add_protein_to_organisms(self.orgprot_list)
OrganismDB.add_hits_to_proteins(self.combined_hit_list)
self.parse_proteins(OrganismDB)
self.set_best_hit_values_for_proteins(OrganismDB)
def run_hmmbuild(self):
'''
Generate hmm with hhbuild,
output to file. Also stores query names.
'''
for alignment in self.alignment_list:
print 'building Hmm for', alignment
alignment_full_path = self.alignment_dir + alignment
query_name = alignment.split("_")[0]
self.query_names.append(query_name)
new_hmm= self.hmm_dir + query_name + ".hmm"
hmmbuild_output = subprocess.call(["hmmbuild", new_hmm,
alignment_full_path])
print 'hhbuild complete for', self.query_names
def run_hmmsearch(self):
'''
'''
all_searches = []
for name in self.query_names:
print 'running HHsearch on', name
hmm_full_path = self.hmm_dir + name + '.hmm'
hmmsearch_output = subprocess.check_output(["hmmsearch",
"--cpu", str(processors), hmm_full_path,
self.combined_fasta])
hmm_result_file_name = self.hhsearch_result_folder + name + ".out"
self.hmm_result_list.append((name + ".out"))
f = open(hmm_result_file_name, 'w')
f.write(hmmsearch_output)
f.close()
def extract_hit_list_from_hmmsearch_results(self):
'''
Make a giant list of all the hit objects from
our search
'''
combined_list_of_hits = []
for result in self.hmm_result_list:
fullpath = self.hhsearch_result_folder + result
se = SearchIO.read(fullpath, 'hmmer3-text')
sublist = []
for hit in se:
combined_list_of_hits.append(hit)
sublist.append(hit.id)
print 'extracted', str(len(sublist)), 'hits for', result
return combined_list_of_hits
def make_protein_arrow_color_dict(self, query_names):
'''
Generates a random color for all proteins in query_names,
stores these in a dict.
'''
protein_arrow_color_dict = dict()
for protein in self.query_names:
protein_arrow_color_dict[protein] = (random(), random(), random())
return protein_arrow_color_dict
def make_hsps(self, hit):
hit_name = hit._query_id.split("_")[0]
hit_evalue = hit.evalue
hit_bitscore = hit.bitscore
def parse_proteins(self,OrganismDB):
'''
Iterate through all the proteins in the DB,
creates a hit_dataframe for each protein.
'''
for org in OrganismDB.organisms:
for prot in org.proteins:
if len(prot.hmm_hit_list) > 0:
try:
prot.hit_dataframe = prot.parse_hmm_hit_list(prot.hmm_hit_list)
except ValueError,e:
print 'error for', org.name, prot.accession, str(e)
def set_best_hit_values_for_proteins(self, OrganismDB):
'''
Iterate through all proteins in the DB,
drop duplicates in the hit_dataframe, then store the maximum
hit information as protein attributes.
'''
for org in OrganismDB.organisms:
print 'setting best hit values for', org.name
for prot in org.proteins:
if len(prot.hmm_hit_list) > 0:
try:
dd_df = prot.hit_dataframe.drop_duplicates(subset='bitscore')
try:
prot.hit_name_best = dd_df.bitscore.idxmax()
prot.hit_evalue_best = dd_df.ix[prot.hit_name_best].evalue
prot.hit_bitscore_best = dd_df.ix[prot.hit_name_best].bitscore
prot.hit_bias_best = dd_df.ix[prot.hit_name_best].bias
prot.hit_start_best = dd_df.ix[prot.hit_name_best].hsp_start
prot.hit_end_best = dd_df.ix[prot.hit_name_best].hsp_end
except:
print 'could not set best hit values for ', org.name
except AttributeError:
pass
|
mattsolo1/hmmerclust
|
hmmerclust/hmmerclust.py
|
HmmSearch.make_protein_arrow_color_dict
|
python
|
def make_protein_arrow_color_dict(self, query_names):
'''
Generates a random color for all proteins in query_names,
stores these in a dict.
'''
protein_arrow_color_dict = dict()
for protein in self.query_names:
protein_arrow_color_dict[protein] = (random(), random(), random())
return protein_arrow_color_dict
|
Generates a random color for all proteins in query_names,
stores these in a dict.
|
train
|
https://github.com/mattsolo1/hmmerclust/blob/471596043a660097ed8b11430d42118a8fd25798/hmmerclust/hmmerclust.py#L679-L691
| null |
class HmmSearch:
"""
Give alignment files, name them according to what the names
should be in the analysis.
First the hmm is built with Hmmbuild, and the hmm files output.
Then run Hmmsearch, parse the files, put each result in a list
"""
def __init__(self, OrganismDB, combined_fasta, freshbuild=True, freshsearch=True, ):
self.alignment_dir = './alignments/'
self.alignment_list = [x for x in os.listdir(self.alignment_dir) if '.txt' in x]
self.query_names = []
self.hmm_dir = './hmm/'
if not os.path.exists(self.hmm_dir):
os.makedirs(self.hmm_dir)
self.combined_fasta = combined_fasta
self.hhsearch_result_folder = './hhsearch_results/'
if not os.path.exists(self.hhsearch_result_folder):
os.makedirs(self.hhsearch_result_folder)
self.hmm_result_list=[]
if freshbuild == True:
self.run_hmmbuild()
if freshsearch == True:
self.run_hmmsearch()
self.combined_hit_list = self.extract_hit_list_from_hmmsearch_results()
self.orgprot_list = list(set([x.id for x in self.combined_hit_list]))
OrganismDB.search = self
self.protein_arrow_color_dict = self.make_protein_arrow_color_dict(self.query_names)
OrganismDB.add_protein_to_organisms(self.orgprot_list)
OrganismDB.add_hits_to_proteins(self.combined_hit_list)
self.parse_proteins(OrganismDB)
self.set_best_hit_values_for_proteins(OrganismDB)
def run_hmmbuild(self):
'''
Generate hmm with hhbuild,
output to file. Also stores query names.
'''
for alignment in self.alignment_list:
print 'building Hmm for', alignment
alignment_full_path = self.alignment_dir + alignment
query_name = alignment.split("_")[0]
self.query_names.append(query_name)
new_hmm= self.hmm_dir + query_name + ".hmm"
hmmbuild_output = subprocess.call(["hmmbuild", new_hmm,
alignment_full_path])
print 'hhbuild complete for', self.query_names
def run_hmmsearch(self):
'''
'''
all_searches = []
for name in self.query_names:
print 'running HHsearch on', name
hmm_full_path = self.hmm_dir + name + '.hmm'
hmmsearch_output = subprocess.check_output(["hmmsearch",
"--cpu", str(processors), hmm_full_path,
self.combined_fasta])
hmm_result_file_name = self.hhsearch_result_folder + name + ".out"
self.hmm_result_list.append((name + ".out"))
f = open(hmm_result_file_name, 'w')
f.write(hmmsearch_output)
f.close()
def extract_hit_list_from_hmmsearch_results(self):
'''
Make a giant list of all the hit objects from
our search
'''
combined_list_of_hits = []
for result in self.hmm_result_list:
fullpath = self.hhsearch_result_folder + result
se = SearchIO.read(fullpath, 'hmmer3-text')
sublist = []
for hit in se:
combined_list_of_hits.append(hit)
sublist.append(hit.id)
print 'extracted', str(len(sublist)), 'hits for', result
return combined_list_of_hits
def make_hsps(self, hit):
hit_name = hit._query_id.split("_")[0]
hit_evalue = hit.evalue
hit_bitscore = hit.bitscore
def parse_proteins(self,OrganismDB):
'''
Iterate through all the proteins in the DB,
creates a hit_dataframe for each protein.
'''
for org in OrganismDB.organisms:
for prot in org.proteins:
if len(prot.hmm_hit_list) > 0:
try:
prot.hit_dataframe = prot.parse_hmm_hit_list(prot.hmm_hit_list)
except ValueError,e:
print 'error for', org.name, prot.accession, str(e)
def set_best_hit_values_for_proteins(self, OrganismDB):
'''
Iterate through all proteins in the DB,
drop duplicates in the hit_dataframe, then store the maximum
hit information as protein attributes.
'''
for org in OrganismDB.organisms:
print 'setting best hit values for', org.name
for prot in org.proteins:
if len(prot.hmm_hit_list) > 0:
try:
dd_df = prot.hit_dataframe.drop_duplicates(subset='bitscore')
try:
prot.hit_name_best = dd_df.bitscore.idxmax()
prot.hit_evalue_best = dd_df.ix[prot.hit_name_best].evalue
prot.hit_bitscore_best = dd_df.ix[prot.hit_name_best].bitscore
prot.hit_bias_best = dd_df.ix[prot.hit_name_best].bias
prot.hit_start_best = dd_df.ix[prot.hit_name_best].hsp_start
prot.hit_end_best = dd_df.ix[prot.hit_name_best].hsp_end
except:
print 'could not set best hit values for ', org.name
except AttributeError:
pass
|
mattsolo1/hmmerclust
|
hmmerclust/hmmerclust.py
|
HmmSearch.parse_proteins
|
python
|
def parse_proteins(self,OrganismDB):
'''
Iterate through all the proteins in the DB,
creates a hit_dataframe for each protein.
'''
for org in OrganismDB.organisms:
for prot in org.proteins:
if len(prot.hmm_hit_list) > 0:
try:
prot.hit_dataframe = prot.parse_hmm_hit_list(prot.hmm_hit_list)
except ValueError,e:
print 'error for', org.name, prot.accession, str(e)
|
Iterate through all the proteins in the DB,
creates a hit_dataframe for each protein.
|
train
|
https://github.com/mattsolo1/hmmerclust/blob/471596043a660097ed8b11430d42118a8fd25798/hmmerclust/hmmerclust.py#L701-L714
| null |
class HmmSearch:
"""
Give alignment files, name them according to what the names
should be in the analysis.
First the hmm is built with Hmmbuild, and the hmm files output.
Then run Hmmsearch, parse the files, put each result in a list
"""
def __init__(self, OrganismDB, combined_fasta, freshbuild=True, freshsearch=True, ):
self.alignment_dir = './alignments/'
self.alignment_list = [x for x in os.listdir(self.alignment_dir) if '.txt' in x]
self.query_names = []
self.hmm_dir = './hmm/'
if not os.path.exists(self.hmm_dir):
os.makedirs(self.hmm_dir)
self.combined_fasta = combined_fasta
self.hhsearch_result_folder = './hhsearch_results/'
if not os.path.exists(self.hhsearch_result_folder):
os.makedirs(self.hhsearch_result_folder)
self.hmm_result_list=[]
if freshbuild == True:
self.run_hmmbuild()
if freshsearch == True:
self.run_hmmsearch()
self.combined_hit_list = self.extract_hit_list_from_hmmsearch_results()
self.orgprot_list = list(set([x.id for x in self.combined_hit_list]))
OrganismDB.search = self
self.protein_arrow_color_dict = self.make_protein_arrow_color_dict(self.query_names)
OrganismDB.add_protein_to_organisms(self.orgprot_list)
OrganismDB.add_hits_to_proteins(self.combined_hit_list)
self.parse_proteins(OrganismDB)
self.set_best_hit_values_for_proteins(OrganismDB)
def run_hmmbuild(self):
'''
Generate hmm with hhbuild,
output to file. Also stores query names.
'''
for alignment in self.alignment_list:
print 'building Hmm for', alignment
alignment_full_path = self.alignment_dir + alignment
query_name = alignment.split("_")[0]
self.query_names.append(query_name)
new_hmm= self.hmm_dir + query_name + ".hmm"
hmmbuild_output = subprocess.call(["hmmbuild", new_hmm,
alignment_full_path])
print 'hhbuild complete for', self.query_names
def run_hmmsearch(self):
'''
'''
all_searches = []
for name in self.query_names:
print 'running HHsearch on', name
hmm_full_path = self.hmm_dir + name + '.hmm'
hmmsearch_output = subprocess.check_output(["hmmsearch",
"--cpu", str(processors), hmm_full_path,
self.combined_fasta])
hmm_result_file_name = self.hhsearch_result_folder + name + ".out"
self.hmm_result_list.append((name + ".out"))
f = open(hmm_result_file_name, 'w')
f.write(hmmsearch_output)
f.close()
def extract_hit_list_from_hmmsearch_results(self):
'''
Make a giant list of all the hit objects from
our search
'''
combined_list_of_hits = []
for result in self.hmm_result_list:
fullpath = self.hhsearch_result_folder + result
se = SearchIO.read(fullpath, 'hmmer3-text')
sublist = []
for hit in se:
combined_list_of_hits.append(hit)
sublist.append(hit.id)
print 'extracted', str(len(sublist)), 'hits for', result
return combined_list_of_hits
def make_protein_arrow_color_dict(self, query_names):
'''
Generates a random color for all proteins in query_names,
stores these in a dict.
'''
protein_arrow_color_dict = dict()
for protein in self.query_names:
protein_arrow_color_dict[protein] = (random(), random(), random())
return protein_arrow_color_dict
def make_hsps(self, hit):
hit_name = hit._query_id.split("_")[0]
hit_evalue = hit.evalue
hit_bitscore = hit.bitscore
def set_best_hit_values_for_proteins(self, OrganismDB):
'''
Iterate through all proteins in the DB,
drop duplicates in the hit_dataframe, then store the maximum
hit information as protein attributes.
'''
for org in OrganismDB.organisms:
print 'setting best hit values for', org.name
for prot in org.proteins:
if len(prot.hmm_hit_list) > 0:
try:
dd_df = prot.hit_dataframe.drop_duplicates(subset='bitscore')
try:
prot.hit_name_best = dd_df.bitscore.idxmax()
prot.hit_evalue_best = dd_df.ix[prot.hit_name_best].evalue
prot.hit_bitscore_best = dd_df.ix[prot.hit_name_best].bitscore
prot.hit_bias_best = dd_df.ix[prot.hit_name_best].bias
prot.hit_start_best = dd_df.ix[prot.hit_name_best].hsp_start
prot.hit_end_best = dd_df.ix[prot.hit_name_best].hsp_end
except:
print 'could not set best hit values for ', org.name
except AttributeError:
pass
|
mattsolo1/hmmerclust
|
hmmerclust/hmmerclust.py
|
HmmSearch.set_best_hit_values_for_proteins
|
python
|
def set_best_hit_values_for_proteins(self, OrganismDB):
'''
Iterate through all proteins in the DB,
drop duplicates in the hit_dataframe, then store the maximum
hit information as protein attributes.
'''
for org in OrganismDB.organisms:
print 'setting best hit values for', org.name
for prot in org.proteins:
if len(prot.hmm_hit_list) > 0:
try:
dd_df = prot.hit_dataframe.drop_duplicates(subset='bitscore')
try:
prot.hit_name_best = dd_df.bitscore.idxmax()
prot.hit_evalue_best = dd_df.ix[prot.hit_name_best].evalue
prot.hit_bitscore_best = dd_df.ix[prot.hit_name_best].bitscore
prot.hit_bias_best = dd_df.ix[prot.hit_name_best].bias
prot.hit_start_best = dd_df.ix[prot.hit_name_best].hsp_start
prot.hit_end_best = dd_df.ix[prot.hit_name_best].hsp_end
except:
print 'could not set best hit values for ', org.name
except AttributeError:
pass
|
Iterate through all proteins in the DB,
drop duplicates in the hit_dataframe, then store the maximum
hit information as protein attributes.
|
train
|
https://github.com/mattsolo1/hmmerclust/blob/471596043a660097ed8b11430d42118a8fd25798/hmmerclust/hmmerclust.py#L716-L741
| null |
class HmmSearch:
"""
Give alignment files, name them according to what the names
should be in the analysis.
First the hmm is built with Hmmbuild, and the hmm files output.
Then run Hmmsearch, parse the files, put each result in a list
"""
def __init__(self, OrganismDB, combined_fasta, freshbuild=True, freshsearch=True, ):
self.alignment_dir = './alignments/'
self.alignment_list = [x for x in os.listdir(self.alignment_dir) if '.txt' in x]
self.query_names = []
self.hmm_dir = './hmm/'
if not os.path.exists(self.hmm_dir):
os.makedirs(self.hmm_dir)
self.combined_fasta = combined_fasta
self.hhsearch_result_folder = './hhsearch_results/'
if not os.path.exists(self.hhsearch_result_folder):
os.makedirs(self.hhsearch_result_folder)
self.hmm_result_list=[]
if freshbuild == True:
self.run_hmmbuild()
if freshsearch == True:
self.run_hmmsearch()
self.combined_hit_list = self.extract_hit_list_from_hmmsearch_results()
self.orgprot_list = list(set([x.id for x in self.combined_hit_list]))
OrganismDB.search = self
self.protein_arrow_color_dict = self.make_protein_arrow_color_dict(self.query_names)
OrganismDB.add_protein_to_organisms(self.orgprot_list)
OrganismDB.add_hits_to_proteins(self.combined_hit_list)
self.parse_proteins(OrganismDB)
self.set_best_hit_values_for_proteins(OrganismDB)
def run_hmmbuild(self):
'''
Generate hmm with hhbuild,
output to file. Also stores query names.
'''
for alignment in self.alignment_list:
print 'building Hmm for', alignment
alignment_full_path = self.alignment_dir + alignment
query_name = alignment.split("_")[0]
self.query_names.append(query_name)
new_hmm= self.hmm_dir + query_name + ".hmm"
hmmbuild_output = subprocess.call(["hmmbuild", new_hmm,
alignment_full_path])
print 'hhbuild complete for', self.query_names
def run_hmmsearch(self):
'''
'''
all_searches = []
for name in self.query_names:
print 'running HHsearch on', name
hmm_full_path = self.hmm_dir + name + '.hmm'
hmmsearch_output = subprocess.check_output(["hmmsearch",
"--cpu", str(processors), hmm_full_path,
self.combined_fasta])
hmm_result_file_name = self.hhsearch_result_folder + name + ".out"
self.hmm_result_list.append((name + ".out"))
f = open(hmm_result_file_name, 'w')
f.write(hmmsearch_output)
f.close()
def extract_hit_list_from_hmmsearch_results(self):
'''
Make a giant list of all the hit objects from
our search
'''
combined_list_of_hits = []
for result in self.hmm_result_list:
fullpath = self.hhsearch_result_folder + result
se = SearchIO.read(fullpath, 'hmmer3-text')
sublist = []
for hit in se:
combined_list_of_hits.append(hit)
sublist.append(hit.id)
print 'extracted', str(len(sublist)), 'hits for', result
return combined_list_of_hits
def make_protein_arrow_color_dict(self, query_names):
'''
Generates a random color for all proteins in query_names,
stores these in a dict.
'''
protein_arrow_color_dict = dict()
for protein in self.query_names:
protein_arrow_color_dict[protein] = (random(), random(), random())
return protein_arrow_color_dict
def make_hsps(self, hit):
hit_name = hit._query_id.split("_")[0]
hit_evalue = hit.evalue
hit_bitscore = hit.bitscore
def parse_proteins(self,OrganismDB):
'''
Iterate through all the proteins in the DB,
creates a hit_dataframe for each protein.
'''
for org in OrganismDB.organisms:
for prot in org.proteins:
if len(prot.hmm_hit_list) > 0:
try:
prot.hit_dataframe = prot.parse_hmm_hit_list(prot.hmm_hit_list)
except ValueError,e:
print 'error for', org.name, prot.accession, str(e)
|
mattsolo1/hmmerclust
|
hmmerclust/hmmerclust.py
|
HeatMap.make_heatmap
|
python
|
def make_heatmap(self, unstacked_df, singleletters):
fig, ax = plt.subplots(num=None, figsize=(10,len(unstacked_df)/3), dpi=80, facecolor='w', edgecolor='k')
#heatmap = ax.pcolor(unstacked_df, cmap=plt.cm.Reds, alpha=2, vmax = 5)
#heatmap = ax.pcolor(unstacked_df, cmap=plt.cm.gist_ncar_r, alpha=20, vmax = 20)
#heatmap = ax.pcolor(unstacked_df, cmap=plt.cm.YlGnBu, alpha=20, vmax = 2)
heatmap = ax.pcolor(unstacked_df, cmap=plt.cm.jet, alpha=10, vmax = 5)
# ax.set_title('140616 - Esp distribution in actinobacteria')
#cb = plt.colorbar(heatmap)
#cb.set_label('# of copies')
species_names_only = ['%s locus:%s' % (x[0],str(x[1])[-12:]) for x in unstacked_df.index.values]
ax.set_aspect('equal')
ax.yaxis.set_ticks(range(0, len(unstacked_df.values)))
ax.xaxis.set_ticks(range(0, len(unstacked_df.columns)))
ax.set_xticklabels(unstacked_df.columns, rotation='90')
ax.set_yticklabels(species_names_only)
#ax.set_yticklabels(unstacked_df.index.values)
ax.tick_params(axis='both', left='off', right='off', bottom='off', top='off')
#ax.set_xticks(np.range(data.shape[0])+0.5, minor=False)
#ax.set_yticks(np.range(data.shape[1])+0.5, minor=False)
#ax.invert_yaxis()
#ax.xaxis.tick_top()
plt.grid(True, color='black', ls='-', linewidth=0.5)
'''exerimental: displaying text on the heatmap'''
if singleletters != None:
for y in range(unstacked_df.values.shape[0]):
for x in range(unstacked_df.values.shape[1]):
plt.text(x + 0.5, y + 0.5, '%.4s' % singleletters[(x)],
horizontalalignment='center',
verticalalignment='center',
)
plt.savefig("out.svg")
plt.show()
|
exerimental: displaying text on the heatmap
|
train
|
https://github.com/mattsolo1/hmmerclust/blob/471596043a660097ed8b11430d42118a8fd25798/hmmerclust/hmmerclust.py#L1094-L1136
| null |
class HeatMap:
def __init__(self, DataFrame, by_locus=False, cols=None, subset=None, singleletters=None):
self.unstacked_df = self.unstack_df(DataFrame, by_locus, cols, subset)
self.heatmap = self.make_heatmap(self.unstacked_df, singleletters)
def unstack_df(self, DataFrame, by_locus, cols, subset):
if by_locus == True:
colheads = ['org_species', 'locus_id', 'org_tree_order', 'hit_query']
else:
colheads = ['org_species', 'org_tree_order', 'hit_query']
unstacked_df = DataFrame.groupby(colheads).size().unstack()
if subset != None:
unstacked_df = unstacked_df.dropna(subset=subset)
unstacked_df = unstacked_df.fillna(0).sortlevel('org_tree_order', ascending=False)
if cols != None:
unstacked_df=unstacked_df[cols]
return unstacked_df
#print species_names_only
def make_heatmap_text(self, unstacked_df):
fig, ax = plt.subplots(num=None, figsize=(10,len(unstacked_df)/3), dpi=80, facecolor='w', edgecolor='k')
#heatmap = ax.pcolor(unstacked_df, cmap=plt.cm.Reds, alpha=2, vmax = 5)
#heatmap = ax.pcolor(unstacked_df, cmap=plt.cm.gist_ncar_r, alpha=20, vmax = 20)
#heatmap = ax.pcolor(unstacked_df, cmap=plt.cm.YlGnBu, alpha=20, vmax = 2)
heatmap = ax.pcolor(unstacked_df, cmap=plt.cm.jet, alpha=10, vmax = 5)
# ax.set_title('140616 - Esp distribution in actinobacteria')
#cb = plt.colorbar(heatmap)
#cb.set_label('# of copies')
species_names_only = ['%s locus:%s' % (x[0],str(x[1])[-12:]) for x in unstacked_df.index.values]
ax.set_aspect('equal')
ax.yaxis.set_ticks(range(0, len(unstacked_df.values)))
ax.xaxis.set_ticks(range(0, len(unstacked_df.columns)))
ax.set_xticklabels(unstacked_df.columns, rotation='90')
ax.set_yticklabels(species_names_only)
#ax.set_yticklabels(unstacked_df.index.values)
ax.tick_params(axis='both', left='off', right='off', bottom='off', top='off')
#ax.set_xticks(np.range(data.shape[0])+0.5, minor=False)
#ax.set_yticks(np.range(data.shape[1])+0.5, minor=False)
#ax.invert_yaxis()
#ax.xaxis.tick_top()
plt.grid(True, color='black', ls='-', linewidth=0.5)
'''exerimental: displaying text on the heatmap'''
for y in range(unstacked_df.values.shape[0]):
for x in range(unstacked_df.values.shape[1]):
plt.text(x + 0.5, y + 0.5, '%.4s' % 'A',
horizontalalignment='center',
verticalalignment='center',
)
plt.show()
|
mattsolo1/hmmerclust
|
hmmerclust/hmmerclust.py
|
RelatedProteinGroup.make_related_protein_fasta_from_dataframe
|
python
|
def make_related_protein_fasta_from_dataframe(self, input_df):
'''
DataFrame should have
'''
dirname = './group_fastas'
if not os.path.exists(dirname):
os.makedirs(dirname)
unique_hit_queries = set(input_df.hit_query)
for hq in unique_hit_queries:
fasta = []
subdf = input_df[input_df.hit_query==hq].reset_index()
for i in range(0, len(subdf)):
fasta.append('>' + subdf.ix[i].org_name.replace(" ", "-") +
"," + subdf.ix[i].hit_query +
"," + subdf.ix[i].prot_acc +
'\n' + subdf.ix[i].prot_translation + '\n')
faastring = "".join(fasta)
filename = './group_fastas/' + hq + '.fasta'
write_fasta = open(filename, 'w')
write_fasta.write(faastring)
write_fasta.close()
|
DataFrame should have
|
train
|
https://github.com/mattsolo1/hmmerclust/blob/471596043a660097ed8b11430d42118a8fd25798/hmmerclust/hmmerclust.py#L1200-L1231
| null |
class RelatedProteinGroup:
'''
An object representing a group of related proteins
to be used for generating alignments, phylogeny, etc.
Input is a list of Protein objects, e.g. of the same type that were
identified in the Hmm search & where found in a cluster.
Can output a fasta file for each group for making alignments & trees
'''
def __init__(self, input_df):
self.make_related_protein_fasta_from_dataframe(input_df)
|
dvdotsenko/jsonrpc.py
|
jsonrpcparts/serializers.py
|
clean_dict_keys
|
python
|
def clean_dict_keys(d):
new_d = {}
for (k, v) in d.iteritems():
new_d[str(k)] = v
return new_d
|
Convert all keys of the dict 'd' to (ascii-)strings.
:Raises: UnicodeEncodeError
|
train
|
https://github.com/dvdotsenko/jsonrpc.py/blob/19673edd77a9518ac5655bd407f6b93ffbb2cafc/jsonrpcparts/serializers.py#L16-L24
| null |
"""
JSON-RPC (especially v2) prescribes that the message format adheres
to a specific format/schema. This module contains code that helps
in serializing data (including errors) into JSON-RPC message format.
This file is part of `jsonrpcparts` project. See project's source for license and copyright.
"""
import json
import random
import time
import uuid
from . import errors
class BaseJSONRPCSerializer(object):
"""
Common base class for various json rpc serializers
Mostly done just for keeping track of common methods and attributes
(and thus define some sort of internal API/signature for these)
"""
# these are used in stringify/destringify calls like so:
# json_data_string = json.dumps(data, cls=json_encoder)
# and allow users to customize the encoder used, thus allowing
# support for json-serialization of "odd" types like sets, UUID, models ets.
# by default it's the default JSONEncoder
json_decoder = json.JSONDecoder
json_encoder = json.JSONEncoder
@classmethod
def json_dumps(cls, obj, **kwargs):
"""
A rewrap of json.dumps done for one reason - to inject a custom `cls` kwarg
:param obj:
:param kwargs:
:return:
:rtype: str
"""
if 'cls' not in kwargs:
kwargs['cls'] = cls.json_encoder
return json.dumps(obj, **kwargs)
@classmethod
def json_loads(cls, s, **kwargs):
"""
A rewrap of json.loads done for one reason - to inject a custom `cls` kwarg
:param s:
:param kwargs:
:return:
:rtype: dict
"""
if 'cls' not in kwargs:
kwargs['cls'] = cls.json_decoder
return json.loads(s, **kwargs)
@staticmethod
def assemble_request(method, *args, **kwargs):
"""serialize JSON-RPC-Request
"""
raise NotImplemented
@staticmethod
def assemble_response(result, *args, **kwargs):
"""serialize a JSON-RPC-Response (without error)
"""
raise NotImplemented
@staticmethod
def parse_request(jsonrpc_message_as_string, *args, **kwargs):
"""We take apart JSON-RPC-formatted message as a string and decompose it
into a dictionary object, emitting errors if parsing detects issues with
the format of the message.
"""
raise NotImplemented
@staticmethod
def parse_response(jsonrpc_message_as_string, *args, **kwargs):
"""de-serialize a JSON-RPC Response/error
"""
raise NotImplemented
#----------------------
# JSON-RPC 1.0
class JSONRPC10Serializer(BaseJSONRPCSerializer):
"""JSON-RPC V1.0 data-structure / serializer
This implementation is quite liberal in what it accepts: It treats
missing "params" and "id" in Requests and missing "result"/"error" in
Responses as empty/null.
:SeeAlso: JSON-RPC 1.0 specification
:TODO: catch json.dumps not-serializable-exceptions
"""
@staticmethod
def assemble_request(method, params=tuple(), id=0):
"""serialize JSON-RPC-Request
:Parameters:
- method: the method-name (str/unicode)
- params: the parameters (list/tuple)
- id: if id=None, this results in a Notification
:Returns: | {"method": "...", "params": ..., "id": ...}
| "method", "params" and "id" are always in this order.
:Raises: TypeError if method/params is of wrong type or
not JSON-serializable
"""
if not isinstance(method, (str, unicode)):
raise TypeError('"method" must be a string (or unicode string).')
if not isinstance(params, (tuple, list)):
raise TypeError("params must be a tuple/list.")
return {
"method": method,
"params": params,
"id": id
}
@staticmethod
def assemble_notification_request(method, params=tuple()):
"""serialize a JSON-RPC-Notification
:Parameters: see dumps_request
:Returns: | {"method": "...", "params": ..., "id": null}
| "method", "params" and "id" are always in this order.
:Raises: see dumps_request
"""
if not isinstance(method, (str, unicode)):
raise TypeError('"method" must be a string (or unicode string).')
if not isinstance(params, (tuple, list)):
raise TypeError("params must be a tuple/list.")
return {
"method": method,
"params": params,
"id": None
}
@staticmethod
def assemble_response(result, id=None):
"""serialize a JSON-RPC-Response (without error)
:Returns: | {"result": ..., "error": null, "id": ...}
| "result", "error" and "id" are always in this order.
:Raises: TypeError if not JSON-serializable
"""
return {
"result": result,
"error": None,
"id": id
}
@staticmethod
def assemble_error_response(error, id=None):
"""serialize a JSON-RPC-Response-error
Since JSON-RPC 1.0 does not define an error-object, this uses the
JSON-RPC 2.0 error-object.
:Parameters:
- error: a RPCFault instance
:Returns: | {"result": null, "error": {"code": error_code, "message": error_message, "data": error_data}, "id": ...}
| "result", "error" and "id" are always in this order, data is omitted if None.
:Raises: ValueError if error is not a RPCFault instance,
TypeError if not JSON-serializable
"""
if not isinstance(error, errors.RPCFault):
raise ValueError("""error must be a RPCFault-instance.""")
if error.error_data is None:
return {
"result": None,
"error": {
"code": error.error_code,
"message": error.message
},
"id": id
}
else:
return {
"result": None,
"error": {
"code":error.error_code,
"message": error.message,
"data": error.error_data
},
"id": id
}
@classmethod
def parse_request(cls, jsonrpc_message):
"""We take apart JSON-RPC-formatted message as a string and decompose it
into a dictionary object, emitting errors if parsing detects issues with
the format of the message.
:Returns: | [method_name, params, id] or [method_name, params]
| params is a tuple/list
| if id is missing, this is a Notification
:Raises: RPCParseError, RPCInvalidRPC, RPCInvalidMethodParams
"""
try:
data = cls.json_loads(jsonrpc_message)
except ValueError, err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if not isinstance(data, dict):
raise errors.RPCInvalidRPC("No valid RPC-package.")
if "method" not in data:
raise errors.RPCInvalidRPC("""Invalid Request, "method" is missing.""")
if not isinstance(data["method"], (str, unicode)):
raise errors.RPCInvalidRPC("""Invalid Request, "method" must be a string.""")
if "id" not in data:
data["id"] = None #be liberal
if "params" not in data:
data["params"] = () #be liberal
if not isinstance(data["params"], (list, tuple)):
raise errors.RPCInvalidRPC("""Invalid Request, "params" must be an array.""")
if len(data) != 3:
raise errors.RPCInvalidRPC("""Invalid Request, additional fields found.""")
# notification / request
if data["id"] is None:
return data["method"], data["params"] #notification
else:
return data["method"], data["params"], data["id"] #request
@classmethod
def parse_response(cls, jsonrpc_message):
"""de-serialize a JSON-RPC Response/error
:Returns: | [result, id] for Responses
:Raises: | RPCFault+derivates for error-packages/faults, RPCParseError, RPCInvalidRPC
| Note that for error-packages which do not match the
V2.0-definition, RPCFault(-1, "Error", RECEIVED_ERROR_OBJ)
is raised.
"""
try:
data = cls.json_loads(jsonrpc_message)
except ValueError, err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if not isinstance(data, dict):
raise errors.RPCInvalidRPC("No valid RPC-package.")
if "id" not in data:
raise errors.RPCInvalidRPC("""Invalid Response, "id" missing.""")
if "result" not in data:
data["result"] = None #be liberal
if "error" not in data:
data["error"] = None #be liberal
if len(data) != 3:
raise errors.RPCInvalidRPC("""Invalid Response, additional or missing fields.""")
#error
if data["error"] is not None:
if data["result"] is not None:
raise errors.RPCInvalidRPC("""Invalid Response, one of "result" or "error" must be null.""")
#v2.0 error-format
if (
isinstance(data["error"], dict) and
"code" in data["error"] and
"message" in data["error"] and
(
len(data["error"]) == 2 or
("data" in data["error"] and len(data["error"])==3)
)
):
if "data" not in data["error"]:
error_data = None
else:
error_data = data["error"]["data"]
if data["error"]["code"] == errors.PARSE_ERROR:
raise errors.RPCParseError(error_data)
elif data["error"]["code"] == errors.INVALID_REQUEST:
raise errors.RPCInvalidRPC(error_data)
elif data["error"]["code"] == errors.METHOD_NOT_FOUND:
raise errors.RPCMethodNotFound(error_data)
elif data["error"]["code"] == errors.INVALID_METHOD_PARAMS:
raise errors.RPCInvalidMethodParams(error_data)
elif data["error"]["code"] == errors.INTERNAL_ERROR:
raise errors.RPCInternalError(error_data)
elif data["error"]["code"] == errors.PROCEDURE_EXCEPTION:
raise errors.RPCProcedureException(error_data)
elif data["error"]["code"] == errors.AUTHENTIFICATION_ERROR:
raise errors.RPCAuthentificationError(error_data)
elif data["error"]["code"] == errors.PERMISSION_DENIED:
raise errors.RPCPermissionDenied(error_data)
elif data["error"]["code"] == errors.INVALID_PARAM_VALUES:
raise errors.RPCInvalidParamValues(error_data)
else:
raise errors.RPCFault(data["error"]["code"], data["error"]["message"], error_data)
#other error-format
else:
raise errors.RPCFault(-1, "Error", data["error"])
#result
else:
return data["result"], data["id"]
#----------------------
# JSON-RPC 2.0
class JSONRPC20Serializer(BaseJSONRPCSerializer):
@staticmethod
def assemble_request(method, params=None, notification=False):
"""serialize JSON-RPC-Request
:Parameters:
- method: the method-name (str/unicode)
- params: the parameters (None/list/tuple/dict)
- notification: bool
:Returns: | {"jsonrpc": "2.0", "method": "...", "params": ..., "id": ...}
| "jsonrpc", "method", "params" and "id" are always in this order.
| "params" is omitted if empty
:Raises: TypeError if method/params is of wrong type or
not JSON-serializable
"""
if not isinstance(method, (str, unicode)):
raise TypeError('"method" must be a string (or unicode string).')
if params and not isinstance(params, (tuple, list, dict)):
raise TypeError("params must be a tuple/list/dict or None.")
base = {
"jsonrpc": "2.0",
"method": method
}
if params:
base["params"] = params
if not notification:
base['id'] = str(uuid.uuid4())
return base
@staticmethod
def assemble_response(result, request_id):
return {
"jsonrpc": "2.0",
"result": result,
"id": request_id
}
@staticmethod
def assemble_error_response(error):
if not isinstance(error, errors.RPCFault):
raise ValueError("""error must be a RPCFault-instance.""")
if error.error_data is None:
return {
"jsonrpc": "2.0",
"error": {
"code":error.error_code,
"message":error.message
},
"id": error.request_id
}
else:
return {
"jsonrpc": "2.0",
"error": {
"code":error.error_code,
"message": error.message,
"data": error.error_data
},
"id": error.request_id
}
@classmethod
def _parse_single_request(cls, request_data):
"""
:Returns: | [method_name, params, id]
| method (str)
| params (tuple/list or dict)
| id (str/int/None) (None means this is Notification)
:Raises: RPCParseError, RPCInvalidRPC, RPCInvalidMethodParams
"""
request_id = request_data.get('id', None) # Notifications don't have IDs
for argument in ['jsonrpc', 'method']:
if argument not in request_data:
raise errors.RPCInvalidRequest('argument "%s" missing.' % argument, request_id)
if not isinstance(request_data[argument], (str, unicode)):
raise errors.RPCInvalidRequest('value of argument "%s" must be a string.' % argument, request_id)
if request_data["jsonrpc"] != "2.0":
raise errors.RPCInvalidRequest('Invalid jsonrpc version.', request_id)
if "params" in request_data:
if not isinstance(request_data["params"], (list, tuple, dict)):
raise errors.RPCInvalidMethodParams(
'value of argument "parameter" is of non-supported type %s' % type(request_data["params"]),
request_id
)
return (
request_data["method"],
request_data.get("params", None),
request_id
)
@classmethod
def _parse_single_request_trap_errors(cls, request_data):
"""Traps exceptions generated by __parse_single_request and
converts them into values of request_id and error in the
returned tuple.
:Returns: (method_name, params_object, request_id, error)
Where:
- method_name is a str (or None when error is set)
- params_object is one of list/tuple/dict/None
- request_id is long/int/string/None
- error is an instance of errors.RPCFault subclass or None
"""
try:
method, params, request_id = cls._parse_single_request(request_data)
return method, params, request_id, None
except errors.RPCFault as ex:
return None, None, ex.request_id, ex
@classmethod
def parse_request(cls, request_string):
"""JSONRPC allows for **batch** requests to be communicated
as array of dicts. This method parses out each individual
element in the batch and returns a list of tuples, each
tuple a result of parsing of each item in the batch.
:Returns: | tuple of (results, is_batch_mode_flag)
| where:
| - results is a tuple describing the request
| - Is_batch_mode_flag is a Bool indicating if the
| request came in in batch mode (as array of requests) or not.
:Raises: RPCParseError, RPCInvalidRequest
"""
try:
batch = cls.json_loads(request_string)
except ValueError as err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if isinstance(batch, (list, tuple)) and batch:
# batch is true batch.
# list of parsed request objects, is_batch_mode_flag
return [cls._parse_single_request_trap_errors(request) for request in batch], True
elif isinstance(batch, dict):
# `batch` is actually single request object
return [cls._parse_single_request_trap_errors(batch)], False
raise errors.RPCInvalidRequest("Neither a batch array nor a single request object found in the request.")
@classmethod
def _parse_single_response(cls, response_data):
"""de-serialize a JSON-RPC Response/error
:Returns: | [result, id] for Responses
:Raises: | RPCFault+derivates for error-packages/faults, RPCParseError, RPCInvalidRPC
"""
if not isinstance(response_data, dict):
raise errors.RPCInvalidRequest("No valid RPC-package.")
if "id" not in response_data:
raise errors.RPCInvalidRequest("""Invalid Response, "id" missing.""")
request_id = response_data['id']
if "jsonrpc" not in response_data:
raise errors.RPCInvalidRequest("""Invalid Response, "jsonrpc" missing.""", request_id)
if not isinstance(response_data["jsonrpc"], (str, unicode)):
raise errors.RPCInvalidRequest("""Invalid Response, "jsonrpc" must be a string.""")
if response_data["jsonrpc"] != "2.0":
raise errors.RPCInvalidRequest("""Invalid jsonrpc version.""", request_id)
error = response_data.get('error', None)
result = response_data.get('result', None)
if error and result:
raise errors.RPCInvalidRequest("""Invalid Response, only "result" OR "error" allowed.""", request_id)
if error:
if not isinstance(error, dict):
raise errors.RPCInvalidRequest("Invalid Response, invalid error-object.", request_id)
if not ("code" in error and "message" in error):
raise errors.RPCInvalidRequest("Invalid Response, invalid error-object.", request_id)
error_data = error.get("data", None)
if error['code'] in errors.ERROR_CODE_CLASS_MAP:
raise errors.ERROR_CODE_CLASS_MAP[error['code']](error_data, request_id)
else:
error_object = errors.RPCFault(error_data, request_id)
error_object.error_code = error['code']
error_object.message = error['message']
raise error_object
return result, request_id
@classmethod
def _parse_single_response_trap_errors(cls, response_data):
try:
result, request_id = cls._parse_single_response(response_data)
return result, request_id, None
except errors.RPCFault as ex:
return None, ex.request_id, ex
@classmethod
def parse_response(cls, response_string):
"""JSONRPC allows for **batch** responses to be communicated
as arrays of dicts. This method parses out each individual
element in the batch and returns a list of tuples, each
tuple a result of parsing of each item in the batch.
:Returns: | tuple of (results, is_batch_mode_flag)
| where:
| - results is a tuple describing the request
| - Is_batch_mode_flag is a Bool indicating if the
| request came in in batch mode (as array of requests) or not.
:Raises: RPCParseError, RPCInvalidRequest
"""
try:
batch = cls.json_loads(response_string)
except ValueError as err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if isinstance(batch, (list, tuple)) and batch:
# batch is true batch.
# list of parsed request objects, is_batch_mode_flag
return [cls._parse_single_response_trap_errors(response) for response in batch], True
elif isinstance(batch, dict):
# `batch` is actually single response object
return [cls._parse_single_response_trap_errors(batch)], False
raise errors.RPCParseError("Neither a batch array nor a single response object found in the response.")
|
dvdotsenko/jsonrpc.py
|
jsonrpcparts/serializers.py
|
BaseJSONRPCSerializer.json_dumps
|
python
|
def json_dumps(cls, obj, **kwargs):
if 'cls' not in kwargs:
kwargs['cls'] = cls.json_encoder
return json.dumps(obj, **kwargs)
|
A rewrap of json.dumps done for one reason - to inject a custom `cls` kwarg
:param obj:
:param kwargs:
:return:
:rtype: str
|
train
|
https://github.com/dvdotsenko/jsonrpc.py/blob/19673edd77a9518ac5655bd407f6b93ffbb2cafc/jsonrpcparts/serializers.py#L43-L54
| null |
class BaseJSONRPCSerializer(object):
"""
Common base class for various json rpc serializers
Mostly done just for keeping track of common methods and attributes
(and thus define some sort of internal API/signature for these)
"""
# these are used in stringify/destringify calls like so:
# json_data_string = json.dumps(data, cls=json_encoder)
# and allow users to customize the encoder used, thus allowing
# support for json-serialization of "odd" types like sets, UUID, models ets.
# by default it's the default JSONEncoder
json_decoder = json.JSONDecoder
json_encoder = json.JSONEncoder
@classmethod
@classmethod
def json_loads(cls, s, **kwargs):
"""
A rewrap of json.loads done for one reason - to inject a custom `cls` kwarg
:param s:
:param kwargs:
:return:
:rtype: dict
"""
if 'cls' not in kwargs:
kwargs['cls'] = cls.json_decoder
return json.loads(s, **kwargs)
@staticmethod
def assemble_request(method, *args, **kwargs):
"""serialize JSON-RPC-Request
"""
raise NotImplemented
@staticmethod
def assemble_response(result, *args, **kwargs):
"""serialize a JSON-RPC-Response (without error)
"""
raise NotImplemented
@staticmethod
def parse_request(jsonrpc_message_as_string, *args, **kwargs):
"""We take apart JSON-RPC-formatted message as a string and decompose it
into a dictionary object, emitting errors if parsing detects issues with
the format of the message.
"""
raise NotImplemented
@staticmethod
def parse_response(jsonrpc_message_as_string, *args, **kwargs):
"""de-serialize a JSON-RPC Response/error
"""
raise NotImplemented
|
dvdotsenko/jsonrpc.py
|
jsonrpcparts/serializers.py
|
BaseJSONRPCSerializer.json_loads
|
python
|
def json_loads(cls, s, **kwargs):
if 'cls' not in kwargs:
kwargs['cls'] = cls.json_decoder
return json.loads(s, **kwargs)
|
A rewrap of json.loads done for one reason - to inject a custom `cls` kwarg
:param s:
:param kwargs:
:return:
:rtype: dict
|
train
|
https://github.com/dvdotsenko/jsonrpc.py/blob/19673edd77a9518ac5655bd407f6b93ffbb2cafc/jsonrpcparts/serializers.py#L57-L68
| null |
class BaseJSONRPCSerializer(object):
"""
Common base class for various json rpc serializers
Mostly done just for keeping track of common methods and attributes
(and thus define some sort of internal API/signature for these)
"""
# these are used in stringify/destringify calls like so:
# json_data_string = json.dumps(data, cls=json_encoder)
# and allow users to customize the encoder used, thus allowing
# support for json-serialization of "odd" types like sets, UUID, models ets.
# by default it's the default JSONEncoder
json_decoder = json.JSONDecoder
json_encoder = json.JSONEncoder
@classmethod
def json_dumps(cls, obj, **kwargs):
"""
A rewrap of json.dumps done for one reason - to inject a custom `cls` kwarg
:param obj:
:param kwargs:
:return:
:rtype: str
"""
if 'cls' not in kwargs:
kwargs['cls'] = cls.json_encoder
return json.dumps(obj, **kwargs)
@classmethod
@staticmethod
def assemble_request(method, *args, **kwargs):
"""serialize JSON-RPC-Request
"""
raise NotImplemented
@staticmethod
def assemble_response(result, *args, **kwargs):
"""serialize a JSON-RPC-Response (without error)
"""
raise NotImplemented
@staticmethod
def parse_request(jsonrpc_message_as_string, *args, **kwargs):
"""We take apart JSON-RPC-formatted message as a string and decompose it
into a dictionary object, emitting errors if parsing detects issues with
the format of the message.
"""
raise NotImplemented
@staticmethod
def parse_response(jsonrpc_message_as_string, *args, **kwargs):
"""de-serialize a JSON-RPC Response/error
"""
raise NotImplemented
|
dvdotsenko/jsonrpc.py
|
jsonrpcparts/serializers.py
|
JSONRPC10Serializer.assemble_request
|
python
|
def assemble_request(method, params=tuple(), id=0):
if not isinstance(method, (str, unicode)):
raise TypeError('"method" must be a string (or unicode string).')
if not isinstance(params, (tuple, list)):
raise TypeError("params must be a tuple/list.")
return {
"method": method,
"params": params,
"id": id
}
|
serialize JSON-RPC-Request
:Parameters:
- method: the method-name (str/unicode)
- params: the parameters (list/tuple)
- id: if id=None, this results in a Notification
:Returns: | {"method": "...", "params": ..., "id": ...}
| "method", "params" and "id" are always in this order.
:Raises: TypeError if method/params is of wrong type or
not JSON-serializable
|
train
|
https://github.com/dvdotsenko/jsonrpc.py/blob/19673edd77a9518ac5655bd407f6b93ffbb2cafc/jsonrpcparts/serializers.py#L112-L133
| null |
class JSONRPC10Serializer(BaseJSONRPCSerializer):
"""JSON-RPC V1.0 data-structure / serializer
This implementation is quite liberal in what it accepts: It treats
missing "params" and "id" in Requests and missing "result"/"error" in
Responses as empty/null.
:SeeAlso: JSON-RPC 1.0 specification
:TODO: catch json.dumps not-serializable-exceptions
"""
@staticmethod
@staticmethod
def assemble_notification_request(method, params=tuple()):
"""serialize a JSON-RPC-Notification
:Parameters: see dumps_request
:Returns: | {"method": "...", "params": ..., "id": null}
| "method", "params" and "id" are always in this order.
:Raises: see dumps_request
"""
if not isinstance(method, (str, unicode)):
raise TypeError('"method" must be a string (or unicode string).')
if not isinstance(params, (tuple, list)):
raise TypeError("params must be a tuple/list.")
return {
"method": method,
"params": params,
"id": None
}
@staticmethod
def assemble_response(result, id=None):
"""serialize a JSON-RPC-Response (without error)
:Returns: | {"result": ..., "error": null, "id": ...}
| "result", "error" and "id" are always in this order.
:Raises: TypeError if not JSON-serializable
"""
return {
"result": result,
"error": None,
"id": id
}
@staticmethod
def assemble_error_response(error, id=None):
"""serialize a JSON-RPC-Response-error
Since JSON-RPC 1.0 does not define an error-object, this uses the
JSON-RPC 2.0 error-object.
:Parameters:
- error: a RPCFault instance
:Returns: | {"result": null, "error": {"code": error_code, "message": error_message, "data": error_data}, "id": ...}
| "result", "error" and "id" are always in this order, data is omitted if None.
:Raises: ValueError if error is not a RPCFault instance,
TypeError if not JSON-serializable
"""
if not isinstance(error, errors.RPCFault):
raise ValueError("""error must be a RPCFault-instance.""")
if error.error_data is None:
return {
"result": None,
"error": {
"code": error.error_code,
"message": error.message
},
"id": id
}
else:
return {
"result": None,
"error": {
"code":error.error_code,
"message": error.message,
"data": error.error_data
},
"id": id
}
@classmethod
def parse_request(cls, jsonrpc_message):
"""We take apart JSON-RPC-formatted message as a string and decompose it
into a dictionary object, emitting errors if parsing detects issues with
the format of the message.
:Returns: | [method_name, params, id] or [method_name, params]
| params is a tuple/list
| if id is missing, this is a Notification
:Raises: RPCParseError, RPCInvalidRPC, RPCInvalidMethodParams
"""
try:
data = cls.json_loads(jsonrpc_message)
except ValueError, err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if not isinstance(data, dict):
raise errors.RPCInvalidRPC("No valid RPC-package.")
if "method" not in data:
raise errors.RPCInvalidRPC("""Invalid Request, "method" is missing.""")
if not isinstance(data["method"], (str, unicode)):
raise errors.RPCInvalidRPC("""Invalid Request, "method" must be a string.""")
if "id" not in data:
data["id"] = None #be liberal
if "params" not in data:
data["params"] = () #be liberal
if not isinstance(data["params"], (list, tuple)):
raise errors.RPCInvalidRPC("""Invalid Request, "params" must be an array.""")
if len(data) != 3:
raise errors.RPCInvalidRPC("""Invalid Request, additional fields found.""")
# notification / request
if data["id"] is None:
return data["method"], data["params"] #notification
else:
return data["method"], data["params"], data["id"] #request
@classmethod
def parse_response(cls, jsonrpc_message):
"""de-serialize a JSON-RPC Response/error
:Returns: | [result, id] for Responses
:Raises: | RPCFault+derivates for error-packages/faults, RPCParseError, RPCInvalidRPC
| Note that for error-packages which do not match the
V2.0-definition, RPCFault(-1, "Error", RECEIVED_ERROR_OBJ)
is raised.
"""
try:
data = cls.json_loads(jsonrpc_message)
except ValueError, err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if not isinstance(data, dict):
raise errors.RPCInvalidRPC("No valid RPC-package.")
if "id" not in data:
raise errors.RPCInvalidRPC("""Invalid Response, "id" missing.""")
if "result" not in data:
data["result"] = None #be liberal
if "error" not in data:
data["error"] = None #be liberal
if len(data) != 3:
raise errors.RPCInvalidRPC("""Invalid Response, additional or missing fields.""")
#error
if data["error"] is not None:
if data["result"] is not None:
raise errors.RPCInvalidRPC("""Invalid Response, one of "result" or "error" must be null.""")
#v2.0 error-format
if (
isinstance(data["error"], dict) and
"code" in data["error"] and
"message" in data["error"] and
(
len(data["error"]) == 2 or
("data" in data["error"] and len(data["error"])==3)
)
):
if "data" not in data["error"]:
error_data = None
else:
error_data = data["error"]["data"]
if data["error"]["code"] == errors.PARSE_ERROR:
raise errors.RPCParseError(error_data)
elif data["error"]["code"] == errors.INVALID_REQUEST:
raise errors.RPCInvalidRPC(error_data)
elif data["error"]["code"] == errors.METHOD_NOT_FOUND:
raise errors.RPCMethodNotFound(error_data)
elif data["error"]["code"] == errors.INVALID_METHOD_PARAMS:
raise errors.RPCInvalidMethodParams(error_data)
elif data["error"]["code"] == errors.INTERNAL_ERROR:
raise errors.RPCInternalError(error_data)
elif data["error"]["code"] == errors.PROCEDURE_EXCEPTION:
raise errors.RPCProcedureException(error_data)
elif data["error"]["code"] == errors.AUTHENTIFICATION_ERROR:
raise errors.RPCAuthentificationError(error_data)
elif data["error"]["code"] == errors.PERMISSION_DENIED:
raise errors.RPCPermissionDenied(error_data)
elif data["error"]["code"] == errors.INVALID_PARAM_VALUES:
raise errors.RPCInvalidParamValues(error_data)
else:
raise errors.RPCFault(data["error"]["code"], data["error"]["message"], error_data)
#other error-format
else:
raise errors.RPCFault(-1, "Error", data["error"])
#result
else:
return data["result"], data["id"]
|
dvdotsenko/jsonrpc.py
|
jsonrpcparts/serializers.py
|
JSONRPC10Serializer.assemble_notification_request
|
python
|
def assemble_notification_request(method, params=tuple()):
if not isinstance(method, (str, unicode)):
raise TypeError('"method" must be a string (or unicode string).')
if not isinstance(params, (tuple, list)):
raise TypeError("params must be a tuple/list.")
return {
"method": method,
"params": params,
"id": None
}
|
serialize a JSON-RPC-Notification
:Parameters: see dumps_request
:Returns: | {"method": "...", "params": ..., "id": null}
| "method", "params" and "id" are always in this order.
:Raises: see dumps_request
|
train
|
https://github.com/dvdotsenko/jsonrpc.py/blob/19673edd77a9518ac5655bd407f6b93ffbb2cafc/jsonrpcparts/serializers.py#L136-L153
| null |
class JSONRPC10Serializer(BaseJSONRPCSerializer):
"""JSON-RPC V1.0 data-structure / serializer
This implementation is quite liberal in what it accepts: It treats
missing "params" and "id" in Requests and missing "result"/"error" in
Responses as empty/null.
:SeeAlso: JSON-RPC 1.0 specification
:TODO: catch json.dumps not-serializable-exceptions
"""
@staticmethod
def assemble_request(method, params=tuple(), id=0):
"""serialize JSON-RPC-Request
:Parameters:
- method: the method-name (str/unicode)
- params: the parameters (list/tuple)
- id: if id=None, this results in a Notification
:Returns: | {"method": "...", "params": ..., "id": ...}
| "method", "params" and "id" are always in this order.
:Raises: TypeError if method/params is of wrong type or
not JSON-serializable
"""
if not isinstance(method, (str, unicode)):
raise TypeError('"method" must be a string (or unicode string).')
if not isinstance(params, (tuple, list)):
raise TypeError("params must be a tuple/list.")
return {
"method": method,
"params": params,
"id": id
}
@staticmethod
@staticmethod
def assemble_response(result, id=None):
"""serialize a JSON-RPC-Response (without error)
:Returns: | {"result": ..., "error": null, "id": ...}
| "result", "error" and "id" are always in this order.
:Raises: TypeError if not JSON-serializable
"""
return {
"result": result,
"error": None,
"id": id
}
@staticmethod
def assemble_error_response(error, id=None):
"""serialize a JSON-RPC-Response-error
Since JSON-RPC 1.0 does not define an error-object, this uses the
JSON-RPC 2.0 error-object.
:Parameters:
- error: a RPCFault instance
:Returns: | {"result": null, "error": {"code": error_code, "message": error_message, "data": error_data}, "id": ...}
| "result", "error" and "id" are always in this order, data is omitted if None.
:Raises: ValueError if error is not a RPCFault instance,
TypeError if not JSON-serializable
"""
if not isinstance(error, errors.RPCFault):
raise ValueError("""error must be a RPCFault-instance.""")
if error.error_data is None:
return {
"result": None,
"error": {
"code": error.error_code,
"message": error.message
},
"id": id
}
else:
return {
"result": None,
"error": {
"code":error.error_code,
"message": error.message,
"data": error.error_data
},
"id": id
}
@classmethod
def parse_request(cls, jsonrpc_message):
"""We take apart JSON-RPC-formatted message as a string and decompose it
into a dictionary object, emitting errors if parsing detects issues with
the format of the message.
:Returns: | [method_name, params, id] or [method_name, params]
| params is a tuple/list
| if id is missing, this is a Notification
:Raises: RPCParseError, RPCInvalidRPC, RPCInvalidMethodParams
"""
try:
data = cls.json_loads(jsonrpc_message)
except ValueError, err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if not isinstance(data, dict):
raise errors.RPCInvalidRPC("No valid RPC-package.")
if "method" not in data:
raise errors.RPCInvalidRPC("""Invalid Request, "method" is missing.""")
if not isinstance(data["method"], (str, unicode)):
raise errors.RPCInvalidRPC("""Invalid Request, "method" must be a string.""")
if "id" not in data:
data["id"] = None #be liberal
if "params" not in data:
data["params"] = () #be liberal
if not isinstance(data["params"], (list, tuple)):
raise errors.RPCInvalidRPC("""Invalid Request, "params" must be an array.""")
if len(data) != 3:
raise errors.RPCInvalidRPC("""Invalid Request, additional fields found.""")
# notification / request
if data["id"] is None:
return data["method"], data["params"] #notification
else:
return data["method"], data["params"], data["id"] #request
@classmethod
def parse_response(cls, jsonrpc_message):
"""de-serialize a JSON-RPC Response/error
:Returns: | [result, id] for Responses
:Raises: | RPCFault+derivates for error-packages/faults, RPCParseError, RPCInvalidRPC
| Note that for error-packages which do not match the
V2.0-definition, RPCFault(-1, "Error", RECEIVED_ERROR_OBJ)
is raised.
"""
try:
data = cls.json_loads(jsonrpc_message)
except ValueError, err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if not isinstance(data, dict):
raise errors.RPCInvalidRPC("No valid RPC-package.")
if "id" not in data:
raise errors.RPCInvalidRPC("""Invalid Response, "id" missing.""")
if "result" not in data:
data["result"] = None #be liberal
if "error" not in data:
data["error"] = None #be liberal
if len(data) != 3:
raise errors.RPCInvalidRPC("""Invalid Response, additional or missing fields.""")
#error
if data["error"] is not None:
if data["result"] is not None:
raise errors.RPCInvalidRPC("""Invalid Response, one of "result" or "error" must be null.""")
#v2.0 error-format
if (
isinstance(data["error"], dict) and
"code" in data["error"] and
"message" in data["error"] and
(
len(data["error"]) == 2 or
("data" in data["error"] and len(data["error"])==3)
)
):
if "data" not in data["error"]:
error_data = None
else:
error_data = data["error"]["data"]
if data["error"]["code"] == errors.PARSE_ERROR:
raise errors.RPCParseError(error_data)
elif data["error"]["code"] == errors.INVALID_REQUEST:
raise errors.RPCInvalidRPC(error_data)
elif data["error"]["code"] == errors.METHOD_NOT_FOUND:
raise errors.RPCMethodNotFound(error_data)
elif data["error"]["code"] == errors.INVALID_METHOD_PARAMS:
raise errors.RPCInvalidMethodParams(error_data)
elif data["error"]["code"] == errors.INTERNAL_ERROR:
raise errors.RPCInternalError(error_data)
elif data["error"]["code"] == errors.PROCEDURE_EXCEPTION:
raise errors.RPCProcedureException(error_data)
elif data["error"]["code"] == errors.AUTHENTIFICATION_ERROR:
raise errors.RPCAuthentificationError(error_data)
elif data["error"]["code"] == errors.PERMISSION_DENIED:
raise errors.RPCPermissionDenied(error_data)
elif data["error"]["code"] == errors.INVALID_PARAM_VALUES:
raise errors.RPCInvalidParamValues(error_data)
else:
raise errors.RPCFault(data["error"]["code"], data["error"]["message"], error_data)
#other error-format
else:
raise errors.RPCFault(-1, "Error", data["error"])
#result
else:
return data["result"], data["id"]
|
dvdotsenko/jsonrpc.py
|
jsonrpcparts/serializers.py
|
JSONRPC10Serializer.parse_request
|
python
|
def parse_request(cls, jsonrpc_message):
try:
data = cls.json_loads(jsonrpc_message)
except ValueError, err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if not isinstance(data, dict):
raise errors.RPCInvalidRPC("No valid RPC-package.")
if "method" not in data:
raise errors.RPCInvalidRPC("""Invalid Request, "method" is missing.""")
if not isinstance(data["method"], (str, unicode)):
raise errors.RPCInvalidRPC("""Invalid Request, "method" must be a string.""")
if "id" not in data:
data["id"] = None #be liberal
if "params" not in data:
data["params"] = () #be liberal
if not isinstance(data["params"], (list, tuple)):
raise errors.RPCInvalidRPC("""Invalid Request, "params" must be an array.""")
if len(data) != 3:
raise errors.RPCInvalidRPC("""Invalid Request, additional fields found.""")
# notification / request
if data["id"] is None:
return data["method"], data["params"] #notification
else:
return data["method"], data["params"], data["id"]
|
We take apart JSON-RPC-formatted message as a string and decompose it
into a dictionary object, emitting errors if parsing detects issues with
the format of the message.
:Returns: | [method_name, params, id] or [method_name, params]
| params is a tuple/list
| if id is missing, this is a Notification
:Raises: RPCParseError, RPCInvalidRPC, RPCInvalidMethodParams
|
train
|
https://github.com/dvdotsenko/jsonrpc.py/blob/19673edd77a9518ac5655bd407f6b93ffbb2cafc/jsonrpcparts/serializers.py#L206-L240
| null |
class JSONRPC10Serializer(BaseJSONRPCSerializer):
"""JSON-RPC V1.0 data-structure / serializer
This implementation is quite liberal in what it accepts: It treats
missing "params" and "id" in Requests and missing "result"/"error" in
Responses as empty/null.
:SeeAlso: JSON-RPC 1.0 specification
:TODO: catch json.dumps not-serializable-exceptions
"""
@staticmethod
def assemble_request(method, params=tuple(), id=0):
"""serialize JSON-RPC-Request
:Parameters:
- method: the method-name (str/unicode)
- params: the parameters (list/tuple)
- id: if id=None, this results in a Notification
:Returns: | {"method": "...", "params": ..., "id": ...}
| "method", "params" and "id" are always in this order.
:Raises: TypeError if method/params is of wrong type or
not JSON-serializable
"""
if not isinstance(method, (str, unicode)):
raise TypeError('"method" must be a string (or unicode string).')
if not isinstance(params, (tuple, list)):
raise TypeError("params must be a tuple/list.")
return {
"method": method,
"params": params,
"id": id
}
@staticmethod
def assemble_notification_request(method, params=tuple()):
"""serialize a JSON-RPC-Notification
:Parameters: see dumps_request
:Returns: | {"method": "...", "params": ..., "id": null}
| "method", "params" and "id" are always in this order.
:Raises: see dumps_request
"""
if not isinstance(method, (str, unicode)):
raise TypeError('"method" must be a string (or unicode string).')
if not isinstance(params, (tuple, list)):
raise TypeError("params must be a tuple/list.")
return {
"method": method,
"params": params,
"id": None
}
@staticmethod
def assemble_response(result, id=None):
"""serialize a JSON-RPC-Response (without error)
:Returns: | {"result": ..., "error": null, "id": ...}
| "result", "error" and "id" are always in this order.
:Raises: TypeError if not JSON-serializable
"""
return {
"result": result,
"error": None,
"id": id
}
@staticmethod
def assemble_error_response(error, id=None):
"""serialize a JSON-RPC-Response-error
Since JSON-RPC 1.0 does not define an error-object, this uses the
JSON-RPC 2.0 error-object.
:Parameters:
- error: a RPCFault instance
:Returns: | {"result": null, "error": {"code": error_code, "message": error_message, "data": error_data}, "id": ...}
| "result", "error" and "id" are always in this order, data is omitted if None.
:Raises: ValueError if error is not a RPCFault instance,
TypeError if not JSON-serializable
"""
if not isinstance(error, errors.RPCFault):
raise ValueError("""error must be a RPCFault-instance.""")
if error.error_data is None:
return {
"result": None,
"error": {
"code": error.error_code,
"message": error.message
},
"id": id
}
else:
return {
"result": None,
"error": {
"code":error.error_code,
"message": error.message,
"data": error.error_data
},
"id": id
}
@classmethod
#request
@classmethod
def parse_response(cls, jsonrpc_message):
"""de-serialize a JSON-RPC Response/error
:Returns: | [result, id] for Responses
:Raises: | RPCFault+derivates for error-packages/faults, RPCParseError, RPCInvalidRPC
| Note that for error-packages which do not match the
V2.0-definition, RPCFault(-1, "Error", RECEIVED_ERROR_OBJ)
is raised.
"""
try:
data = cls.json_loads(jsonrpc_message)
except ValueError, err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if not isinstance(data, dict):
raise errors.RPCInvalidRPC("No valid RPC-package.")
if "id" not in data:
raise errors.RPCInvalidRPC("""Invalid Response, "id" missing.""")
if "result" not in data:
data["result"] = None #be liberal
if "error" not in data:
data["error"] = None #be liberal
if len(data) != 3:
raise errors.RPCInvalidRPC("""Invalid Response, additional or missing fields.""")
#error
if data["error"] is not None:
if data["result"] is not None:
raise errors.RPCInvalidRPC("""Invalid Response, one of "result" or "error" must be null.""")
#v2.0 error-format
if (
isinstance(data["error"], dict) and
"code" in data["error"] and
"message" in data["error"] and
(
len(data["error"]) == 2 or
("data" in data["error"] and len(data["error"])==3)
)
):
if "data" not in data["error"]:
error_data = None
else:
error_data = data["error"]["data"]
if data["error"]["code"] == errors.PARSE_ERROR:
raise errors.RPCParseError(error_data)
elif data["error"]["code"] == errors.INVALID_REQUEST:
raise errors.RPCInvalidRPC(error_data)
elif data["error"]["code"] == errors.METHOD_NOT_FOUND:
raise errors.RPCMethodNotFound(error_data)
elif data["error"]["code"] == errors.INVALID_METHOD_PARAMS:
raise errors.RPCInvalidMethodParams(error_data)
elif data["error"]["code"] == errors.INTERNAL_ERROR:
raise errors.RPCInternalError(error_data)
elif data["error"]["code"] == errors.PROCEDURE_EXCEPTION:
raise errors.RPCProcedureException(error_data)
elif data["error"]["code"] == errors.AUTHENTIFICATION_ERROR:
raise errors.RPCAuthentificationError(error_data)
elif data["error"]["code"] == errors.PERMISSION_DENIED:
raise errors.RPCPermissionDenied(error_data)
elif data["error"]["code"] == errors.INVALID_PARAM_VALUES:
raise errors.RPCInvalidParamValues(error_data)
else:
raise errors.RPCFault(data["error"]["code"], data["error"]["message"], error_data)
#other error-format
else:
raise errors.RPCFault(-1, "Error", data["error"])
#result
else:
return data["result"], data["id"]
|
dvdotsenko/jsonrpc.py
|
jsonrpcparts/serializers.py
|
JSONRPC10Serializer.parse_response
|
python
|
def parse_response(cls, jsonrpc_message):
try:
data = cls.json_loads(jsonrpc_message)
except ValueError, err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if not isinstance(data, dict):
raise errors.RPCInvalidRPC("No valid RPC-package.")
if "id" not in data:
raise errors.RPCInvalidRPC("""Invalid Response, "id" missing.""")
if "result" not in data:
data["result"] = None #be liberal
if "error" not in data:
data["error"] = None #be liberal
if len(data) != 3:
raise errors.RPCInvalidRPC("""Invalid Response, additional or missing fields.""")
#error
if data["error"] is not None:
if data["result"] is not None:
raise errors.RPCInvalidRPC("""Invalid Response, one of "result" or "error" must be null.""")
#v2.0 error-format
if (
isinstance(data["error"], dict) and
"code" in data["error"] and
"message" in data["error"] and
(
len(data["error"]) == 2 or
("data" in data["error"] and len(data["error"])==3)
)
):
if "data" not in data["error"]:
error_data = None
else:
error_data = data["error"]["data"]
if data["error"]["code"] == errors.PARSE_ERROR:
raise errors.RPCParseError(error_data)
elif data["error"]["code"] == errors.INVALID_REQUEST:
raise errors.RPCInvalidRPC(error_data)
elif data["error"]["code"] == errors.METHOD_NOT_FOUND:
raise errors.RPCMethodNotFound(error_data)
elif data["error"]["code"] == errors.INVALID_METHOD_PARAMS:
raise errors.RPCInvalidMethodParams(error_data)
elif data["error"]["code"] == errors.INTERNAL_ERROR:
raise errors.RPCInternalError(error_data)
elif data["error"]["code"] == errors.PROCEDURE_EXCEPTION:
raise errors.RPCProcedureException(error_data)
elif data["error"]["code"] == errors.AUTHENTIFICATION_ERROR:
raise errors.RPCAuthentificationError(error_data)
elif data["error"]["code"] == errors.PERMISSION_DENIED:
raise errors.RPCPermissionDenied(error_data)
elif data["error"]["code"] == errors.INVALID_PARAM_VALUES:
raise errors.RPCInvalidParamValues(error_data)
else:
raise errors.RPCFault(data["error"]["code"], data["error"]["message"], error_data)
#other error-format
else:
raise errors.RPCFault(-1, "Error", data["error"])
#result
else:
return data["result"], data["id"]
|
de-serialize a JSON-RPC Response/error
:Returns: | [result, id] for Responses
:Raises: | RPCFault+derivates for error-packages/faults, RPCParseError, RPCInvalidRPC
| Note that for error-packages which do not match the
V2.0-definition, RPCFault(-1, "Error", RECEIVED_ERROR_OBJ)
is raised.
|
train
|
https://github.com/dvdotsenko/jsonrpc.py/blob/19673edd77a9518ac5655bd407f6b93ffbb2cafc/jsonrpcparts/serializers.py#L243-L311
| null |
class JSONRPC10Serializer(BaseJSONRPCSerializer):
"""JSON-RPC V1.0 data-structure / serializer
This implementation is quite liberal in what it accepts: It treats
missing "params" and "id" in Requests and missing "result"/"error" in
Responses as empty/null.
:SeeAlso: JSON-RPC 1.0 specification
:TODO: catch json.dumps not-serializable-exceptions
"""
@staticmethod
def assemble_request(method, params=tuple(), id=0):
"""serialize JSON-RPC-Request
:Parameters:
- method: the method-name (str/unicode)
- params: the parameters (list/tuple)
- id: if id=None, this results in a Notification
:Returns: | {"method": "...", "params": ..., "id": ...}
| "method", "params" and "id" are always in this order.
:Raises: TypeError if method/params is of wrong type or
not JSON-serializable
"""
if not isinstance(method, (str, unicode)):
raise TypeError('"method" must be a string (or unicode string).')
if not isinstance(params, (tuple, list)):
raise TypeError("params must be a tuple/list.")
return {
"method": method,
"params": params,
"id": id
}
@staticmethod
def assemble_notification_request(method, params=tuple()):
"""serialize a JSON-RPC-Notification
:Parameters: see dumps_request
:Returns: | {"method": "...", "params": ..., "id": null}
| "method", "params" and "id" are always in this order.
:Raises: see dumps_request
"""
if not isinstance(method, (str, unicode)):
raise TypeError('"method" must be a string (or unicode string).')
if not isinstance(params, (tuple, list)):
raise TypeError("params must be a tuple/list.")
return {
"method": method,
"params": params,
"id": None
}
@staticmethod
def assemble_response(result, id=None):
"""serialize a JSON-RPC-Response (without error)
:Returns: | {"result": ..., "error": null, "id": ...}
| "result", "error" and "id" are always in this order.
:Raises: TypeError if not JSON-serializable
"""
return {
"result": result,
"error": None,
"id": id
}
@staticmethod
def assemble_error_response(error, id=None):
"""serialize a JSON-RPC-Response-error
Since JSON-RPC 1.0 does not define an error-object, this uses the
JSON-RPC 2.0 error-object.
:Parameters:
- error: a RPCFault instance
:Returns: | {"result": null, "error": {"code": error_code, "message": error_message, "data": error_data}, "id": ...}
| "result", "error" and "id" are always in this order, data is omitted if None.
:Raises: ValueError if error is not a RPCFault instance,
TypeError if not JSON-serializable
"""
if not isinstance(error, errors.RPCFault):
raise ValueError("""error must be a RPCFault-instance.""")
if error.error_data is None:
return {
"result": None,
"error": {
"code": error.error_code,
"message": error.message
},
"id": id
}
else:
return {
"result": None,
"error": {
"code":error.error_code,
"message": error.message,
"data": error.error_data
},
"id": id
}
@classmethod
def parse_request(cls, jsonrpc_message):
"""We take apart JSON-RPC-formatted message as a string and decompose it
into a dictionary object, emitting errors if parsing detects issues with
the format of the message.
:Returns: | [method_name, params, id] or [method_name, params]
| params is a tuple/list
| if id is missing, this is a Notification
:Raises: RPCParseError, RPCInvalidRPC, RPCInvalidMethodParams
"""
try:
data = cls.json_loads(jsonrpc_message)
except ValueError, err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if not isinstance(data, dict):
raise errors.RPCInvalidRPC("No valid RPC-package.")
if "method" not in data:
raise errors.RPCInvalidRPC("""Invalid Request, "method" is missing.""")
if not isinstance(data["method"], (str, unicode)):
raise errors.RPCInvalidRPC("""Invalid Request, "method" must be a string.""")
if "id" not in data:
data["id"] = None #be liberal
if "params" not in data:
data["params"] = () #be liberal
if not isinstance(data["params"], (list, tuple)):
raise errors.RPCInvalidRPC("""Invalid Request, "params" must be an array.""")
if len(data) != 3:
raise errors.RPCInvalidRPC("""Invalid Request, additional fields found.""")
# notification / request
if data["id"] is None:
return data["method"], data["params"] #notification
else:
return data["method"], data["params"], data["id"] #request
@classmethod
|
dvdotsenko/jsonrpc.py
|
jsonrpcparts/serializers.py
|
JSONRPC20Serializer.assemble_request
|
python
|
def assemble_request(method, params=None, notification=False):
if not isinstance(method, (str, unicode)):
raise TypeError('"method" must be a string (or unicode string).')
if params and not isinstance(params, (tuple, list, dict)):
raise TypeError("params must be a tuple/list/dict or None.")
base = {
"jsonrpc": "2.0",
"method": method
}
if params:
base["params"] = params
if not notification:
base['id'] = str(uuid.uuid4())
return base
|
serialize JSON-RPC-Request
:Parameters:
- method: the method-name (str/unicode)
- params: the parameters (None/list/tuple/dict)
- notification: bool
:Returns: | {"jsonrpc": "2.0", "method": "...", "params": ..., "id": ...}
| "jsonrpc", "method", "params" and "id" are always in this order.
| "params" is omitted if empty
:Raises: TypeError if method/params is of wrong type or
not JSON-serializable
|
train
|
https://github.com/dvdotsenko/jsonrpc.py/blob/19673edd77a9518ac5655bd407f6b93ffbb2cafc/jsonrpcparts/serializers.py#L319-L349
| null |
class JSONRPC20Serializer(BaseJSONRPCSerializer):
@staticmethod
@staticmethod
def assemble_response(result, request_id):
return {
"jsonrpc": "2.0",
"result": result,
"id": request_id
}
@staticmethod
def assemble_error_response(error):
if not isinstance(error, errors.RPCFault):
raise ValueError("""error must be a RPCFault-instance.""")
if error.error_data is None:
return {
"jsonrpc": "2.0",
"error": {
"code":error.error_code,
"message":error.message
},
"id": error.request_id
}
else:
return {
"jsonrpc": "2.0",
"error": {
"code":error.error_code,
"message": error.message,
"data": error.error_data
},
"id": error.request_id
}
@classmethod
def _parse_single_request(cls, request_data):
"""
:Returns: | [method_name, params, id]
| method (str)
| params (tuple/list or dict)
| id (str/int/None) (None means this is Notification)
:Raises: RPCParseError, RPCInvalidRPC, RPCInvalidMethodParams
"""
request_id = request_data.get('id', None) # Notifications don't have IDs
for argument in ['jsonrpc', 'method']:
if argument not in request_data:
raise errors.RPCInvalidRequest('argument "%s" missing.' % argument, request_id)
if not isinstance(request_data[argument], (str, unicode)):
raise errors.RPCInvalidRequest('value of argument "%s" must be a string.' % argument, request_id)
if request_data["jsonrpc"] != "2.0":
raise errors.RPCInvalidRequest('Invalid jsonrpc version.', request_id)
if "params" in request_data:
if not isinstance(request_data["params"], (list, tuple, dict)):
raise errors.RPCInvalidMethodParams(
'value of argument "parameter" is of non-supported type %s' % type(request_data["params"]),
request_id
)
return (
request_data["method"],
request_data.get("params", None),
request_id
)
@classmethod
def _parse_single_request_trap_errors(cls, request_data):
"""Traps exceptions generated by __parse_single_request and
converts them into values of request_id and error in the
returned tuple.
:Returns: (method_name, params_object, request_id, error)
Where:
- method_name is a str (or None when error is set)
- params_object is one of list/tuple/dict/None
- request_id is long/int/string/None
- error is an instance of errors.RPCFault subclass or None
"""
try:
method, params, request_id = cls._parse_single_request(request_data)
return method, params, request_id, None
except errors.RPCFault as ex:
return None, None, ex.request_id, ex
@classmethod
def parse_request(cls, request_string):
"""JSONRPC allows for **batch** requests to be communicated
as array of dicts. This method parses out each individual
element in the batch and returns a list of tuples, each
tuple a result of parsing of each item in the batch.
:Returns: | tuple of (results, is_batch_mode_flag)
| where:
| - results is a tuple describing the request
| - Is_batch_mode_flag is a Bool indicating if the
| request came in in batch mode (as array of requests) or not.
:Raises: RPCParseError, RPCInvalidRequest
"""
try:
batch = cls.json_loads(request_string)
except ValueError as err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if isinstance(batch, (list, tuple)) and batch:
# batch is true batch.
# list of parsed request objects, is_batch_mode_flag
return [cls._parse_single_request_trap_errors(request) for request in batch], True
elif isinstance(batch, dict):
# `batch` is actually single request object
return [cls._parse_single_request_trap_errors(batch)], False
raise errors.RPCInvalidRequest("Neither a batch array nor a single request object found in the request.")
@classmethod
def _parse_single_response(cls, response_data):
"""de-serialize a JSON-RPC Response/error
:Returns: | [result, id] for Responses
:Raises: | RPCFault+derivates for error-packages/faults, RPCParseError, RPCInvalidRPC
"""
if not isinstance(response_data, dict):
raise errors.RPCInvalidRequest("No valid RPC-package.")
if "id" not in response_data:
raise errors.RPCInvalidRequest("""Invalid Response, "id" missing.""")
request_id = response_data['id']
if "jsonrpc" not in response_data:
raise errors.RPCInvalidRequest("""Invalid Response, "jsonrpc" missing.""", request_id)
if not isinstance(response_data["jsonrpc"], (str, unicode)):
raise errors.RPCInvalidRequest("""Invalid Response, "jsonrpc" must be a string.""")
if response_data["jsonrpc"] != "2.0":
raise errors.RPCInvalidRequest("""Invalid jsonrpc version.""", request_id)
error = response_data.get('error', None)
result = response_data.get('result', None)
if error and result:
raise errors.RPCInvalidRequest("""Invalid Response, only "result" OR "error" allowed.""", request_id)
if error:
if not isinstance(error, dict):
raise errors.RPCInvalidRequest("Invalid Response, invalid error-object.", request_id)
if not ("code" in error and "message" in error):
raise errors.RPCInvalidRequest("Invalid Response, invalid error-object.", request_id)
error_data = error.get("data", None)
if error['code'] in errors.ERROR_CODE_CLASS_MAP:
raise errors.ERROR_CODE_CLASS_MAP[error['code']](error_data, request_id)
else:
error_object = errors.RPCFault(error_data, request_id)
error_object.error_code = error['code']
error_object.message = error['message']
raise error_object
return result, request_id
@classmethod
def _parse_single_response_trap_errors(cls, response_data):
try:
result, request_id = cls._parse_single_response(response_data)
return result, request_id, None
except errors.RPCFault as ex:
return None, ex.request_id, ex
@classmethod
def parse_response(cls, response_string):
"""JSONRPC allows for **batch** responses to be communicated
as arrays of dicts. This method parses out each individual
element in the batch and returns a list of tuples, each
tuple a result of parsing of each item in the batch.
:Returns: | tuple of (results, is_batch_mode_flag)
| where:
| - results is a tuple describing the request
| - Is_batch_mode_flag is a Bool indicating if the
| request came in in batch mode (as array of requests) or not.
:Raises: RPCParseError, RPCInvalidRequest
"""
try:
batch = cls.json_loads(response_string)
except ValueError as err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if isinstance(batch, (list, tuple)) and batch:
# batch is true batch.
# list of parsed request objects, is_batch_mode_flag
return [cls._parse_single_response_trap_errors(response) for response in batch], True
elif isinstance(batch, dict):
# `batch` is actually single response object
return [cls._parse_single_response_trap_errors(batch)], False
raise errors.RPCParseError("Neither a batch array nor a single response object found in the response.")
|
dvdotsenko/jsonrpc.py
|
jsonrpcparts/serializers.py
|
JSONRPC20Serializer._parse_single_request_trap_errors
|
python
|
def _parse_single_request_trap_errors(cls, request_data):
try:
method, params, request_id = cls._parse_single_request(request_data)
return method, params, request_id, None
except errors.RPCFault as ex:
return None, None, ex.request_id, ex
|
Traps exceptions generated by __parse_single_request and
converts them into values of request_id and error in the
returned tuple.
:Returns: (method_name, params_object, request_id, error)
Where:
- method_name is a str (or None when error is set)
- params_object is one of list/tuple/dict/None
- request_id is long/int/string/None
- error is an instance of errors.RPCFault subclass or None
|
train
|
https://github.com/dvdotsenko/jsonrpc.py/blob/19673edd77a9518ac5655bd407f6b93ffbb2cafc/jsonrpcparts/serializers.py#L421-L437
| null |
class JSONRPC20Serializer(BaseJSONRPCSerializer):
@staticmethod
def assemble_request(method, params=None, notification=False):
"""serialize JSON-RPC-Request
:Parameters:
- method: the method-name (str/unicode)
- params: the parameters (None/list/tuple/dict)
- notification: bool
:Returns: | {"jsonrpc": "2.0", "method": "...", "params": ..., "id": ...}
| "jsonrpc", "method", "params" and "id" are always in this order.
| "params" is omitted if empty
:Raises: TypeError if method/params is of wrong type or
not JSON-serializable
"""
if not isinstance(method, (str, unicode)):
raise TypeError('"method" must be a string (or unicode string).')
if params and not isinstance(params, (tuple, list, dict)):
raise TypeError("params must be a tuple/list/dict or None.")
base = {
"jsonrpc": "2.0",
"method": method
}
if params:
base["params"] = params
if not notification:
base['id'] = str(uuid.uuid4())
return base
@staticmethod
def assemble_response(result, request_id):
return {
"jsonrpc": "2.0",
"result": result,
"id": request_id
}
@staticmethod
def assemble_error_response(error):
if not isinstance(error, errors.RPCFault):
raise ValueError("""error must be a RPCFault-instance.""")
if error.error_data is None:
return {
"jsonrpc": "2.0",
"error": {
"code":error.error_code,
"message":error.message
},
"id": error.request_id
}
else:
return {
"jsonrpc": "2.0",
"error": {
"code":error.error_code,
"message": error.message,
"data": error.error_data
},
"id": error.request_id
}
@classmethod
def _parse_single_request(cls, request_data):
"""
:Returns: | [method_name, params, id]
| method (str)
| params (tuple/list or dict)
| id (str/int/None) (None means this is Notification)
:Raises: RPCParseError, RPCInvalidRPC, RPCInvalidMethodParams
"""
request_id = request_data.get('id', None) # Notifications don't have IDs
for argument in ['jsonrpc', 'method']:
if argument not in request_data:
raise errors.RPCInvalidRequest('argument "%s" missing.' % argument, request_id)
if not isinstance(request_data[argument], (str, unicode)):
raise errors.RPCInvalidRequest('value of argument "%s" must be a string.' % argument, request_id)
if request_data["jsonrpc"] != "2.0":
raise errors.RPCInvalidRequest('Invalid jsonrpc version.', request_id)
if "params" in request_data:
if not isinstance(request_data["params"], (list, tuple, dict)):
raise errors.RPCInvalidMethodParams(
'value of argument "parameter" is of non-supported type %s' % type(request_data["params"]),
request_id
)
return (
request_data["method"],
request_data.get("params", None),
request_id
)
@classmethod
@classmethod
def parse_request(cls, request_string):
"""JSONRPC allows for **batch** requests to be communicated
as array of dicts. This method parses out each individual
element in the batch and returns a list of tuples, each
tuple a result of parsing of each item in the batch.
:Returns: | tuple of (results, is_batch_mode_flag)
| where:
| - results is a tuple describing the request
| - Is_batch_mode_flag is a Bool indicating if the
| request came in in batch mode (as array of requests) or not.
:Raises: RPCParseError, RPCInvalidRequest
"""
try:
batch = cls.json_loads(request_string)
except ValueError as err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if isinstance(batch, (list, tuple)) and batch:
# batch is true batch.
# list of parsed request objects, is_batch_mode_flag
return [cls._parse_single_request_trap_errors(request) for request in batch], True
elif isinstance(batch, dict):
# `batch` is actually single request object
return [cls._parse_single_request_trap_errors(batch)], False
raise errors.RPCInvalidRequest("Neither a batch array nor a single request object found in the request.")
@classmethod
def _parse_single_response(cls, response_data):
"""de-serialize a JSON-RPC Response/error
:Returns: | [result, id] for Responses
:Raises: | RPCFault+derivates for error-packages/faults, RPCParseError, RPCInvalidRPC
"""
if not isinstance(response_data, dict):
raise errors.RPCInvalidRequest("No valid RPC-package.")
if "id" not in response_data:
raise errors.RPCInvalidRequest("""Invalid Response, "id" missing.""")
request_id = response_data['id']
if "jsonrpc" not in response_data:
raise errors.RPCInvalidRequest("""Invalid Response, "jsonrpc" missing.""", request_id)
if not isinstance(response_data["jsonrpc"], (str, unicode)):
raise errors.RPCInvalidRequest("""Invalid Response, "jsonrpc" must be a string.""")
if response_data["jsonrpc"] != "2.0":
raise errors.RPCInvalidRequest("""Invalid jsonrpc version.""", request_id)
error = response_data.get('error', None)
result = response_data.get('result', None)
if error and result:
raise errors.RPCInvalidRequest("""Invalid Response, only "result" OR "error" allowed.""", request_id)
if error:
if not isinstance(error, dict):
raise errors.RPCInvalidRequest("Invalid Response, invalid error-object.", request_id)
if not ("code" in error and "message" in error):
raise errors.RPCInvalidRequest("Invalid Response, invalid error-object.", request_id)
error_data = error.get("data", None)
if error['code'] in errors.ERROR_CODE_CLASS_MAP:
raise errors.ERROR_CODE_CLASS_MAP[error['code']](error_data, request_id)
else:
error_object = errors.RPCFault(error_data, request_id)
error_object.error_code = error['code']
error_object.message = error['message']
raise error_object
return result, request_id
@classmethod
def _parse_single_response_trap_errors(cls, response_data):
try:
result, request_id = cls._parse_single_response(response_data)
return result, request_id, None
except errors.RPCFault as ex:
return None, ex.request_id, ex
@classmethod
def parse_response(cls, response_string):
"""JSONRPC allows for **batch** responses to be communicated
as arrays of dicts. This method parses out each individual
element in the batch and returns a list of tuples, each
tuple a result of parsing of each item in the batch.
:Returns: | tuple of (results, is_batch_mode_flag)
| where:
| - results is a tuple describing the request
| - Is_batch_mode_flag is a Bool indicating if the
| request came in in batch mode (as array of requests) or not.
:Raises: RPCParseError, RPCInvalidRequest
"""
try:
batch = cls.json_loads(response_string)
except ValueError as err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if isinstance(batch, (list, tuple)) and batch:
# batch is true batch.
# list of parsed request objects, is_batch_mode_flag
return [cls._parse_single_response_trap_errors(response) for response in batch], True
elif isinstance(batch, dict):
# `batch` is actually single response object
return [cls._parse_single_response_trap_errors(batch)], False
raise errors.RPCParseError("Neither a batch array nor a single response object found in the response.")
|
dvdotsenko/jsonrpc.py
|
jsonrpcparts/serializers.py
|
JSONRPC20Serializer.parse_request
|
python
|
def parse_request(cls, request_string):
try:
batch = cls.json_loads(request_string)
except ValueError as err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if isinstance(batch, (list, tuple)) and batch:
# batch is true batch.
# list of parsed request objects, is_batch_mode_flag
return [cls._parse_single_request_trap_errors(request) for request in batch], True
elif isinstance(batch, dict):
# `batch` is actually single request object
return [cls._parse_single_request_trap_errors(batch)], False
raise errors.RPCInvalidRequest("Neither a batch array nor a single request object found in the request.")
|
JSONRPC allows for **batch** requests to be communicated
as array of dicts. This method parses out each individual
element in the batch and returns a list of tuples, each
tuple a result of parsing of each item in the batch.
:Returns: | tuple of (results, is_batch_mode_flag)
| where:
| - results is a tuple describing the request
| - Is_batch_mode_flag is a Bool indicating if the
| request came in in batch mode (as array of requests) or not.
:Raises: RPCParseError, RPCInvalidRequest
|
train
|
https://github.com/dvdotsenko/jsonrpc.py/blob/19673edd77a9518ac5655bd407f6b93ffbb2cafc/jsonrpcparts/serializers.py#L440-L467
|
[
"def json_loads(cls, s, **kwargs):\n \"\"\"\n A rewrap of json.loads done for one reason - to inject a custom `cls` kwarg\n\n :param s:\n :param kwargs:\n :return:\n :rtype: dict\n \"\"\"\n if 'cls' not in kwargs:\n kwargs['cls'] = cls.json_decoder\n return json.loads(s, **kwargs)\n",
"def _parse_single_request_trap_errors(cls, request_data):\n \"\"\"Traps exceptions generated by __parse_single_request and\n converts them into values of request_id and error in the\n returned tuple.\n\n :Returns: (method_name, params_object, request_id, error)\n Where:\n - method_name is a str (or None when error is set)\n - params_object is one of list/tuple/dict/None\n - request_id is long/int/string/None\n - error is an instance of errors.RPCFault subclass or None\n \"\"\"\n try:\n method, params, request_id = cls._parse_single_request(request_data)\n return method, params, request_id, None\n except errors.RPCFault as ex:\n return None, None, ex.request_id, ex\n"
] |
class JSONRPC20Serializer(BaseJSONRPCSerializer):
@staticmethod
def assemble_request(method, params=None, notification=False):
"""serialize JSON-RPC-Request
:Parameters:
- method: the method-name (str/unicode)
- params: the parameters (None/list/tuple/dict)
- notification: bool
:Returns: | {"jsonrpc": "2.0", "method": "...", "params": ..., "id": ...}
| "jsonrpc", "method", "params" and "id" are always in this order.
| "params" is omitted if empty
:Raises: TypeError if method/params is of wrong type or
not JSON-serializable
"""
if not isinstance(method, (str, unicode)):
raise TypeError('"method" must be a string (or unicode string).')
if params and not isinstance(params, (tuple, list, dict)):
raise TypeError("params must be a tuple/list/dict or None.")
base = {
"jsonrpc": "2.0",
"method": method
}
if params:
base["params"] = params
if not notification:
base['id'] = str(uuid.uuid4())
return base
@staticmethod
def assemble_response(result, request_id):
return {
"jsonrpc": "2.0",
"result": result,
"id": request_id
}
@staticmethod
def assemble_error_response(error):
if not isinstance(error, errors.RPCFault):
raise ValueError("""error must be a RPCFault-instance.""")
if error.error_data is None:
return {
"jsonrpc": "2.0",
"error": {
"code":error.error_code,
"message":error.message
},
"id": error.request_id
}
else:
return {
"jsonrpc": "2.0",
"error": {
"code":error.error_code,
"message": error.message,
"data": error.error_data
},
"id": error.request_id
}
@classmethod
def _parse_single_request(cls, request_data):
"""
:Returns: | [method_name, params, id]
| method (str)
| params (tuple/list or dict)
| id (str/int/None) (None means this is Notification)
:Raises: RPCParseError, RPCInvalidRPC, RPCInvalidMethodParams
"""
request_id = request_data.get('id', None) # Notifications don't have IDs
for argument in ['jsonrpc', 'method']:
if argument not in request_data:
raise errors.RPCInvalidRequest('argument "%s" missing.' % argument, request_id)
if not isinstance(request_data[argument], (str, unicode)):
raise errors.RPCInvalidRequest('value of argument "%s" must be a string.' % argument, request_id)
if request_data["jsonrpc"] != "2.0":
raise errors.RPCInvalidRequest('Invalid jsonrpc version.', request_id)
if "params" in request_data:
if not isinstance(request_data["params"], (list, tuple, dict)):
raise errors.RPCInvalidMethodParams(
'value of argument "parameter" is of non-supported type %s' % type(request_data["params"]),
request_id
)
return (
request_data["method"],
request_data.get("params", None),
request_id
)
@classmethod
def _parse_single_request_trap_errors(cls, request_data):
"""Traps exceptions generated by __parse_single_request and
converts them into values of request_id and error in the
returned tuple.
:Returns: (method_name, params_object, request_id, error)
Where:
- method_name is a str (or None when error is set)
- params_object is one of list/tuple/dict/None
- request_id is long/int/string/None
- error is an instance of errors.RPCFault subclass or None
"""
try:
method, params, request_id = cls._parse_single_request(request_data)
return method, params, request_id, None
except errors.RPCFault as ex:
return None, None, ex.request_id, ex
@classmethod
@classmethod
def _parse_single_response(cls, response_data):
"""de-serialize a JSON-RPC Response/error
:Returns: | [result, id] for Responses
:Raises: | RPCFault+derivates for error-packages/faults, RPCParseError, RPCInvalidRPC
"""
if not isinstance(response_data, dict):
raise errors.RPCInvalidRequest("No valid RPC-package.")
if "id" not in response_data:
raise errors.RPCInvalidRequest("""Invalid Response, "id" missing.""")
request_id = response_data['id']
if "jsonrpc" not in response_data:
raise errors.RPCInvalidRequest("""Invalid Response, "jsonrpc" missing.""", request_id)
if not isinstance(response_data["jsonrpc"], (str, unicode)):
raise errors.RPCInvalidRequest("""Invalid Response, "jsonrpc" must be a string.""")
if response_data["jsonrpc"] != "2.0":
raise errors.RPCInvalidRequest("""Invalid jsonrpc version.""", request_id)
error = response_data.get('error', None)
result = response_data.get('result', None)
if error and result:
raise errors.RPCInvalidRequest("""Invalid Response, only "result" OR "error" allowed.""", request_id)
if error:
if not isinstance(error, dict):
raise errors.RPCInvalidRequest("Invalid Response, invalid error-object.", request_id)
if not ("code" in error and "message" in error):
raise errors.RPCInvalidRequest("Invalid Response, invalid error-object.", request_id)
error_data = error.get("data", None)
if error['code'] in errors.ERROR_CODE_CLASS_MAP:
raise errors.ERROR_CODE_CLASS_MAP[error['code']](error_data, request_id)
else:
error_object = errors.RPCFault(error_data, request_id)
error_object.error_code = error['code']
error_object.message = error['message']
raise error_object
return result, request_id
@classmethod
def _parse_single_response_trap_errors(cls, response_data):
try:
result, request_id = cls._parse_single_response(response_data)
return result, request_id, None
except errors.RPCFault as ex:
return None, ex.request_id, ex
@classmethod
def parse_response(cls, response_string):
"""JSONRPC allows for **batch** responses to be communicated
as arrays of dicts. This method parses out each individual
element in the batch and returns a list of tuples, each
tuple a result of parsing of each item in the batch.
:Returns: | tuple of (results, is_batch_mode_flag)
| where:
| - results is a tuple describing the request
| - Is_batch_mode_flag is a Bool indicating if the
| request came in in batch mode (as array of requests) or not.
:Raises: RPCParseError, RPCInvalidRequest
"""
try:
batch = cls.json_loads(response_string)
except ValueError as err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if isinstance(batch, (list, tuple)) and batch:
# batch is true batch.
# list of parsed request objects, is_batch_mode_flag
return [cls._parse_single_response_trap_errors(response) for response in batch], True
elif isinstance(batch, dict):
# `batch` is actually single response object
return [cls._parse_single_response_trap_errors(batch)], False
raise errors.RPCParseError("Neither a batch array nor a single response object found in the response.")
|
dvdotsenko/jsonrpc.py
|
jsonrpcparts/serializers.py
|
JSONRPC20Serializer._parse_single_response
|
python
|
def _parse_single_response(cls, response_data):
if not isinstance(response_data, dict):
raise errors.RPCInvalidRequest("No valid RPC-package.")
if "id" not in response_data:
raise errors.RPCInvalidRequest("""Invalid Response, "id" missing.""")
request_id = response_data['id']
if "jsonrpc" not in response_data:
raise errors.RPCInvalidRequest("""Invalid Response, "jsonrpc" missing.""", request_id)
if not isinstance(response_data["jsonrpc"], (str, unicode)):
raise errors.RPCInvalidRequest("""Invalid Response, "jsonrpc" must be a string.""")
if response_data["jsonrpc"] != "2.0":
raise errors.RPCInvalidRequest("""Invalid jsonrpc version.""", request_id)
error = response_data.get('error', None)
result = response_data.get('result', None)
if error and result:
raise errors.RPCInvalidRequest("""Invalid Response, only "result" OR "error" allowed.""", request_id)
if error:
if not isinstance(error, dict):
raise errors.RPCInvalidRequest("Invalid Response, invalid error-object.", request_id)
if not ("code" in error and "message" in error):
raise errors.RPCInvalidRequest("Invalid Response, invalid error-object.", request_id)
error_data = error.get("data", None)
if error['code'] in errors.ERROR_CODE_CLASS_MAP:
raise errors.ERROR_CODE_CLASS_MAP[error['code']](error_data, request_id)
else:
error_object = errors.RPCFault(error_data, request_id)
error_object.error_code = error['code']
error_object.message = error['message']
raise error_object
return result, request_id
|
de-serialize a JSON-RPC Response/error
:Returns: | [result, id] for Responses
:Raises: | RPCFault+derivates for error-packages/faults, RPCParseError, RPCInvalidRPC
|
train
|
https://github.com/dvdotsenko/jsonrpc.py/blob/19673edd77a9518ac5655bd407f6b93ffbb2cafc/jsonrpcparts/serializers.py#L470-L515
| null |
class JSONRPC20Serializer(BaseJSONRPCSerializer):
@staticmethod
def assemble_request(method, params=None, notification=False):
"""serialize JSON-RPC-Request
:Parameters:
- method: the method-name (str/unicode)
- params: the parameters (None/list/tuple/dict)
- notification: bool
:Returns: | {"jsonrpc": "2.0", "method": "...", "params": ..., "id": ...}
| "jsonrpc", "method", "params" and "id" are always in this order.
| "params" is omitted if empty
:Raises: TypeError if method/params is of wrong type or
not JSON-serializable
"""
if not isinstance(method, (str, unicode)):
raise TypeError('"method" must be a string (or unicode string).')
if params and not isinstance(params, (tuple, list, dict)):
raise TypeError("params must be a tuple/list/dict or None.")
base = {
"jsonrpc": "2.0",
"method": method
}
if params:
base["params"] = params
if not notification:
base['id'] = str(uuid.uuid4())
return base
@staticmethod
def assemble_response(result, request_id):
return {
"jsonrpc": "2.0",
"result": result,
"id": request_id
}
@staticmethod
def assemble_error_response(error):
if not isinstance(error, errors.RPCFault):
raise ValueError("""error must be a RPCFault-instance.""")
if error.error_data is None:
return {
"jsonrpc": "2.0",
"error": {
"code":error.error_code,
"message":error.message
},
"id": error.request_id
}
else:
return {
"jsonrpc": "2.0",
"error": {
"code":error.error_code,
"message": error.message,
"data": error.error_data
},
"id": error.request_id
}
@classmethod
def _parse_single_request(cls, request_data):
"""
:Returns: | [method_name, params, id]
| method (str)
| params (tuple/list or dict)
| id (str/int/None) (None means this is Notification)
:Raises: RPCParseError, RPCInvalidRPC, RPCInvalidMethodParams
"""
request_id = request_data.get('id', None) # Notifications don't have IDs
for argument in ['jsonrpc', 'method']:
if argument not in request_data:
raise errors.RPCInvalidRequest('argument "%s" missing.' % argument, request_id)
if not isinstance(request_data[argument], (str, unicode)):
raise errors.RPCInvalidRequest('value of argument "%s" must be a string.' % argument, request_id)
if request_data["jsonrpc"] != "2.0":
raise errors.RPCInvalidRequest('Invalid jsonrpc version.', request_id)
if "params" in request_data:
if not isinstance(request_data["params"], (list, tuple, dict)):
raise errors.RPCInvalidMethodParams(
'value of argument "parameter" is of non-supported type %s' % type(request_data["params"]),
request_id
)
return (
request_data["method"],
request_data.get("params", None),
request_id
)
@classmethod
def _parse_single_request_trap_errors(cls, request_data):
"""Traps exceptions generated by __parse_single_request and
converts them into values of request_id and error in the
returned tuple.
:Returns: (method_name, params_object, request_id, error)
Where:
- method_name is a str (or None when error is set)
- params_object is one of list/tuple/dict/None
- request_id is long/int/string/None
- error is an instance of errors.RPCFault subclass or None
"""
try:
method, params, request_id = cls._parse_single_request(request_data)
return method, params, request_id, None
except errors.RPCFault as ex:
return None, None, ex.request_id, ex
@classmethod
def parse_request(cls, request_string):
"""JSONRPC allows for **batch** requests to be communicated
as array of dicts. This method parses out each individual
element in the batch and returns a list of tuples, each
tuple a result of parsing of each item in the batch.
:Returns: | tuple of (results, is_batch_mode_flag)
| where:
| - results is a tuple describing the request
| - Is_batch_mode_flag is a Bool indicating if the
| request came in in batch mode (as array of requests) or not.
:Raises: RPCParseError, RPCInvalidRequest
"""
try:
batch = cls.json_loads(request_string)
except ValueError as err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if isinstance(batch, (list, tuple)) and batch:
# batch is true batch.
# list of parsed request objects, is_batch_mode_flag
return [cls._parse_single_request_trap_errors(request) for request in batch], True
elif isinstance(batch, dict):
# `batch` is actually single request object
return [cls._parse_single_request_trap_errors(batch)], False
raise errors.RPCInvalidRequest("Neither a batch array nor a single request object found in the request.")
@classmethod
@classmethod
def _parse_single_response_trap_errors(cls, response_data):
try:
result, request_id = cls._parse_single_response(response_data)
return result, request_id, None
except errors.RPCFault as ex:
return None, ex.request_id, ex
@classmethod
def parse_response(cls, response_string):
"""JSONRPC allows for **batch** responses to be communicated
as arrays of dicts. This method parses out each individual
element in the batch and returns a list of tuples, each
tuple a result of parsing of each item in the batch.
:Returns: | tuple of (results, is_batch_mode_flag)
| where:
| - results is a tuple describing the request
| - Is_batch_mode_flag is a Bool indicating if the
| request came in in batch mode (as array of requests) or not.
:Raises: RPCParseError, RPCInvalidRequest
"""
try:
batch = cls.json_loads(response_string)
except ValueError as err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if isinstance(batch, (list, tuple)) and batch:
# batch is true batch.
# list of parsed request objects, is_batch_mode_flag
return [cls._parse_single_response_trap_errors(response) for response in batch], True
elif isinstance(batch, dict):
# `batch` is actually single response object
return [cls._parse_single_response_trap_errors(batch)], False
raise errors.RPCParseError("Neither a batch array nor a single response object found in the response.")
|
dvdotsenko/jsonrpc.py
|
jsonrpcparts/serializers.py
|
JSONRPC20Serializer.parse_response
|
python
|
def parse_response(cls, response_string):
try:
batch = cls.json_loads(response_string)
except ValueError as err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if isinstance(batch, (list, tuple)) and batch:
# batch is true batch.
# list of parsed request objects, is_batch_mode_flag
return [cls._parse_single_response_trap_errors(response) for response in batch], True
elif isinstance(batch, dict):
# `batch` is actually single response object
return [cls._parse_single_response_trap_errors(batch)], False
raise errors.RPCParseError("Neither a batch array nor a single response object found in the response.")
|
JSONRPC allows for **batch** responses to be communicated
as arrays of dicts. This method parses out each individual
element in the batch and returns a list of tuples, each
tuple a result of parsing of each item in the batch.
:Returns: | tuple of (results, is_batch_mode_flag)
| where:
| - results is a tuple describing the request
| - Is_batch_mode_flag is a Bool indicating if the
| request came in in batch mode (as array of requests) or not.
:Raises: RPCParseError, RPCInvalidRequest
|
train
|
https://github.com/dvdotsenko/jsonrpc.py/blob/19673edd77a9518ac5655bd407f6b93ffbb2cafc/jsonrpcparts/serializers.py#L526-L553
|
[
"def json_loads(cls, s, **kwargs):\n \"\"\"\n A rewrap of json.loads done for one reason - to inject a custom `cls` kwarg\n\n :param s:\n :param kwargs:\n :return:\n :rtype: dict\n \"\"\"\n if 'cls' not in kwargs:\n kwargs['cls'] = cls.json_decoder\n return json.loads(s, **kwargs)\n",
"def _parse_single_response_trap_errors(cls, response_data):\n try:\n result, request_id = cls._parse_single_response(response_data)\n return result, request_id, None\n except errors.RPCFault as ex:\n return None, ex.request_id, ex\n"
] |
class JSONRPC20Serializer(BaseJSONRPCSerializer):
@staticmethod
def assemble_request(method, params=None, notification=False):
"""serialize JSON-RPC-Request
:Parameters:
- method: the method-name (str/unicode)
- params: the parameters (None/list/tuple/dict)
- notification: bool
:Returns: | {"jsonrpc": "2.0", "method": "...", "params": ..., "id": ...}
| "jsonrpc", "method", "params" and "id" are always in this order.
| "params" is omitted if empty
:Raises: TypeError if method/params is of wrong type or
not JSON-serializable
"""
if not isinstance(method, (str, unicode)):
raise TypeError('"method" must be a string (or unicode string).')
if params and not isinstance(params, (tuple, list, dict)):
raise TypeError("params must be a tuple/list/dict or None.")
base = {
"jsonrpc": "2.0",
"method": method
}
if params:
base["params"] = params
if not notification:
base['id'] = str(uuid.uuid4())
return base
@staticmethod
def assemble_response(result, request_id):
return {
"jsonrpc": "2.0",
"result": result,
"id": request_id
}
@staticmethod
def assemble_error_response(error):
if not isinstance(error, errors.RPCFault):
raise ValueError("""error must be a RPCFault-instance.""")
if error.error_data is None:
return {
"jsonrpc": "2.0",
"error": {
"code":error.error_code,
"message":error.message
},
"id": error.request_id
}
else:
return {
"jsonrpc": "2.0",
"error": {
"code":error.error_code,
"message": error.message,
"data": error.error_data
},
"id": error.request_id
}
@classmethod
def _parse_single_request(cls, request_data):
"""
:Returns: | [method_name, params, id]
| method (str)
| params (tuple/list or dict)
| id (str/int/None) (None means this is Notification)
:Raises: RPCParseError, RPCInvalidRPC, RPCInvalidMethodParams
"""
request_id = request_data.get('id', None) # Notifications don't have IDs
for argument in ['jsonrpc', 'method']:
if argument not in request_data:
raise errors.RPCInvalidRequest('argument "%s" missing.' % argument, request_id)
if not isinstance(request_data[argument], (str, unicode)):
raise errors.RPCInvalidRequest('value of argument "%s" must be a string.' % argument, request_id)
if request_data["jsonrpc"] != "2.0":
raise errors.RPCInvalidRequest('Invalid jsonrpc version.', request_id)
if "params" in request_data:
if not isinstance(request_data["params"], (list, tuple, dict)):
raise errors.RPCInvalidMethodParams(
'value of argument "parameter" is of non-supported type %s' % type(request_data["params"]),
request_id
)
return (
request_data["method"],
request_data.get("params", None),
request_id
)
@classmethod
def _parse_single_request_trap_errors(cls, request_data):
"""Traps exceptions generated by __parse_single_request and
converts them into values of request_id and error in the
returned tuple.
:Returns: (method_name, params_object, request_id, error)
Where:
- method_name is a str (or None when error is set)
- params_object is one of list/tuple/dict/None
- request_id is long/int/string/None
- error is an instance of errors.RPCFault subclass or None
"""
try:
method, params, request_id = cls._parse_single_request(request_data)
return method, params, request_id, None
except errors.RPCFault as ex:
return None, None, ex.request_id, ex
@classmethod
def parse_request(cls, request_string):
"""JSONRPC allows for **batch** requests to be communicated
as array of dicts. This method parses out each individual
element in the batch and returns a list of tuples, each
tuple a result of parsing of each item in the batch.
:Returns: | tuple of (results, is_batch_mode_flag)
| where:
| - results is a tuple describing the request
| - Is_batch_mode_flag is a Bool indicating if the
| request came in in batch mode (as array of requests) or not.
:Raises: RPCParseError, RPCInvalidRequest
"""
try:
batch = cls.json_loads(request_string)
except ValueError as err:
raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
if isinstance(batch, (list, tuple)) and batch:
# batch is true batch.
# list of parsed request objects, is_batch_mode_flag
return [cls._parse_single_request_trap_errors(request) for request in batch], True
elif isinstance(batch, dict):
# `batch` is actually single request object
return [cls._parse_single_request_trap_errors(batch)], False
raise errors.RPCInvalidRequest("Neither a batch array nor a single request object found in the request.")
@classmethod
def _parse_single_response(cls, response_data):
"""de-serialize a JSON-RPC Response/error
:Returns: | [result, id] for Responses
:Raises: | RPCFault+derivates for error-packages/faults, RPCParseError, RPCInvalidRPC
"""
if not isinstance(response_data, dict):
raise errors.RPCInvalidRequest("No valid RPC-package.")
if "id" not in response_data:
raise errors.RPCInvalidRequest("""Invalid Response, "id" missing.""")
request_id = response_data['id']
if "jsonrpc" not in response_data:
raise errors.RPCInvalidRequest("""Invalid Response, "jsonrpc" missing.""", request_id)
if not isinstance(response_data["jsonrpc"], (str, unicode)):
raise errors.RPCInvalidRequest("""Invalid Response, "jsonrpc" must be a string.""")
if response_data["jsonrpc"] != "2.0":
raise errors.RPCInvalidRequest("""Invalid jsonrpc version.""", request_id)
error = response_data.get('error', None)
result = response_data.get('result', None)
if error and result:
raise errors.RPCInvalidRequest("""Invalid Response, only "result" OR "error" allowed.""", request_id)
if error:
if not isinstance(error, dict):
raise errors.RPCInvalidRequest("Invalid Response, invalid error-object.", request_id)
if not ("code" in error and "message" in error):
raise errors.RPCInvalidRequest("Invalid Response, invalid error-object.", request_id)
error_data = error.get("data", None)
if error['code'] in errors.ERROR_CODE_CLASS_MAP:
raise errors.ERROR_CODE_CLASS_MAP[error['code']](error_data, request_id)
else:
error_object = errors.RPCFault(error_data, request_id)
error_object.error_code = error['code']
error_object.message = error['message']
raise error_object
return result, request_id
@classmethod
def _parse_single_response_trap_errors(cls, response_data):
try:
result, request_id = cls._parse_single_response(response_data)
return result, request_id, None
except errors.RPCFault as ex:
return None, ex.request_id, ex
@classmethod
|
dvdotsenko/jsonrpc.py
|
jsonrpcparts/application.py
|
JSONPRCCollection.register_class
|
python
|
def register_class(self, instance, name=None):
prefix_name = name or instance.__class__.__name__
for e in dir(instance):
if e[0][0] != "_":
self.register_function(
getattr(instance, e),
name="%s.%s" % (prefix_name, e)
)
|
Add all functions of a class-instance to the RPC-services.
All entries of the instance which do not begin with '_' are added.
:Parameters:
- myinst: class-instance containing the functions
- name: | hierarchical prefix.
| If omitted, the functions are added directly.
| If given, the functions are added as "name.function".
:TODO:
- only add functions and omit attributes?
- improve hierarchy?
|
train
|
https://github.com/dvdotsenko/jsonrpc.py/blob/19673edd77a9518ac5655bd407f6b93ffbb2cafc/jsonrpcparts/application.py#L22-L43
|
[
"def register_function(self, function, name=None):\n \"\"\"Add a function to the RPC-services.\n\n :Parameters:\n - function: function to add\n - name: RPC-name for the function. If omitted/None, the original\n name of the function is used.\n \"\"\"\n if name:\n self[name] = function\n else:\n self[function.__name__] = function\n"
] |
class JSONPRCCollection(dict):
"""
A dictionary-like collection that helps with registration
and use (calling of) JSON-RPC methods.
"""
def register_function(self, function, name=None):
"""Add a function to the RPC-services.
:Parameters:
- function: function to add
- name: RPC-name for the function. If omitted/None, the original
name of the function is used.
"""
if name:
self[name] = function
else:
self[function.__name__] = function
|
dvdotsenko/jsonrpc.py
|
jsonrpcparts/application.py
|
JSONPRCApplication.process_method
|
python
|
def process_method(self, method, args, kwargs, request_id=None, **context):
return method(*([] if args is None else args), **({} if kwargs is None else kwargs))
|
Executes the actual method with args, kwargs provided.
This step is broken out of the process_requests flow to
allow for ease of overriding the call in your subclass of this class.
In some cases it's preferable to make callee aware of the request_id
and easily overridable caller method allows for that.
:param method: A callable registered as JSON-RPC method
:type method: callable
:param args: A list of none or more positional args to pass to the method call
:type args: list
:param kargs: A dict of none or more named args to pass to the method call
:type kargs: dict
:param request_id: None or non-None value of the `id` attribute in JSON-RPC request
:param context:
A dict with additional parameters passed to handle_request_string and process_requests
Allows wrapping code to pass additional parameters deep into parsing stack, override this
method and fold the parameters as needed into tha method call.
Imagine capturing authentication / permissions data from headers, converting them into
actionable / flag objects and putting them into **context.
Then override this method and fold the arguments into the call
(which may be a decorated function, where decorator unfolds the params and calls the actual method)
By default, context is not passed to method call below.
:return: The value method returns
|
train
|
https://github.com/dvdotsenko/jsonrpc.py/blob/19673edd77a9518ac5655bd407f6b93ffbb2cafc/jsonrpcparts/application.py#L69-L97
| null |
class JSONPRCApplication(JSONPRCCollection):
def __init__(self, data_serializer=JSONRPC20Serializer, *args, **kw):
"""
:Parameters:
- data_serializer: a data_structure+serializer-instance
"""
super(JSONPRCApplication, self).__init__(*args, **kw)
self._data_serializer = data_serializer
def process_requests(self, requests, **context):
"""
Turns a list of request objects into a list of
response objects.
:param requests: A list of tuples describing the RPC call
:type requests: list[list[callable,object,object,list]]
:param context:
A dict with additional parameters passed to handle_request_string and process_requests
Allows wrapping code to pass additional parameters deep into parsing stack, override this
method and fold the parameters as needed into tha method call.
Imagine capturing authentication / permissions data from headers, converting them into
actionable / flag objects and putting them into **context.
Then override this method and fold the arguments into the call
(which may be a decorated function, where decorator unfolds the params and calls the actual method)
By default, context is not passed to method call below.
"""
ds = self._data_serializer
responses = []
for method, params, request_id, error in requests:
if error: # these are request message validation errors
if error.request_id: # no ID = Notification. We don't reply
responses.append(ds.assemble_error_response(error))
continue
if method not in self:
if request_id:
responses.append(ds.assemble_error_response(
errors.RPCMethodNotFound(
'Method "%s" is not found.' % method,
request_id
)
))
continue
try:
args = []
kwargs = {}
if isinstance(params, dict):
kwargs = params
elif params: # and/or must be type(params, list):
args = params
result = self.process_method(
self[method],
args,
kwargs,
request_id=request_id,
**context
)
if request_id:
responses.append(ds.assemble_response(result, request_id))
except errors.RPCFault as ex:
if request_id:
responses.append(ds.assemble_error_response(ex))
except Exception as ex:
if request_id:
responses.append(ds.assemble_error_response(
errors.RPCInternalError(
'While processing the follwoing message ("%s","%s","%s") ' % (method, params, request_id) +\
'encountered the following error message "%s"' % ex.message,
request_id=request_id,
message=ex.message
)
))
return responses
def handle_request_string(self, request_string, **context):
"""Handle a RPC-Request.
:param request_string: the received rpc-string
:param context:
A dict with additional parameters passed to process_requests and process_method
Allows wrapping code to pass additional parameters deep into parsing stack, override process_method
method and fold the parameters as needed into tha method call.
Imagine capturing authentication / permissions data from headers, converting them into
actionable / flag objects and putting them into **context.
Then override this method and fold the arguments into the call
(which may be a decorated function, where decorator unfolds the params and calls the actual method)
By default, context is not passed to method call below.
:return: the encoded (serialized as string) JSON of the response
"""
ds = self._data_serializer
try:
requests, is_batch_mode = ds.parse_request(request_string)
except errors.RPCFault as ex:
return ds.json_dumps(ds.assemble_error_response(ex))
except Exception as ex:
return ds.json_dumps(ds.assemble_error_response(
errors.RPCInternalError(
'While processing the follwoing message "%s" ' % request_string +\
'encountered the following error message "%s"' % ex.message
)
))
responses = self.process_requests(requests, **context)
if not responses:
return None
try:
if is_batch_mode:
return ds.json_dumps(responses)
else:
return ds.json_dumps(responses[0])
except Exception as ex:
return json.dumps(
ds.assemble_error_response(
errors.RPCInternalError(
'While processing the follwoing message "%s" ' % request_string +\
'encountered the following error message "%s"' % ex.message
)
)
)
|
dvdotsenko/jsonrpc.py
|
jsonrpcparts/application.py
|
JSONPRCApplication.process_requests
|
python
|
def process_requests(self, requests, **context):
ds = self._data_serializer
responses = []
for method, params, request_id, error in requests:
if error: # these are request message validation errors
if error.request_id: # no ID = Notification. We don't reply
responses.append(ds.assemble_error_response(error))
continue
if method not in self:
if request_id:
responses.append(ds.assemble_error_response(
errors.RPCMethodNotFound(
'Method "%s" is not found.' % method,
request_id
)
))
continue
try:
args = []
kwargs = {}
if isinstance(params, dict):
kwargs = params
elif params: # and/or must be type(params, list):
args = params
result = self.process_method(
self[method],
args,
kwargs,
request_id=request_id,
**context
)
if request_id:
responses.append(ds.assemble_response(result, request_id))
except errors.RPCFault as ex:
if request_id:
responses.append(ds.assemble_error_response(ex))
except Exception as ex:
if request_id:
responses.append(ds.assemble_error_response(
errors.RPCInternalError(
'While processing the follwoing message ("%s","%s","%s") ' % (method, params, request_id) +\
'encountered the following error message "%s"' % ex.message,
request_id=request_id,
message=ex.message
)
))
return responses
|
Turns a list of request objects into a list of
response objects.
:param requests: A list of tuples describing the RPC call
:type requests: list[list[callable,object,object,list]]
:param context:
A dict with additional parameters passed to handle_request_string and process_requests
Allows wrapping code to pass additional parameters deep into parsing stack, override this
method and fold the parameters as needed into tha method call.
Imagine capturing authentication / permissions data from headers, converting them into
actionable / flag objects and putting them into **context.
Then override this method and fold the arguments into the call
(which may be a decorated function, where decorator unfolds the params and calls the actual method)
By default, context is not passed to method call below.
|
train
|
https://github.com/dvdotsenko/jsonrpc.py/blob/19673edd77a9518ac5655bd407f6b93ffbb2cafc/jsonrpcparts/application.py#L99-L167
|
[
"def process_method(self, method, args, kwargs, request_id=None, **context):\n \"\"\"\n Executes the actual method with args, kwargs provided.\n\n This step is broken out of the process_requests flow to\n allow for ease of overriding the call in your subclass of this class.\n\n In some cases it's preferable to make callee aware of the request_id\n and easily overridable caller method allows for that.\n\n :param method: A callable registered as JSON-RPC method\n :type method: callable\n :param args: A list of none or more positional args to pass to the method call\n :type args: list\n :param kargs: A dict of none or more named args to pass to the method call\n :type kargs: dict\n :param request_id: None or non-None value of the `id` attribute in JSON-RPC request\n :param context:\n A dict with additional parameters passed to handle_request_string and process_requests\n Allows wrapping code to pass additional parameters deep into parsing stack, override this\n method and fold the parameters as needed into tha method call.\n Imagine capturing authentication / permissions data from headers, converting them into\n actionable / flag objects and putting them into **context.\n Then override this method and fold the arguments into the call\n (which may be a decorated function, where decorator unfolds the params and calls the actual method)\n By default, context is not passed to method call below.\n :return: The value method returns\n \"\"\"\n return method(*([] if args is None else args), **({} if kwargs is None else kwargs))\n"
] |
class JSONPRCApplication(JSONPRCCollection):
def __init__(self, data_serializer=JSONRPC20Serializer, *args, **kw):
"""
:Parameters:
- data_serializer: a data_structure+serializer-instance
"""
super(JSONPRCApplication, self).__init__(*args, **kw)
self._data_serializer = data_serializer
def process_method(self, method, args, kwargs, request_id=None, **context):
"""
Executes the actual method with args, kwargs provided.
This step is broken out of the process_requests flow to
allow for ease of overriding the call in your subclass of this class.
In some cases it's preferable to make callee aware of the request_id
and easily overridable caller method allows for that.
:param method: A callable registered as JSON-RPC method
:type method: callable
:param args: A list of none or more positional args to pass to the method call
:type args: list
:param kargs: A dict of none or more named args to pass to the method call
:type kargs: dict
:param request_id: None or non-None value of the `id` attribute in JSON-RPC request
:param context:
A dict with additional parameters passed to handle_request_string and process_requests
Allows wrapping code to pass additional parameters deep into parsing stack, override this
method and fold the parameters as needed into tha method call.
Imagine capturing authentication / permissions data from headers, converting them into
actionable / flag objects and putting them into **context.
Then override this method and fold the arguments into the call
(which may be a decorated function, where decorator unfolds the params and calls the actual method)
By default, context is not passed to method call below.
:return: The value method returns
"""
return method(*([] if args is None else args), **({} if kwargs is None else kwargs))
def handle_request_string(self, request_string, **context):
"""Handle a RPC-Request.
:param request_string: the received rpc-string
:param context:
A dict with additional parameters passed to process_requests and process_method
Allows wrapping code to pass additional parameters deep into parsing stack, override process_method
method and fold the parameters as needed into tha method call.
Imagine capturing authentication / permissions data from headers, converting them into
actionable / flag objects and putting them into **context.
Then override this method and fold the arguments into the call
(which may be a decorated function, where decorator unfolds the params and calls the actual method)
By default, context is not passed to method call below.
:return: the encoded (serialized as string) JSON of the response
"""
ds = self._data_serializer
try:
requests, is_batch_mode = ds.parse_request(request_string)
except errors.RPCFault as ex:
return ds.json_dumps(ds.assemble_error_response(ex))
except Exception as ex:
return ds.json_dumps(ds.assemble_error_response(
errors.RPCInternalError(
'While processing the follwoing message "%s" ' % request_string +\
'encountered the following error message "%s"' % ex.message
)
))
responses = self.process_requests(requests, **context)
if not responses:
return None
try:
if is_batch_mode:
return ds.json_dumps(responses)
else:
return ds.json_dumps(responses[0])
except Exception as ex:
return json.dumps(
ds.assemble_error_response(
errors.RPCInternalError(
'While processing the follwoing message "%s" ' % request_string +\
'encountered the following error message "%s"' % ex.message
)
)
)
|
dvdotsenko/jsonrpc.py
|
jsonrpcparts/application.py
|
JSONPRCApplication.handle_request_string
|
python
|
def handle_request_string(self, request_string, **context):
ds = self._data_serializer
try:
requests, is_batch_mode = ds.parse_request(request_string)
except errors.RPCFault as ex:
return ds.json_dumps(ds.assemble_error_response(ex))
except Exception as ex:
return ds.json_dumps(ds.assemble_error_response(
errors.RPCInternalError(
'While processing the follwoing message "%s" ' % request_string +\
'encountered the following error message "%s"' % ex.message
)
))
responses = self.process_requests(requests, **context)
if not responses:
return None
try:
if is_batch_mode:
return ds.json_dumps(responses)
else:
return ds.json_dumps(responses[0])
except Exception as ex:
return json.dumps(
ds.assemble_error_response(
errors.RPCInternalError(
'While processing the follwoing message "%s" ' % request_string +\
'encountered the following error message "%s"' % ex.message
)
)
)
|
Handle a RPC-Request.
:param request_string: the received rpc-string
:param context:
A dict with additional parameters passed to process_requests and process_method
Allows wrapping code to pass additional parameters deep into parsing stack, override process_method
method and fold the parameters as needed into tha method call.
Imagine capturing authentication / permissions data from headers, converting them into
actionable / flag objects and putting them into **context.
Then override this method and fold the arguments into the call
(which may be a decorated function, where decorator unfolds the params and calls the actual method)
By default, context is not passed to method call below.
:return: the encoded (serialized as string) JSON of the response
|
train
|
https://github.com/dvdotsenko/jsonrpc.py/blob/19673edd77a9518ac5655bd407f6b93ffbb2cafc/jsonrpcparts/application.py#L169-L217
|
[
"def process_requests(self, requests, **context):\n \"\"\"\n Turns a list of request objects into a list of\n response objects.\n\n :param requests: A list of tuples describing the RPC call\n :type requests: list[list[callable,object,object,list]]\n :param context:\n A dict with additional parameters passed to handle_request_string and process_requests\n Allows wrapping code to pass additional parameters deep into parsing stack, override this\n method and fold the parameters as needed into tha method call.\n Imagine capturing authentication / permissions data from headers, converting them into\n actionable / flag objects and putting them into **context.\n Then override this method and fold the arguments into the call\n (which may be a decorated function, where decorator unfolds the params and calls the actual method)\n By default, context is not passed to method call below.\n \"\"\"\n\n ds = self._data_serializer\n\n responses = []\n for method, params, request_id, error in requests:\n\n if error: # these are request message validation errors\n if error.request_id: # no ID = Notification. We don't reply\n responses.append(ds.assemble_error_response(error))\n continue\n\n if method not in self:\n if request_id:\n responses.append(ds.assemble_error_response(\n errors.RPCMethodNotFound(\n 'Method \"%s\" is not found.' 
% method,\n request_id\n )\n ))\n continue\n\n try:\n args = []\n kwargs = {}\n if isinstance(params, dict):\n kwargs = params\n elif params: # and/or must be type(params, list):\n args = params\n result = self.process_method(\n self[method],\n args,\n kwargs,\n request_id=request_id,\n **context\n )\n if request_id:\n responses.append(ds.assemble_response(result, request_id))\n except errors.RPCFault as ex:\n if request_id:\n responses.append(ds.assemble_error_response(ex))\n except Exception as ex:\n if request_id:\n responses.append(ds.assemble_error_response(\n errors.RPCInternalError(\n 'While processing the follwoing message (\"%s\",\"%s\",\"%s\") ' % (method, params, request_id) +\\\n 'encountered the following error message \"%s\"' % ex.message,\n request_id=request_id,\n message=ex.message\n )\n ))\n\n return responses\n",
"def json_dumps(cls, obj, **kwargs):\n \"\"\"\n A rewrap of json.dumps done for one reason - to inject a custom `cls` kwarg\n\n :param obj:\n :param kwargs:\n :return:\n :rtype: str\n \"\"\"\n if 'cls' not in kwargs:\n kwargs['cls'] = cls.json_encoder\n return json.dumps(obj, **kwargs)\n",
"def assemble_error_response(error):\n\n if not isinstance(error, errors.RPCFault):\n raise ValueError(\"\"\"error must be a RPCFault-instance.\"\"\")\n\n if error.error_data is None:\n return {\n \"jsonrpc\": \"2.0\",\n \"error\": {\n \"code\":error.error_code,\n \"message\":error.message\n },\n \"id\": error.request_id\n }\n else:\n return {\n \"jsonrpc\": \"2.0\",\n \"error\": {\n \"code\":error.error_code,\n \"message\": error.message,\n \"data\": error.error_data\n },\n \"id\": error.request_id\n }\n",
"def parse_request(cls, request_string):\n \"\"\"JSONRPC allows for **batch** requests to be communicated\n as array of dicts. This method parses out each individual\n element in the batch and returns a list of tuples, each\n tuple a result of parsing of each item in the batch.\n\n :Returns: | tuple of (results, is_batch_mode_flag)\n | where:\n | - results is a tuple describing the request\n | - Is_batch_mode_flag is a Bool indicating if the\n | request came in in batch mode (as array of requests) or not.\n\n :Raises: RPCParseError, RPCInvalidRequest\n \"\"\"\n try:\n batch = cls.json_loads(request_string)\n except ValueError as err:\n raise errors.RPCParseError(\"No valid JSON. (%s)\" % str(err))\n\n if isinstance(batch, (list, tuple)) and batch:\n # batch is true batch.\n # list of parsed request objects, is_batch_mode_flag\n return [cls._parse_single_request_trap_errors(request) for request in batch], True\n elif isinstance(batch, dict):\n # `batch` is actually single request object\n return [cls._parse_single_request_trap_errors(batch)], False\n\n raise errors.RPCInvalidRequest(\"Neither a batch array nor a single request object found in the request.\")\n"
] |
class JSONPRCApplication(JSONPRCCollection):
def __init__(self, data_serializer=JSONRPC20Serializer, *args, **kw):
"""
:Parameters:
- data_serializer: a data_structure+serializer-instance
"""
super(JSONPRCApplication, self).__init__(*args, **kw)
self._data_serializer = data_serializer
def process_method(self, method, args, kwargs, request_id=None, **context):
"""
Executes the actual method with args, kwargs provided.
This step is broken out of the process_requests flow to
allow for ease of overriding the call in your subclass of this class.
In some cases it's preferable to make callee aware of the request_id
and easily overridable caller method allows for that.
:param method: A callable registered as JSON-RPC method
:type method: callable
:param args: A list of none or more positional args to pass to the method call
:type args: list
:param kargs: A dict of none or more named args to pass to the method call
:type kargs: dict
:param request_id: None or non-None value of the `id` attribute in JSON-RPC request
:param context:
A dict with additional parameters passed to handle_request_string and process_requests
Allows wrapping code to pass additional parameters deep into parsing stack, override this
method and fold the parameters as needed into tha method call.
Imagine capturing authentication / permissions data from headers, converting them into
actionable / flag objects and putting them into **context.
Then override this method and fold the arguments into the call
(which may be a decorated function, where decorator unfolds the params and calls the actual method)
By default, context is not passed to method call below.
:return: The value method returns
"""
return method(*([] if args is None else args), **({} if kwargs is None else kwargs))
def process_requests(self, requests, **context):
"""
Turns a list of request objects into a list of
response objects.
:param requests: A list of tuples describing the RPC call
:type requests: list[list[callable,object,object,list]]
:param context:
A dict with additional parameters passed to handle_request_string and process_requests
Allows wrapping code to pass additional parameters deep into parsing stack, override this
method and fold the parameters as needed into tha method call.
Imagine capturing authentication / permissions data from headers, converting them into
actionable / flag objects and putting them into **context.
Then override this method and fold the arguments into the call
(which may be a decorated function, where decorator unfolds the params and calls the actual method)
By default, context is not passed to method call below.
"""
ds = self._data_serializer
responses = []
for method, params, request_id, error in requests:
if error: # these are request message validation errors
if error.request_id: # no ID = Notification. We don't reply
responses.append(ds.assemble_error_response(error))
continue
if method not in self:
if request_id:
responses.append(ds.assemble_error_response(
errors.RPCMethodNotFound(
'Method "%s" is not found.' % method,
request_id
)
))
continue
try:
args = []
kwargs = {}
if isinstance(params, dict):
kwargs = params
elif params: # and/or must be type(params, list):
args = params
result = self.process_method(
self[method],
args,
kwargs,
request_id=request_id,
**context
)
if request_id:
responses.append(ds.assemble_response(result, request_id))
except errors.RPCFault as ex:
if request_id:
responses.append(ds.assemble_error_response(ex))
except Exception as ex:
if request_id:
responses.append(ds.assemble_error_response(
errors.RPCInternalError(
'While processing the follwoing message ("%s","%s","%s") ' % (method, params, request_id) +\
'encountered the following error message "%s"' % ex.message,
request_id=request_id,
message=ex.message
)
))
return responses
|
dvdotsenko/jsonrpc.py
|
jsonrpcparts/client.py
|
Client.call
|
python
|
def call(self, method, *args, **kw):
if args and kw:
raise ValueError("JSON-RPC method calls allow only either named or positional arguments.")
if not method:
raise ValueError("JSON-RPC method call requires a method name.")
request = self._data_serializer.assemble_request(
method, args or kw or None
)
if self._in_batch_mode:
self._requests.append(request)
return request.get('id')
else:
return request
|
In context of a batch we return the request's ID
else we return the actual json
|
train
|
https://github.com/dvdotsenko/jsonrpc.py/blob/19673edd77a9518ac5655bd407f6b93ffbb2cafc/jsonrpcparts/client.py#L49-L67
| null |
class Client(object):
def __init__(self, data_serializer=JSONRPC20Serializer):
"""
:Parameters:
- data_serializer: a data_structure+serializer-instance
"""
self._in_batch_mode = False
self._requests = []
self._data_serializer = data_serializer
# Context manager API
def __enter__(self):
self._in_batch_mode = True
self._requests = []
return self
# Context manager API
def __exit__(self, *args):
self._in_batch_mode = False
self._requests = []
pass
def notify(self, method, *args, **kw):
if args and kw:
raise ValueError("JSON-RPC method calls allow only either named or positional arguments.")
if not method:
raise ValueError("JSON-RPC method call requires a method name.")
request = self._data_serializer.assemble_request(
method, args or kw or None, notification=True
)
if self._in_batch_mode:
self._requests.append(request)
else:
return request
def get_batched(self):
if not self._in_batch_mode:
return []
else:
return self._requests
|
miniconfig/python-openevse-wifi
|
openevsewifi/__init__.py
|
Charger.sendCommand
|
python
|
def sendCommand(self, command):
data = { 'rapi' : command }
full_url = self.url + urllib.parse.urlencode(data)
data = urllib.request.urlopen(full_url)
response = re.search('\<p>>\$(.+)\<script', data.read().decode('utf-8'))
if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357
response = re.search('\>\>\$(.+)\<p>', data.read().decode('utf-8'))
return response.group(1).split()
|
Sends a command through the web interface of the charger and parses the response
|
train
|
https://github.com/miniconfig/python-openevse-wifi/blob/42fabeae052a9f82092fa9220201413732e38bb4/openevsewifi/__init__.py#L31-L39
| null |
class Charger:
def __init__(self, host):
"""A connection to an OpenEVSE charging station equipped with the wifi kit."""
self.url = 'http://' + host + '/r?'
def getStatus(self):
"""Returns the charger's charge status, as a string"""
command = '$GS'
status = self.sendCommand(command)
return states[int(status[1])]
def getChargeTimeElapsed(self):
"""Returns the charge time elapsed (in seconds), or 0 if is not currently charging"""
command = '$GS'
status = self.sendCommand(command)
if int(status[1]) == 3:
return int(status[2])
else:
return 0
def getTimeLimit(self):
"""Returns the time limit in minutes or 0 if no limit is set"""
command = '$G3'
limit = self.sendCommand(command)
return int(limit[1])*15
def getAmmeterScaleFactor(self):
"""Returns the ammeter's current scale factor"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[1])
def getAmmeterOffset(self):
"""Returns the ammeter's current offset"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[2])
def getMinAmps(self):
"""Returns the capacity range minimum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[1])
def getMaxAmps(self):
"""Returns the capacity range maximum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[2])
def getCurrentCapacity(self):
"""Returns the current capacity, in amps"""
command = '$GE'
settings = self.sendCommand(command)
return int(settings[1])
def getServiceLevel(self):
"""Returns the service level"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return (flags & 0x0001) + 1
def getDiodeCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0002)
def getVentRequiredEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0004)
def getGroundCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0008)
def getStuckRelayCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0010)
def getAutoServiceLevelEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0020)
def getAutoStartEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0040)
def getSerialDebugEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0080)
def getLCDType(self):
"""Returns LCD type as a string, either monochrome or rgb"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
if flags & 0x0100:
lcdtype = 'monochrome'
else:
lcdtype = 'rgb'
return lcdtype
def getGFISelfTestEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0200)
def getGFITripCount(self):
"""Returns GFI Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[1]
def getNoGndTripCount(self):
"""Returns No Ground Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[2]
def getStuckRelayTripCount(self):
"""Returns Stuck Relay Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[3]
def getChargingCurrent(self):
"""Returns the charging current, in amps, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
amps = float(currentAndVoltage[1])/1000
return amps
def getChargingVoltage(self):
"""Returns the charging voltage, in volts, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
volts = float(currentAndVoltage[2])/1000
return volts
def getChargeLimit(self):
"""Returns the charge limit in kWh"""
command = '$GH'
limit = self.sendCommand(command)
return limit[1]
def getVoltMeterScaleFactor(self):
"""Returns the voltmeter scale factor, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[1]
def getVoltMeterOffset(self):
"""Returns the voltmeter offset, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[2]
def getAmbientThreshold(self):
"""Returns the ambient temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[1])/10
def getIRThreshold(self):
"""Returns the IR temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[2])/10
def getRTCTemperature(self):
"""Returns the temperature of the real time clock sensor (DS3231), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[1])/10
def getAmbientTemperature(self):
"""Returns the temperature of the ambient sensor (MCP9808), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[2])/10
def getIRTemperature(self):
"""Returns the temperature of the IR remote sensor (TMP007), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[3])/10
def getTime(self):
"""Get the RTC time. Returns a datetime object, or NULL if the clock is not set"""
command = '$GT'
time = self.sendCommand(command)
if time == ['OK','165', '165', '165', '165', '165', '85']:
return NULL
else:
return datetime.datetime(year = int(time[1])+2000,
month = int(time[2]),
day = int(time[3]),
hour = int(time[4]),
minute = int(time[5]),
second = int(time[6]))
def getUsageSession(self):
"""Get the energy usage for the current charging session. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[1])/3600
def getUsageTotal(self):
"""Get the total energy usage. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[2])
def getFirmwareVersion(self):
"""Returns the Firmware Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[1]
def getProtocolVersion(self):
"""Returns the Protocol Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[2]
|
miniconfig/python-openevse-wifi
|
openevsewifi/__init__.py
|
Charger.getStatus
|
python
|
def getStatus(self):
command = '$GS'
status = self.sendCommand(command)
return states[int(status[1])]
|
Returns the charger's charge status, as a string
|
train
|
https://github.com/miniconfig/python-openevse-wifi/blob/42fabeae052a9f82092fa9220201413732e38bb4/openevsewifi/__init__.py#L41-L45
|
[
"def sendCommand(self, command):\n \"\"\"Sends a command through the web interface of the charger and parses the response\"\"\"\n data = { 'rapi' : command }\n full_url = self.url + urllib.parse.urlencode(data)\n data = urllib.request.urlopen(full_url)\n response = re.search('\\<p>>\\$(.+)\\<script', data.read().decode('utf-8'))\n if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357\n response = re.search('\\>\\>\\$(.+)\\<p>', data.read().decode('utf-8'))\n return response.group(1).split()\n"
] |
class Charger:
def __init__(self, host):
"""A connection to an OpenEVSE charging station equipped with the wifi kit."""
self.url = 'http://' + host + '/r?'
def sendCommand(self, command):
"""Sends a command through the web interface of the charger and parses the response"""
data = { 'rapi' : command }
full_url = self.url + urllib.parse.urlencode(data)
data = urllib.request.urlopen(full_url)
response = re.search('\<p>>\$(.+)\<script', data.read().decode('utf-8'))
if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357
response = re.search('\>\>\$(.+)\<p>', data.read().decode('utf-8'))
return response.group(1).split()
def getChargeTimeElapsed(self):
"""Returns the charge time elapsed (in seconds), or 0 if is not currently charging"""
command = '$GS'
status = self.sendCommand(command)
if int(status[1]) == 3:
return int(status[2])
else:
return 0
def getTimeLimit(self):
"""Returns the time limit in minutes or 0 if no limit is set"""
command = '$G3'
limit = self.sendCommand(command)
return int(limit[1])*15
def getAmmeterScaleFactor(self):
"""Returns the ammeter's current scale factor"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[1])
def getAmmeterOffset(self):
"""Returns the ammeter's current offset"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[2])
def getMinAmps(self):
"""Returns the capacity range minimum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[1])
def getMaxAmps(self):
"""Returns the capacity range maximum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[2])
def getCurrentCapacity(self):
"""Returns the current capacity, in amps"""
command = '$GE'
settings = self.sendCommand(command)
return int(settings[1])
def getServiceLevel(self):
"""Returns the service level"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return (flags & 0x0001) + 1
def getDiodeCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0002)
def getVentRequiredEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0004)
def getGroundCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0008)
def getStuckRelayCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0010)
def getAutoServiceLevelEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0020)
def getAutoStartEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0040)
def getSerialDebugEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0080)
def getLCDType(self):
"""Returns LCD type as a string, either monochrome or rgb"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
if flags & 0x0100:
lcdtype = 'monochrome'
else:
lcdtype = 'rgb'
return lcdtype
def getGFISelfTestEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0200)
def getGFITripCount(self):
"""Returns GFI Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[1]
def getNoGndTripCount(self):
"""Returns No Ground Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[2]
def getStuckRelayTripCount(self):
"""Returns Stuck Relay Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[3]
def getChargingCurrent(self):
"""Returns the charging current, in amps, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
amps = float(currentAndVoltage[1])/1000
return amps
def getChargingVoltage(self):
"""Returns the charging voltage, in volts, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
volts = float(currentAndVoltage[2])/1000
return volts
def getChargeLimit(self):
"""Returns the charge limit in kWh"""
command = '$GH'
limit = self.sendCommand(command)
return limit[1]
def getVoltMeterScaleFactor(self):
"""Returns the voltmeter scale factor, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[1]
def getVoltMeterOffset(self):
"""Returns the voltmeter offset, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[2]
def getAmbientThreshold(self):
"""Returns the ambient temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[1])/10
def getIRThreshold(self):
"""Returns the IR temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[2])/10
def getRTCTemperature(self):
"""Returns the temperature of the real time clock sensor (DS3231), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[1])/10
def getAmbientTemperature(self):
"""Returns the temperature of the ambient sensor (MCP9808), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[2])/10
def getIRTemperature(self):
"""Returns the temperature of the IR remote sensor (TMP007), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[3])/10
def getTime(self):
"""Get the RTC time. Returns a datetime object, or NULL if the clock is not set"""
command = '$GT'
time = self.sendCommand(command)
if time == ['OK','165', '165', '165', '165', '165', '85']:
return NULL
else:
return datetime.datetime(year = int(time[1])+2000,
month = int(time[2]),
day = int(time[3]),
hour = int(time[4]),
minute = int(time[5]),
second = int(time[6]))
def getUsageSession(self):
"""Get the energy usage for the current charging session. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[1])/3600
def getUsageTotal(self):
"""Get the total energy usage. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[2])
def getFirmwareVersion(self):
"""Returns the Firmware Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[1]
def getProtocolVersion(self):
"""Returns the Protocol Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[2]
|
miniconfig/python-openevse-wifi
|
openevsewifi/__init__.py
|
Charger.getChargeTimeElapsed
|
python
|
def getChargeTimeElapsed(self):
command = '$GS'
status = self.sendCommand(command)
if int(status[1]) == 3:
return int(status[2])
else:
return 0
|
Returns the charge time elapsed (in seconds), or 0 if is not currently charging
|
train
|
https://github.com/miniconfig/python-openevse-wifi/blob/42fabeae052a9f82092fa9220201413732e38bb4/openevsewifi/__init__.py#L47-L54
|
[
"def sendCommand(self, command):\n \"\"\"Sends a command through the web interface of the charger and parses the response\"\"\"\n data = { 'rapi' : command }\n full_url = self.url + urllib.parse.urlencode(data)\n data = urllib.request.urlopen(full_url)\n response = re.search('\\<p>>\\$(.+)\\<script', data.read().decode('utf-8'))\n if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357\n response = re.search('\\>\\>\\$(.+)\\<p>', data.read().decode('utf-8'))\n return response.group(1).split()\n"
] |
class Charger:
def __init__(self, host):
"""A connection to an OpenEVSE charging station equipped with the wifi kit."""
self.url = 'http://' + host + '/r?'
def sendCommand(self, command):
"""Sends a command through the web interface of the charger and parses the response"""
data = { 'rapi' : command }
full_url = self.url + urllib.parse.urlencode(data)
data = urllib.request.urlopen(full_url)
response = re.search('\<p>>\$(.+)\<script', data.read().decode('utf-8'))
if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357
response = re.search('\>\>\$(.+)\<p>', data.read().decode('utf-8'))
return response.group(1).split()
def getStatus(self):
"""Returns the charger's charge status, as a string"""
command = '$GS'
status = self.sendCommand(command)
return states[int(status[1])]
def getTimeLimit(self):
"""Returns the time limit in minutes or 0 if no limit is set"""
command = '$G3'
limit = self.sendCommand(command)
return int(limit[1])*15
def getAmmeterScaleFactor(self):
"""Returns the ammeter's current scale factor"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[1])
def getAmmeterOffset(self):
"""Returns the ammeter's current offset"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[2])
def getMinAmps(self):
"""Returns the capacity range minimum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[1])
def getMaxAmps(self):
"""Returns the capacity range maximum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[2])
def getCurrentCapacity(self):
"""Returns the current capacity, in amps"""
command = '$GE'
settings = self.sendCommand(command)
return int(settings[1])
def getServiceLevel(self):
"""Returns the service level"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return (flags & 0x0001) + 1
def getDiodeCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0002)
def getVentRequiredEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0004)
def getGroundCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0008)
def getStuckRelayCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0010)
def getAutoServiceLevelEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0020)
def getAutoStartEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0040)
def getSerialDebugEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0080)
def getLCDType(self):
"""Returns LCD type as a string, either monochrome or rgb"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
if flags & 0x0100:
lcdtype = 'monochrome'
else:
lcdtype = 'rgb'
return lcdtype
def getGFISelfTestEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0200)
def getGFITripCount(self):
"""Returns GFI Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[1]
def getNoGndTripCount(self):
"""Returns No Ground Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[2]
def getStuckRelayTripCount(self):
"""Returns Stuck Relay Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[3]
def getChargingCurrent(self):
"""Returns the charging current, in amps, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
amps = float(currentAndVoltage[1])/1000
return amps
def getChargingVoltage(self):
"""Returns the charging voltage, in volts, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
volts = float(currentAndVoltage[2])/1000
return volts
def getChargeLimit(self):
"""Returns the charge limit in kWh"""
command = '$GH'
limit = self.sendCommand(command)
return limit[1]
def getVoltMeterScaleFactor(self):
"""Returns the voltmeter scale factor, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[1]
def getVoltMeterOffset(self):
"""Returns the voltmeter offset, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[2]
def getAmbientThreshold(self):
"""Returns the ambient temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[1])/10
def getIRThreshold(self):
"""Returns the IR temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[2])/10
def getRTCTemperature(self):
"""Returns the temperature of the real time clock sensor (DS3231), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[1])/10
def getAmbientTemperature(self):
"""Returns the temperature of the ambient sensor (MCP9808), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[2])/10
def getIRTemperature(self):
"""Returns the temperature of the IR remote sensor (TMP007), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[3])/10
def getTime(self):
"""Get the RTC time. Returns a datetime object, or NULL if the clock is not set"""
command = '$GT'
time = self.sendCommand(command)
if time == ['OK','165', '165', '165', '165', '165', '85']:
return NULL
else:
return datetime.datetime(year = int(time[1])+2000,
month = int(time[2]),
day = int(time[3]),
hour = int(time[4]),
minute = int(time[5]),
second = int(time[6]))
def getUsageSession(self):
"""Get the energy usage for the current charging session. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[1])/3600
def getUsageTotal(self):
"""Get the total energy usage. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[2])
def getFirmwareVersion(self):
"""Returns the Firmware Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[1]
def getProtocolVersion(self):
"""Returns the Protocol Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[2]
|
miniconfig/python-openevse-wifi
|
openevsewifi/__init__.py
|
Charger.getServiceLevel
|
python
|
def getServiceLevel(self):
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return (flags & 0x0001) + 1
|
Returns the service level
|
train
|
https://github.com/miniconfig/python-openevse-wifi/blob/42fabeae052a9f82092fa9220201413732e38bb4/openevsewifi/__init__.py#L92-L97
|
[
"def sendCommand(self, command):\n \"\"\"Sends a command through the web interface of the charger and parses the response\"\"\"\n data = { 'rapi' : command }\n full_url = self.url + urllib.parse.urlencode(data)\n data = urllib.request.urlopen(full_url)\n response = re.search('\\<p>>\\$(.+)\\<script', data.read().decode('utf-8'))\n if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357\n response = re.search('\\>\\>\\$(.+)\\<p>', data.read().decode('utf-8'))\n return response.group(1).split()\n"
] |
class Charger:
def __init__(self, host):
"""A connection to an OpenEVSE charging station equipped with the wifi kit."""
self.url = 'http://' + host + '/r?'
def sendCommand(self, command):
"""Sends a command through the web interface of the charger and parses the response"""
data = { 'rapi' : command }
full_url = self.url + urllib.parse.urlencode(data)
data = urllib.request.urlopen(full_url)
response = re.search('\<p>>\$(.+)\<script', data.read().decode('utf-8'))
if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357
response = re.search('\>\>\$(.+)\<p>', data.read().decode('utf-8'))
return response.group(1).split()
def getStatus(self):
"""Returns the charger's charge status, as a string"""
command = '$GS'
status = self.sendCommand(command)
return states[int(status[1])]
def getChargeTimeElapsed(self):
"""Returns the charge time elapsed (in seconds), or 0 if is not currently charging"""
command = '$GS'
status = self.sendCommand(command)
if int(status[1]) == 3:
return int(status[2])
else:
return 0
def getTimeLimit(self):
"""Returns the time limit in minutes or 0 if no limit is set"""
command = '$G3'
limit = self.sendCommand(command)
return int(limit[1])*15
def getAmmeterScaleFactor(self):
"""Returns the ammeter's current scale factor"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[1])
def getAmmeterOffset(self):
"""Returns the ammeter's current offset"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[2])
def getMinAmps(self):
"""Returns the capacity range minimum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[1])
def getMaxAmps(self):
"""Returns the capacity range maximum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[2])
def getCurrentCapacity(self):
"""Returns the current capacity, in amps"""
command = '$GE'
settings = self.sendCommand(command)
return int(settings[1])
def getDiodeCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0002)
def getVentRequiredEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0004)
def getGroundCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0008)
def getStuckRelayCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0010)
def getAutoServiceLevelEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0020)
def getAutoStartEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0040)
def getSerialDebugEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0080)
def getLCDType(self):
"""Returns LCD type as a string, either monochrome or rgb"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
if flags & 0x0100:
lcdtype = 'monochrome'
else:
lcdtype = 'rgb'
return lcdtype
def getGFISelfTestEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0200)
def getGFITripCount(self):
"""Returns GFI Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[1]
def getNoGndTripCount(self):
"""Returns No Ground Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[2]
def getStuckRelayTripCount(self):
"""Returns Stuck Relay Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[3]
def getChargingCurrent(self):
"""Returns the charging current, in amps, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
amps = float(currentAndVoltage[1])/1000
return amps
def getChargingVoltage(self):
"""Returns the charging voltage, in volts, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
volts = float(currentAndVoltage[2])/1000
return volts
def getChargeLimit(self):
"""Returns the charge limit in kWh"""
command = '$GH'
limit = self.sendCommand(command)
return limit[1]
def getVoltMeterScaleFactor(self):
"""Returns the voltmeter scale factor, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[1]
def getVoltMeterOffset(self):
"""Returns the voltmeter offset, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[2]
def getAmbientThreshold(self):
"""Returns the ambient temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[1])/10
def getIRThreshold(self):
"""Returns the IR temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[2])/10
def getRTCTemperature(self):
"""Returns the temperature of the real time clock sensor (DS3231), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[1])/10
def getAmbientTemperature(self):
"""Returns the temperature of the ambient sensor (MCP9808), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[2])/10
def getIRTemperature(self):
"""Returns the temperature of the IR remote sensor (TMP007), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[3])/10
def getTime(self):
"""Get the RTC time. Returns a datetime object, or NULL if the clock is not set"""
command = '$GT'
time = self.sendCommand(command)
if time == ['OK','165', '165', '165', '165', '165', '85']:
return NULL
else:
return datetime.datetime(year = int(time[1])+2000,
month = int(time[2]),
day = int(time[3]),
hour = int(time[4]),
minute = int(time[5]),
second = int(time[6]))
def getUsageSession(self):
"""Get the energy usage for the current charging session. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[1])/3600
def getUsageTotal(self):
"""Get the total energy usage. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[2])
def getFirmwareVersion(self):
"""Returns the Firmware Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[1]
def getProtocolVersion(self):
"""Returns the Protocol Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[2]
|
miniconfig/python-openevse-wifi
|
openevsewifi/__init__.py
|
Charger.getDiodeCheckEnabled
|
python
|
def getDiodeCheckEnabled(self):
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0002)
|
Returns True if enabled, False if disabled
|
train
|
https://github.com/miniconfig/python-openevse-wifi/blob/42fabeae052a9f82092fa9220201413732e38bb4/openevsewifi/__init__.py#L99-L104
|
[
"def sendCommand(self, command):\n \"\"\"Sends a command through the web interface of the charger and parses the response\"\"\"\n data = { 'rapi' : command }\n full_url = self.url + urllib.parse.urlencode(data)\n data = urllib.request.urlopen(full_url)\n response = re.search('\\<p>>\\$(.+)\\<script', data.read().decode('utf-8'))\n if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357\n response = re.search('\\>\\>\\$(.+)\\<p>', data.read().decode('utf-8'))\n return response.group(1).split()\n"
] |
class Charger:
def __init__(self, host):
"""A connection to an OpenEVSE charging station equipped with the wifi kit."""
self.url = 'http://' + host + '/r?'
def sendCommand(self, command):
"""Sends a command through the web interface of the charger and parses the response"""
data = { 'rapi' : command }
full_url = self.url + urllib.parse.urlencode(data)
data = urllib.request.urlopen(full_url)
response = re.search('\<p>>\$(.+)\<script', data.read().decode('utf-8'))
if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357
response = re.search('\>\>\$(.+)\<p>', data.read().decode('utf-8'))
return response.group(1).split()
def getStatus(self):
"""Returns the charger's charge status, as a string"""
command = '$GS'
status = self.sendCommand(command)
return states[int(status[1])]
def getChargeTimeElapsed(self):
"""Returns the charge time elapsed (in seconds), or 0 if is not currently charging"""
command = '$GS'
status = self.sendCommand(command)
if int(status[1]) == 3:
return int(status[2])
else:
return 0
def getTimeLimit(self):
"""Returns the time limit in minutes or 0 if no limit is set"""
command = '$G3'
limit = self.sendCommand(command)
return int(limit[1])*15
def getAmmeterScaleFactor(self):
"""Returns the ammeter's current scale factor"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[1])
def getAmmeterOffset(self):
"""Returns the ammeter's current offset"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[2])
def getMinAmps(self):
"""Returns the capacity range minimum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[1])
def getMaxAmps(self):
"""Returns the capacity range maximum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[2])
def getCurrentCapacity(self):
"""Returns the current capacity, in amps"""
command = '$GE'
settings = self.sendCommand(command)
return int(settings[1])
def getServiceLevel(self):
"""Returns the service level"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return (flags & 0x0001) + 1
def getVentRequiredEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0004)
def getGroundCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0008)
def getStuckRelayCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0010)
def getAutoServiceLevelEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0020)
def getAutoStartEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0040)
def getSerialDebugEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0080)
def getLCDType(self):
"""Returns LCD type as a string, either monochrome or rgb"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
if flags & 0x0100:
lcdtype = 'monochrome'
else:
lcdtype = 'rgb'
return lcdtype
def getGFISelfTestEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0200)
def getGFITripCount(self):
"""Returns GFI Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[1]
def getNoGndTripCount(self):
"""Returns No Ground Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[2]
def getStuckRelayTripCount(self):
"""Returns Stuck Relay Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[3]
def getChargingCurrent(self):
"""Returns the charging current, in amps, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
amps = float(currentAndVoltage[1])/1000
return amps
def getChargingVoltage(self):
"""Returns the charging voltage, in volts, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
volts = float(currentAndVoltage[2])/1000
return volts
def getChargeLimit(self):
"""Returns the charge limit in kWh"""
command = '$GH'
limit = self.sendCommand(command)
return limit[1]
def getVoltMeterScaleFactor(self):
"""Returns the voltmeter scale factor, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[1]
def getVoltMeterOffset(self):
"""Returns the voltmeter offset, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[2]
def getAmbientThreshold(self):
"""Returns the ambient temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[1])/10
def getIRThreshold(self):
"""Returns the IR temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[2])/10
def getRTCTemperature(self):
"""Returns the temperature of the real time clock sensor (DS3231), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[1])/10
def getAmbientTemperature(self):
"""Returns the temperature of the ambient sensor (MCP9808), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[2])/10
def getIRTemperature(self):
"""Returns the temperature of the IR remote sensor (TMP007), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[3])/10
def getTime(self):
"""Get the RTC time. Returns a datetime object, or NULL if the clock is not set"""
command = '$GT'
time = self.sendCommand(command)
if time == ['OK','165', '165', '165', '165', '165', '85']:
return NULL
else:
return datetime.datetime(year = int(time[1])+2000,
month = int(time[2]),
day = int(time[3]),
hour = int(time[4]),
minute = int(time[5]),
second = int(time[6]))
def getUsageSession(self):
"""Get the energy usage for the current charging session. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[1])/3600
def getUsageTotal(self):
"""Get the total energy usage. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[2])
def getFirmwareVersion(self):
"""Returns the Firmware Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[1]
def getProtocolVersion(self):
"""Returns the Protocol Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[2]
|
miniconfig/python-openevse-wifi
|
openevsewifi/__init__.py
|
Charger.getVentRequiredEnabled
|
python
|
def getVentRequiredEnabled(self):
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0004)
|
Returns True if enabled, False if disabled
|
train
|
https://github.com/miniconfig/python-openevse-wifi/blob/42fabeae052a9f82092fa9220201413732e38bb4/openevsewifi/__init__.py#L106-L111
|
[
"def sendCommand(self, command):\n \"\"\"Sends a command through the web interface of the charger and parses the response\"\"\"\n data = { 'rapi' : command }\n full_url = self.url + urllib.parse.urlencode(data)\n data = urllib.request.urlopen(full_url)\n response = re.search('\\<p>>\\$(.+)\\<script', data.read().decode('utf-8'))\n if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357\n response = re.search('\\>\\>\\$(.+)\\<p>', data.read().decode('utf-8'))\n return response.group(1).split()\n"
] |
class Charger:
def __init__(self, host):
"""A connection to an OpenEVSE charging station equipped with the wifi kit."""
self.url = 'http://' + host + '/r?'
def sendCommand(self, command):
"""Sends a command through the web interface of the charger and parses the response"""
data = { 'rapi' : command }
full_url = self.url + urllib.parse.urlencode(data)
data = urllib.request.urlopen(full_url)
response = re.search('\<p>>\$(.+)\<script', data.read().decode('utf-8'))
if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357
response = re.search('\>\>\$(.+)\<p>', data.read().decode('utf-8'))
return response.group(1).split()
def getStatus(self):
"""Returns the charger's charge status, as a string"""
command = '$GS'
status = self.sendCommand(command)
return states[int(status[1])]
def getChargeTimeElapsed(self):
"""Returns the charge time elapsed (in seconds), or 0 if is not currently charging"""
command = '$GS'
status = self.sendCommand(command)
if int(status[1]) == 3:
return int(status[2])
else:
return 0
def getTimeLimit(self):
"""Returns the time limit in minutes or 0 if no limit is set"""
command = '$G3'
limit = self.sendCommand(command)
return int(limit[1])*15
def getAmmeterScaleFactor(self):
"""Returns the ammeter's current scale factor"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[1])
def getAmmeterOffset(self):
"""Returns the ammeter's current offset"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[2])
def getMinAmps(self):
"""Returns the capacity range minimum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[1])
def getMaxAmps(self):
"""Returns the capacity range maximum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[2])
def getCurrentCapacity(self):
"""Returns the current capacity, in amps"""
command = '$GE'
settings = self.sendCommand(command)
return int(settings[1])
def getServiceLevel(self):
"""Returns the service level"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return (flags & 0x0001) + 1
def getDiodeCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0002)
def getGroundCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0008)
def getStuckRelayCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0010)
def getAutoServiceLevelEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0020)
def getAutoStartEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0040)
def getSerialDebugEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0080)
def getLCDType(self):
"""Returns LCD type as a string, either monochrome or rgb"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
if flags & 0x0100:
lcdtype = 'monochrome'
else:
lcdtype = 'rgb'
return lcdtype
def getGFISelfTestEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0200)
def getGFITripCount(self):
"""Returns GFI Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[1]
def getNoGndTripCount(self):
"""Returns No Ground Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[2]
def getStuckRelayTripCount(self):
"""Returns Stuck Relay Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[3]
def getChargingCurrent(self):
"""Returns the charging current, in amps, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
amps = float(currentAndVoltage[1])/1000
return amps
def getChargingVoltage(self):
"""Returns the charging voltage, in volts, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
volts = float(currentAndVoltage[2])/1000
return volts
def getChargeLimit(self):
"""Returns the charge limit in kWh"""
command = '$GH'
limit = self.sendCommand(command)
return limit[1]
def getVoltMeterScaleFactor(self):
"""Returns the voltmeter scale factor, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[1]
def getVoltMeterOffset(self):
"""Returns the voltmeter offset, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[2]
def getAmbientThreshold(self):
"""Returns the ambient temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[1])/10
def getIRThreshold(self):
"""Returns the IR temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[2])/10
def getRTCTemperature(self):
"""Returns the temperature of the real time clock sensor (DS3231), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[1])/10
def getAmbientTemperature(self):
"""Returns the temperature of the ambient sensor (MCP9808), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[2])/10
def getIRTemperature(self):
"""Returns the temperature of the IR remote sensor (TMP007), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[3])/10
def getTime(self):
"""Get the RTC time. Returns a datetime object, or NULL if the clock is not set"""
command = '$GT'
time = self.sendCommand(command)
if time == ['OK','165', '165', '165', '165', '165', '85']:
return NULL
else:
return datetime.datetime(year = int(time[1])+2000,
month = int(time[2]),
day = int(time[3]),
hour = int(time[4]),
minute = int(time[5]),
second = int(time[6]))
def getUsageSession(self):
"""Get the energy usage for the current charging session. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[1])/3600
def getUsageTotal(self):
"""Get the total energy usage. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[2])
def getFirmwareVersion(self):
"""Returns the Firmware Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[1]
def getProtocolVersion(self):
"""Returns the Protocol Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[2]
|
miniconfig/python-openevse-wifi
|
openevsewifi/__init__.py
|
Charger.getGroundCheckEnabled
|
python
|
def getGroundCheckEnabled(self):
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0008)
|
Returns True if enabled, False if disabled
|
train
|
https://github.com/miniconfig/python-openevse-wifi/blob/42fabeae052a9f82092fa9220201413732e38bb4/openevsewifi/__init__.py#L113-L118
|
[
"def sendCommand(self, command):\n \"\"\"Sends a command through the web interface of the charger and parses the response\"\"\"\n data = { 'rapi' : command }\n full_url = self.url + urllib.parse.urlencode(data)\n data = urllib.request.urlopen(full_url)\n response = re.search('\\<p>>\\$(.+)\\<script', data.read().decode('utf-8'))\n if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357\n response = re.search('\\>\\>\\$(.+)\\<p>', data.read().decode('utf-8'))\n return response.group(1).split()\n"
] |
class Charger:
def __init__(self, host):
"""A connection to an OpenEVSE charging station equipped with the wifi kit."""
self.url = 'http://' + host + '/r?'
def sendCommand(self, command):
"""Sends a command through the web interface of the charger and parses the response"""
data = { 'rapi' : command }
full_url = self.url + urllib.parse.urlencode(data)
data = urllib.request.urlopen(full_url)
response = re.search('\<p>>\$(.+)\<script', data.read().decode('utf-8'))
if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357
response = re.search('\>\>\$(.+)\<p>', data.read().decode('utf-8'))
return response.group(1).split()
def getStatus(self):
"""Returns the charger's charge status, as a string"""
command = '$GS'
status = self.sendCommand(command)
return states[int(status[1])]
def getChargeTimeElapsed(self):
"""Returns the charge time elapsed (in seconds), or 0 if is not currently charging"""
command = '$GS'
status = self.sendCommand(command)
if int(status[1]) == 3:
return int(status[2])
else:
return 0
def getTimeLimit(self):
"""Returns the time limit in minutes or 0 if no limit is set"""
command = '$G3'
limit = self.sendCommand(command)
return int(limit[1])*15
def getAmmeterScaleFactor(self):
"""Returns the ammeter's current scale factor"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[1])
def getAmmeterOffset(self):
"""Returns the ammeter's current offset"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[2])
def getMinAmps(self):
"""Returns the capacity range minimum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[1])
def getMaxAmps(self):
"""Returns the capacity range maximum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[2])
def getCurrentCapacity(self):
"""Returns the current capacity, in amps"""
command = '$GE'
settings = self.sendCommand(command)
return int(settings[1])
def getServiceLevel(self):
"""Returns the service level"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return (flags & 0x0001) + 1
def getDiodeCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0002)
def getVentRequiredEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0004)
def getStuckRelayCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0010)
def getAutoServiceLevelEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0020)
def getAutoStartEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0040)
def getSerialDebugEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0080)
def getLCDType(self):
"""Returns LCD type as a string, either monochrome or rgb"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
if flags & 0x0100:
lcdtype = 'monochrome'
else:
lcdtype = 'rgb'
return lcdtype
def getGFISelfTestEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0200)
def getGFITripCount(self):
"""Returns GFI Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[1]
def getNoGndTripCount(self):
"""Returns No Ground Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[2]
def getStuckRelayTripCount(self):
"""Returns Stuck Relay Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[3]
def getChargingCurrent(self):
"""Returns the charging current, in amps, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
amps = float(currentAndVoltage[1])/1000
return amps
def getChargingVoltage(self):
"""Returns the charging voltage, in volts, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
volts = float(currentAndVoltage[2])/1000
return volts
def getChargeLimit(self):
"""Returns the charge limit in kWh"""
command = '$GH'
limit = self.sendCommand(command)
return limit[1]
def getVoltMeterScaleFactor(self):
"""Returns the voltmeter scale factor, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[1]
def getVoltMeterOffset(self):
"""Returns the voltmeter offset, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[2]
def getAmbientThreshold(self):
"""Returns the ambient temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[1])/10
def getIRThreshold(self):
"""Returns the IR temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[2])/10
def getRTCTemperature(self):
"""Returns the temperature of the real time clock sensor (DS3231), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[1])/10
def getAmbientTemperature(self):
"""Returns the temperature of the ambient sensor (MCP9808), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[2])/10
def getIRTemperature(self):
"""Returns the temperature of the IR remote sensor (TMP007), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[3])/10
def getTime(self):
"""Get the RTC time. Returns a datetime object, or NULL if the clock is not set"""
command = '$GT'
time = self.sendCommand(command)
if time == ['OK','165', '165', '165', '165', '165', '85']:
return NULL
else:
return datetime.datetime(year = int(time[1])+2000,
month = int(time[2]),
day = int(time[3]),
hour = int(time[4]),
minute = int(time[5]),
second = int(time[6]))
def getUsageSession(self):
"""Get the energy usage for the current charging session. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[1])/3600
def getUsageTotal(self):
"""Get the total energy usage. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[2])
def getFirmwareVersion(self):
"""Returns the Firmware Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[1]
def getProtocolVersion(self):
"""Returns the Protocol Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[2]
|
miniconfig/python-openevse-wifi
|
openevsewifi/__init__.py
|
Charger.getStuckRelayCheckEnabled
|
python
|
def getStuckRelayCheckEnabled(self):
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0010)
|
Returns True if enabled, False if disabled
|
train
|
https://github.com/miniconfig/python-openevse-wifi/blob/42fabeae052a9f82092fa9220201413732e38bb4/openevsewifi/__init__.py#L120-L125
|
[
"def sendCommand(self, command):\n \"\"\"Sends a command through the web interface of the charger and parses the response\"\"\"\n data = { 'rapi' : command }\n full_url = self.url + urllib.parse.urlencode(data)\n data = urllib.request.urlopen(full_url)\n response = re.search('\\<p>>\\$(.+)\\<script', data.read().decode('utf-8'))\n if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357\n response = re.search('\\>\\>\\$(.+)\\<p>', data.read().decode('utf-8'))\n return response.group(1).split()\n"
] |
class Charger:
def __init__(self, host):
"""A connection to an OpenEVSE charging station equipped with the wifi kit."""
self.url = 'http://' + host + '/r?'
def sendCommand(self, command):
"""Sends a command through the web interface of the charger and parses the response"""
data = { 'rapi' : command }
full_url = self.url + urllib.parse.urlencode(data)
data = urllib.request.urlopen(full_url)
response = re.search('\<p>>\$(.+)\<script', data.read().decode('utf-8'))
if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357
response = re.search('\>\>\$(.+)\<p>', data.read().decode('utf-8'))
return response.group(1).split()
def getStatus(self):
"""Returns the charger's charge status, as a string"""
command = '$GS'
status = self.sendCommand(command)
return states[int(status[1])]
def getChargeTimeElapsed(self):
"""Returns the charge time elapsed (in seconds), or 0 if is not currently charging"""
command = '$GS'
status = self.sendCommand(command)
if int(status[1]) == 3:
return int(status[2])
else:
return 0
def getTimeLimit(self):
"""Returns the time limit in minutes or 0 if no limit is set"""
command = '$G3'
limit = self.sendCommand(command)
return int(limit[1])*15
def getAmmeterScaleFactor(self):
"""Returns the ammeter's current scale factor"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[1])
def getAmmeterOffset(self):
"""Returns the ammeter's current offset"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[2])
def getMinAmps(self):
"""Returns the capacity range minimum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[1])
def getMaxAmps(self):
"""Returns the capacity range maximum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[2])
def getCurrentCapacity(self):
"""Returns the current capacity, in amps"""
command = '$GE'
settings = self.sendCommand(command)
return int(settings[1])
def getServiceLevel(self):
"""Returns the service level"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return (flags & 0x0001) + 1
def getDiodeCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0002)
def getVentRequiredEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0004)
def getGroundCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0008)
def getAutoServiceLevelEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0020)
def getAutoStartEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0040)
def getSerialDebugEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0080)
def getLCDType(self):
"""Returns LCD type as a string, either monochrome or rgb"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
if flags & 0x0100:
lcdtype = 'monochrome'
else:
lcdtype = 'rgb'
return lcdtype
def getGFISelfTestEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0200)
def getGFITripCount(self):
"""Returns GFI Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[1]
def getNoGndTripCount(self):
"""Returns No Ground Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[2]
def getStuckRelayTripCount(self):
"""Returns Stuck Relay Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[3]
def getChargingCurrent(self):
"""Returns the charging current, in amps, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
amps = float(currentAndVoltage[1])/1000
return amps
def getChargingVoltage(self):
"""Returns the charging voltage, in volts, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
volts = float(currentAndVoltage[2])/1000
return volts
def getChargeLimit(self):
"""Returns the charge limit in kWh"""
command = '$GH'
limit = self.sendCommand(command)
return limit[1]
def getVoltMeterScaleFactor(self):
"""Returns the voltmeter scale factor, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[1]
def getVoltMeterOffset(self):
"""Returns the voltmeter offset, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[2]
def getAmbientThreshold(self):
"""Returns the ambient temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[1])/10
def getIRThreshold(self):
"""Returns the IR temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[2])/10
def getRTCTemperature(self):
"""Returns the temperature of the real time clock sensor (DS3231), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[1])/10
def getAmbientTemperature(self):
"""Returns the temperature of the ambient sensor (MCP9808), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[2])/10
def getIRTemperature(self):
"""Returns the temperature of the IR remote sensor (TMP007), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[3])/10
def getTime(self):
"""Get the RTC time. Returns a datetime object, or NULL if the clock is not set"""
command = '$GT'
time = self.sendCommand(command)
if time == ['OK','165', '165', '165', '165', '165', '85']:
return NULL
else:
return datetime.datetime(year = int(time[1])+2000,
month = int(time[2]),
day = int(time[3]),
hour = int(time[4]),
minute = int(time[5]),
second = int(time[6]))
def getUsageSession(self):
"""Get the energy usage for the current charging session. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[1])/3600
def getUsageTotal(self):
"""Get the total energy usage. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[2])
def getFirmwareVersion(self):
"""Returns the Firmware Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[1]
def getProtocolVersion(self):
"""Returns the Protocol Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[2]
|
miniconfig/python-openevse-wifi
|
openevsewifi/__init__.py
|
Charger.getAutoServiceLevelEnabled
|
python
|
def getAutoServiceLevelEnabled(self):
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0020)
|
Returns True if enabled, False if disabled
|
train
|
https://github.com/miniconfig/python-openevse-wifi/blob/42fabeae052a9f82092fa9220201413732e38bb4/openevsewifi/__init__.py#L127-L132
|
[
"def sendCommand(self, command):\n \"\"\"Sends a command through the web interface of the charger and parses the response\"\"\"\n data = { 'rapi' : command }\n full_url = self.url + urllib.parse.urlencode(data)\n data = urllib.request.urlopen(full_url)\n response = re.search('\\<p>>\\$(.+)\\<script', data.read().decode('utf-8'))\n if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357\n response = re.search('\\>\\>\\$(.+)\\<p>', data.read().decode('utf-8'))\n return response.group(1).split()\n"
] |
class Charger:
def __init__(self, host):
"""A connection to an OpenEVSE charging station equipped with the wifi kit."""
self.url = 'http://' + host + '/r?'
def sendCommand(self, command):
"""Sends a command through the web interface of the charger and parses the response"""
data = { 'rapi' : command }
full_url = self.url + urllib.parse.urlencode(data)
data = urllib.request.urlopen(full_url)
response = re.search('\<p>>\$(.+)\<script', data.read().decode('utf-8'))
if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357
response = re.search('\>\>\$(.+)\<p>', data.read().decode('utf-8'))
return response.group(1).split()
def getStatus(self):
"""Returns the charger's charge status, as a string"""
command = '$GS'
status = self.sendCommand(command)
return states[int(status[1])]
def getChargeTimeElapsed(self):
"""Returns the charge time elapsed (in seconds), or 0 if is not currently charging"""
command = '$GS'
status = self.sendCommand(command)
if int(status[1]) == 3:
return int(status[2])
else:
return 0
def getTimeLimit(self):
"""Returns the time limit in minutes or 0 if no limit is set"""
command = '$G3'
limit = self.sendCommand(command)
return int(limit[1])*15
def getAmmeterScaleFactor(self):
"""Returns the ammeter's current scale factor"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[1])
def getAmmeterOffset(self):
"""Returns the ammeter's current offset"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[2])
def getMinAmps(self):
"""Returns the capacity range minimum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[1])
def getMaxAmps(self):
"""Returns the capacity range maximum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[2])
def getCurrentCapacity(self):
"""Returns the current capacity, in amps"""
command = '$GE'
settings = self.sendCommand(command)
return int(settings[1])
def getServiceLevel(self):
"""Returns the service level"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return (flags & 0x0001) + 1
def getDiodeCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0002)
def getVentRequiredEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0004)
def getGroundCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0008)
def getStuckRelayCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0010)
def getAutoStartEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0040)
def getSerialDebugEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0080)
def getLCDType(self):
"""Returns LCD type as a string, either monochrome or rgb"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
if flags & 0x0100:
lcdtype = 'monochrome'
else:
lcdtype = 'rgb'
return lcdtype
def getGFISelfTestEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0200)
def getGFITripCount(self):
"""Returns GFI Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[1]
def getNoGndTripCount(self):
"""Returns No Ground Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[2]
def getStuckRelayTripCount(self):
"""Returns Stuck Relay Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[3]
def getChargingCurrent(self):
"""Returns the charging current, in amps, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
amps = float(currentAndVoltage[1])/1000
return amps
def getChargingVoltage(self):
"""Returns the charging voltage, in volts, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
volts = float(currentAndVoltage[2])/1000
return volts
def getChargeLimit(self):
"""Returns the charge limit in kWh"""
command = '$GH'
limit = self.sendCommand(command)
return limit[1]
def getVoltMeterScaleFactor(self):
"""Returns the voltmeter scale factor, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[1]
def getVoltMeterOffset(self):
"""Returns the voltmeter offset, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[2]
def getAmbientThreshold(self):
"""Returns the ambient temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[1])/10
def getIRThreshold(self):
"""Returns the IR temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[2])/10
def getRTCTemperature(self):
"""Returns the temperature of the real time clock sensor (DS3231), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[1])/10
def getAmbientTemperature(self):
"""Returns the temperature of the ambient sensor (MCP9808), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[2])/10
def getIRTemperature(self):
"""Returns the temperature of the IR remote sensor (TMP007), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[3])/10
def getTime(self):
"""Get the RTC time. Returns a datetime object, or NULL if the clock is not set"""
command = '$GT'
time = self.sendCommand(command)
if time == ['OK','165', '165', '165', '165', '165', '85']:
return NULL
else:
return datetime.datetime(year = int(time[1])+2000,
month = int(time[2]),
day = int(time[3]),
hour = int(time[4]),
minute = int(time[5]),
second = int(time[6]))
def getUsageSession(self):
"""Get the energy usage for the current charging session. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[1])/3600
def getUsageTotal(self):
"""Get the total energy usage. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[2])
def getFirmwareVersion(self):
"""Returns the Firmware Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[1]
def getProtocolVersion(self):
"""Returns the Protocol Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[2]
|
miniconfig/python-openevse-wifi
|
openevsewifi/__init__.py
|
Charger.getAutoStartEnabled
|
python
|
def getAutoStartEnabled(self):
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0040)
|
Returns True if enabled, False if disabled
|
train
|
https://github.com/miniconfig/python-openevse-wifi/blob/42fabeae052a9f82092fa9220201413732e38bb4/openevsewifi/__init__.py#L134-L139
|
[
"def sendCommand(self, command):\n \"\"\"Sends a command through the web interface of the charger and parses the response\"\"\"\n data = { 'rapi' : command }\n full_url = self.url + urllib.parse.urlencode(data)\n data = urllib.request.urlopen(full_url)\n response = re.search('\\<p>>\\$(.+)\\<script', data.read().decode('utf-8'))\n if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357\n response = re.search('\\>\\>\\$(.+)\\<p>', data.read().decode('utf-8'))\n return response.group(1).split()\n"
] |
class Charger:
def __init__(self, host):
"""A connection to an OpenEVSE charging station equipped with the wifi kit."""
self.url = 'http://' + host + '/r?'
def sendCommand(self, command):
"""Sends a command through the web interface of the charger and parses the response"""
data = { 'rapi' : command }
full_url = self.url + urllib.parse.urlencode(data)
data = urllib.request.urlopen(full_url)
response = re.search('\<p>>\$(.+)\<script', data.read().decode('utf-8'))
if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357
response = re.search('\>\>\$(.+)\<p>', data.read().decode('utf-8'))
return response.group(1).split()
def getStatus(self):
"""Returns the charger's charge status, as a string"""
command = '$GS'
status = self.sendCommand(command)
return states[int(status[1])]
def getChargeTimeElapsed(self):
"""Returns the charge time elapsed (in seconds), or 0 if is not currently charging"""
command = '$GS'
status = self.sendCommand(command)
if int(status[1]) == 3:
return int(status[2])
else:
return 0
def getTimeLimit(self):
"""Returns the time limit in minutes or 0 if no limit is set"""
command = '$G3'
limit = self.sendCommand(command)
return int(limit[1])*15
def getAmmeterScaleFactor(self):
"""Returns the ammeter's current scale factor"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[1])
def getAmmeterOffset(self):
"""Returns the ammeter's current offset"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[2])
def getMinAmps(self):
"""Returns the capacity range minimum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[1])
def getMaxAmps(self):
"""Returns the capacity range maximum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[2])
def getCurrentCapacity(self):
"""Returns the current capacity, in amps"""
command = '$GE'
settings = self.sendCommand(command)
return int(settings[1])
def getServiceLevel(self):
"""Returns the service level"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return (flags & 0x0001) + 1
def getDiodeCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0002)
def getVentRequiredEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0004)
def getGroundCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0008)
def getStuckRelayCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0010)
def getAutoServiceLevelEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0020)
def getSerialDebugEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0080)
def getLCDType(self):
"""Returns LCD type as a string, either monochrome or rgb"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
if flags & 0x0100:
lcdtype = 'monochrome'
else:
lcdtype = 'rgb'
return lcdtype
def getGFISelfTestEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0200)
def getGFITripCount(self):
"""Returns GFI Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[1]
def getNoGndTripCount(self):
"""Returns No Ground Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[2]
def getStuckRelayTripCount(self):
"""Returns Stuck Relay Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[3]
def getChargingCurrent(self):
"""Returns the charging current, in amps, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
amps = float(currentAndVoltage[1])/1000
return amps
def getChargingVoltage(self):
"""Returns the charging voltage, in volts, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
volts = float(currentAndVoltage[2])/1000
return volts
def getChargeLimit(self):
"""Returns the charge limit in kWh"""
command = '$GH'
limit = self.sendCommand(command)
return limit[1]
def getVoltMeterScaleFactor(self):
"""Returns the voltmeter scale factor, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[1]
def getVoltMeterOffset(self):
"""Returns the voltmeter offset, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[2]
def getAmbientThreshold(self):
"""Returns the ambient temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[1])/10
def getIRThreshold(self):
"""Returns the IR temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[2])/10
def getRTCTemperature(self):
"""Returns the temperature of the real time clock sensor (DS3231), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[1])/10
def getAmbientTemperature(self):
"""Returns the temperature of the ambient sensor (MCP9808), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[2])/10
def getIRTemperature(self):
"""Returns the temperature of the IR remote sensor (TMP007), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[3])/10
def getTime(self):
"""Get the RTC time. Returns a datetime object, or NULL if the clock is not set"""
command = '$GT'
time = self.sendCommand(command)
if time == ['OK','165', '165', '165', '165', '165', '85']:
return NULL
else:
return datetime.datetime(year = int(time[1])+2000,
month = int(time[2]),
day = int(time[3]),
hour = int(time[4]),
minute = int(time[5]),
second = int(time[6]))
def getUsageSession(self):
"""Get the energy usage for the current charging session. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[1])/3600
def getUsageTotal(self):
"""Get the total energy usage. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[2])
def getFirmwareVersion(self):
"""Returns the Firmware Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[1]
def getProtocolVersion(self):
"""Returns the Protocol Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[2]
|
miniconfig/python-openevse-wifi
|
openevsewifi/__init__.py
|
Charger.getSerialDebugEnabled
|
python
|
def getSerialDebugEnabled(self):
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0080)
|
Returns True if enabled, False if disabled
|
train
|
https://github.com/miniconfig/python-openevse-wifi/blob/42fabeae052a9f82092fa9220201413732e38bb4/openevsewifi/__init__.py#L141-L146
|
[
"def sendCommand(self, command):\n \"\"\"Sends a command through the web interface of the charger and parses the response\"\"\"\n data = { 'rapi' : command }\n full_url = self.url + urllib.parse.urlencode(data)\n data = urllib.request.urlopen(full_url)\n response = re.search('\\<p>>\\$(.+)\\<script', data.read().decode('utf-8'))\n if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357\n response = re.search('\\>\\>\\$(.+)\\<p>', data.read().decode('utf-8'))\n return response.group(1).split()\n"
] |
class Charger:
def __init__(self, host):
"""A connection to an OpenEVSE charging station equipped with the wifi kit."""
self.url = 'http://' + host + '/r?'
def sendCommand(self, command):
"""Sends a command through the web interface of the charger and parses the response"""
data = { 'rapi' : command }
full_url = self.url + urllib.parse.urlencode(data)
data = urllib.request.urlopen(full_url)
response = re.search('\<p>>\$(.+)\<script', data.read().decode('utf-8'))
if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357
response = re.search('\>\>\$(.+)\<p>', data.read().decode('utf-8'))
return response.group(1).split()
def getStatus(self):
"""Returns the charger's charge status, as a string"""
command = '$GS'
status = self.sendCommand(command)
return states[int(status[1])]
def getChargeTimeElapsed(self):
"""Returns the charge time elapsed (in seconds), or 0 if is not currently charging"""
command = '$GS'
status = self.sendCommand(command)
if int(status[1]) == 3:
return int(status[2])
else:
return 0
def getTimeLimit(self):
"""Returns the time limit in minutes or 0 if no limit is set"""
command = '$G3'
limit = self.sendCommand(command)
return int(limit[1])*15
def getAmmeterScaleFactor(self):
"""Returns the ammeter's current scale factor"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[1])
def getAmmeterOffset(self):
"""Returns the ammeter's current offset"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[2])
def getMinAmps(self):
"""Returns the capacity range minimum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[1])
def getMaxAmps(self):
"""Returns the capacity range maximum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[2])
def getCurrentCapacity(self):
"""Returns the current capacity, in amps"""
command = '$GE'
settings = self.sendCommand(command)
return int(settings[1])
def getServiceLevel(self):
"""Returns the service level"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return (flags & 0x0001) + 1
def getDiodeCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0002)
def getVentRequiredEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0004)
def getGroundCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0008)
def getStuckRelayCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0010)
def getAutoServiceLevelEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0020)
def getAutoStartEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0040)
def getLCDType(self):
"""Returns LCD type as a string, either monochrome or rgb"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
if flags & 0x0100:
lcdtype = 'monochrome'
else:
lcdtype = 'rgb'
return lcdtype
def getGFISelfTestEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0200)
def getGFITripCount(self):
"""Returns GFI Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[1]
def getNoGndTripCount(self):
"""Returns No Ground Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[2]
def getStuckRelayTripCount(self):
"""Returns Stuck Relay Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[3]
def getChargingCurrent(self):
"""Returns the charging current, in amps, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
amps = float(currentAndVoltage[1])/1000
return amps
def getChargingVoltage(self):
"""Returns the charging voltage, in volts, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
volts = float(currentAndVoltage[2])/1000
return volts
def getChargeLimit(self):
"""Returns the charge limit in kWh"""
command = '$GH'
limit = self.sendCommand(command)
return limit[1]
def getVoltMeterScaleFactor(self):
"""Returns the voltmeter scale factor, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[1]
def getVoltMeterOffset(self):
"""Returns the voltmeter offset, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[2]
def getAmbientThreshold(self):
"""Returns the ambient temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[1])/10
def getIRThreshold(self):
"""Returns the IR temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[2])/10
def getRTCTemperature(self):
"""Returns the temperature of the real time clock sensor (DS3231), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[1])/10
def getAmbientTemperature(self):
"""Returns the temperature of the ambient sensor (MCP9808), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[2])/10
def getIRTemperature(self):
"""Returns the temperature of the IR remote sensor (TMP007), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[3])/10
def getTime(self):
    """Get the RTC time.

    Returns a datetime object, or None if the clock has never been set.
    Fix: the original returned the undefined name ``NULL`` (a NameError in
    Python) on the unset-clock path.
    """
    time = self.sendCommand('$GT')
    # All-0xA5 (165) date fields plus 85 seconds is the firmware's
    # "RTC never set" sentinel response.
    if time == ['OK', '165', '165', '165', '165', '165', '85']:
        return None
    return datetime.datetime(year=int(time[1]) + 2000,  # RAPI year is offset from 2000
                             month=int(time[2]),
                             day=int(time[3]),
                             hour=int(time[4]),
                             minute=int(time[5]),
                             second=int(time[6]))
def getUsageSession(self):
"""Get the energy usage for the current charging session. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[1])/3600
def getUsageTotal(self):
"""Get the total energy usage. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[2])
def getFirmwareVersion(self):
"""Returns the Firmware Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[1]
def getProtocolVersion(self):
"""Returns the Protocol Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[2]
|
miniconfig/python-openevse-wifi
|
openevsewifi/__init__.py
|
Charger.getLCDType
|
python
|
def getLCDType(self):
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
if flags & 0x0100:
lcdtype = 'monochrome'
else:
lcdtype = 'rgb'
return lcdtype
|
Returns LCD type as a string, either monochrome or rgb
|
train
|
https://github.com/miniconfig/python-openevse-wifi/blob/42fabeae052a9f82092fa9220201413732e38bb4/openevsewifi/__init__.py#L148-L157
|
[
"def sendCommand(self, command):\n \"\"\"Sends a command through the web interface of the charger and parses the response\"\"\"\n data = { 'rapi' : command }\n full_url = self.url + urllib.parse.urlencode(data)\n data = urllib.request.urlopen(full_url)\n response = re.search('\\<p>>\\$(.+)\\<script', data.read().decode('utf-8'))\n if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357\n response = re.search('\\>\\>\\$(.+)\\<p>', data.read().decode('utf-8'))\n return response.group(1).split()\n"
] |
class Charger:
    """Client for an OpenEVSE charging station reached through its wifi kit.

    Each getter issues a RAPI command (``$..``) over the station's HTTP
    interface and parses the space-separated tokens out of the response page.
    """

    def __init__(self, host):
        """Remember the RAPI endpoint URL for *host* (no I/O is performed here)."""
        self.url = 'http://' + host + '/r?'

    def sendCommand(self, command):
        """Send a RAPI *command* and return the response tokens as a list of strings."""
        query = urllib.parse.urlencode({'rapi': command})
        # Read the HTTP body exactly once: urlopen() returns a stream, so the
        # original's second data.read() in the fallback branch always saw ''.
        body = urllib.request.urlopen(self.url + query).read().decode('utf-8')
        response = re.search(r'\<p>>\$(.+)\<script', body)
        if response is None:
            # Version 1 firmware renders the page differently - see
            # https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357
            response = re.search(r'\>\>\$(.+)\<p>', body)
        return response.group(1).split()

    def _getFlags(self):
        """Return the $GE settings flag word as an int (internal helper)."""
        settings = self.sendCommand('$GE')
        return int(settings[2], 16)

    def getStatus(self):
        """Returns the charger's charge status, as a string"""
        status = self.sendCommand('$GS')
        # 'states' is a module-level lookup table defined elsewhere in this file.
        return states[int(status[1])]

    def getChargeTimeElapsed(self):
        """Returns the charge time elapsed (in seconds), or 0 if not currently charging"""
        status = self.sendCommand('$GS')
        # State 3 means "charging"; only then is the elapsed field meaningful.
        return int(status[2]) if int(status[1]) == 3 else 0

    def getTimeLimit(self):
        """Returns the time limit in minutes, or 0 if no limit is set"""
        limit = self.sendCommand('$G3')
        return int(limit[1]) * 15  # reported in 15-minute increments

    def getAmmeterScaleFactor(self):
        """Returns the ammeter's current scale factor"""
        return int(self.sendCommand('$GA')[1])

    def getAmmeterOffset(self):
        """Returns the ammeter's current offset"""
        return int(self.sendCommand('$GA')[2])

    def getMinAmps(self):
        """Returns the capacity range minimum, in amps"""
        return int(self.sendCommand('$GC')[1])

    def getMaxAmps(self):
        """Returns the capacity range maximum, in amps"""
        return int(self.sendCommand('$GC')[2])

    def getCurrentCapacity(self):
        """Returns the current capacity, in amps"""
        return int(self.sendCommand('$GE')[1])

    def getServiceLevel(self):
        """Returns the service level (1 or 2)"""
        return (self._getFlags() & 0x0001) + 1

    def getDiodeCheckEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0002)

    def getVentRequiredEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0004)

    def getGroundCheckEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0008)

    def getStuckRelayCheckEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0010)

    def getAutoServiceLevelEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0020)

    def getAutoStartEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0040)

    def getSerialDebugEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0080)

    def getGFISelfTestEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0200)

    def getGFITripCount(self):
        """Returns GFI Trip Count, as integer"""
        # int(): RAPI tokens are strings, but the documented contract is an integer.
        return int(self.sendCommand('$GF')[1])

    def getNoGndTripCount(self):
        """Returns No Ground Trip Count, as integer"""
        return int(self.sendCommand('$GF')[2])

    def getStuckRelayTripCount(self):
        """Returns Stuck Relay Trip Count, as integer"""
        return int(self.sendCommand('$GF')[3])

    def getChargingCurrent(self):
        """Returns the charging current in amps, or 0.0 if not charging"""
        return float(self.sendCommand('$GG')[1]) / 1000  # reported in milliamps

    def getChargingVoltage(self):
        """Returns the charging voltage in volts, or 0.0 if not charging"""
        return float(self.sendCommand('$GG')[2]) / 1000  # reported in millivolts

    def getChargeLimit(self):
        """Returns the charge limit in kWh"""
        # NOTE(review): returned as the raw response token (a string), as in
        # the original implementation.
        return self.sendCommand('$GH')[1]

    def getVoltMeterScaleFactor(self):
        """Returns the voltmeter scale factor, or 0 if there is no voltmeter"""
        voltmeter = self.sendCommand('$GM')
        # 'NK' (not acknowledged) means no voltmeter is fitted.
        return 0 if voltmeter[0] == 'NK' else voltmeter[1]

    def getVoltMeterOffset(self):
        """Returns the voltmeter offset, or 0 if there is no voltmeter"""
        voltmeter = self.sendCommand('$GM')
        return 0 if voltmeter[0] == 'NK' else voltmeter[2]

    def getAmbientThreshold(self):
        """Returns the ambient temperature threshold in degrees Celsius, or 0 if no threshold is set"""
        threshold = self.sendCommand('$GO')
        return 0 if threshold[0] == 'NK' else float(threshold[1]) / 10

    def getIRThreshold(self):
        """Returns the IR temperature threshold in degrees Celsius, or 0 if no threshold is set"""
        threshold = self.sendCommand('$GO')
        return 0 if threshold[0] == 'NK' else float(threshold[2]) / 10

    def getRTCTemperature(self):
        """Returns the real time clock sensor (DS3231) temperature in degrees Celsius, or 0.0 if not installed"""
        return float(self.sendCommand('$GP')[1]) / 10  # tenths of a degree

    def getAmbientTemperature(self):
        """Returns the ambient sensor (MCP9808) temperature in degrees Celsius, or 0.0 if not installed"""
        return float(self.sendCommand('$GP')[2]) / 10

    def getIRTemperature(self):
        """Returns the IR remote sensor (TMP007) temperature in degrees Celsius, or 0.0 if not installed"""
        return float(self.sendCommand('$GP')[3]) / 10

    def getTime(self):
        """Get the RTC time. Returns a datetime object, or None if the clock is not set"""
        time = self.sendCommand('$GT')
        if time == ['OK', '165', '165', '165', '165', '165', '85']:
            # All-0xA5 fields mean the RTC was never set.  The original code
            # returned the undefined name NULL here, raising NameError.
            return None
        return datetime.datetime(year=int(time[1]) + 2000,
                                 month=int(time[2]),
                                 day=int(time[3]),
                                 hour=int(time[4]),
                                 minute=int(time[5]),
                                 second=int(time[6]))

    def getUsageSession(self):
        """Get the energy usage for the current charging session, in Wh"""
        return float(self.sendCommand('$GU')[1]) / 3600  # watt-seconds -> Wh

    def getUsageTotal(self):
        """Get the total energy usage, in Wh"""
        return float(self.sendCommand('$GU')[2])

    def getFirmwareVersion(self):
        """Returns the Firmware Version, as a string"""
        return self.sendCommand('$GV')[1]

    def getProtocolVersion(self):
        """Returns the Protocol Version, as a string"""
        return self.sendCommand('$GV')[2]
|
miniconfig/python-openevse-wifi
|
openevsewifi/__init__.py
|
Charger.getChargingCurrent
|
python
|
def getChargingCurrent(self):
command = '$GG'
currentAndVoltage = self.sendCommand(command)
amps = float(currentAndVoltage[1])/1000
return amps
|
Returns the charging current, in amps, or 0.0 of not charging
|
train
|
https://github.com/miniconfig/python-openevse-wifi/blob/42fabeae052a9f82092fa9220201413732e38bb4/openevsewifi/__init__.py#L184-L189
|
[
"def sendCommand(self, command):\n \"\"\"Sends a command through the web interface of the charger and parses the response\"\"\"\n data = { 'rapi' : command }\n full_url = self.url + urllib.parse.urlencode(data)\n data = urllib.request.urlopen(full_url)\n response = re.search('\\<p>>\\$(.+)\\<script', data.read().decode('utf-8'))\n if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357\n response = re.search('\\>\\>\\$(.+)\\<p>', data.read().decode('utf-8'))\n return response.group(1).split()\n"
] |
class Charger:
    """Client for an OpenEVSE charging station reached through its wifi kit.

    Each getter issues a RAPI command (``$..``) over the station's HTTP
    interface and parses the space-separated tokens out of the response page.
    """

    def __init__(self, host):
        """Remember the RAPI endpoint URL for *host* (no I/O is performed here)."""
        self.url = 'http://' + host + '/r?'

    def sendCommand(self, command):
        """Send a RAPI *command* and return the response tokens as a list of strings."""
        query = urllib.parse.urlencode({'rapi': command})
        # Read the HTTP body exactly once: urlopen() returns a stream, so the
        # original's second data.read() in the fallback branch always saw ''.
        body = urllib.request.urlopen(self.url + query).read().decode('utf-8')
        response = re.search(r'\<p>>\$(.+)\<script', body)
        if response is None:
            # Version 1 firmware renders the page differently - see
            # https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357
            response = re.search(r'\>\>\$(.+)\<p>', body)
        return response.group(1).split()

    def _getFlags(self):
        """Return the $GE settings flag word as an int (internal helper)."""
        settings = self.sendCommand('$GE')
        return int(settings[2], 16)

    def getStatus(self):
        """Returns the charger's charge status, as a string"""
        status = self.sendCommand('$GS')
        # 'states' is a module-level lookup table defined elsewhere in this file.
        return states[int(status[1])]

    def getChargeTimeElapsed(self):
        """Returns the charge time elapsed (in seconds), or 0 if not currently charging"""
        status = self.sendCommand('$GS')
        # State 3 means "charging"; only then is the elapsed field meaningful.
        return int(status[2]) if int(status[1]) == 3 else 0

    def getTimeLimit(self):
        """Returns the time limit in minutes, or 0 if no limit is set"""
        limit = self.sendCommand('$G3')
        return int(limit[1]) * 15  # reported in 15-minute increments

    def getAmmeterScaleFactor(self):
        """Returns the ammeter's current scale factor"""
        return int(self.sendCommand('$GA')[1])

    def getAmmeterOffset(self):
        """Returns the ammeter's current offset"""
        return int(self.sendCommand('$GA')[2])

    def getMinAmps(self):
        """Returns the capacity range minimum, in amps"""
        return int(self.sendCommand('$GC')[1])

    def getMaxAmps(self):
        """Returns the capacity range maximum, in amps"""
        return int(self.sendCommand('$GC')[2])

    def getCurrentCapacity(self):
        """Returns the current capacity, in amps"""
        return int(self.sendCommand('$GE')[1])

    def getServiceLevel(self):
        """Returns the service level (1 or 2)"""
        return (self._getFlags() & 0x0001) + 1

    def getDiodeCheckEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0002)

    def getVentRequiredEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0004)

    def getGroundCheckEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0008)

    def getStuckRelayCheckEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0010)

    def getAutoServiceLevelEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0020)

    def getAutoStartEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0040)

    def getSerialDebugEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0080)

    def getLCDType(self):
        """Returns LCD type as a string, either monochrome or rgb"""
        return 'monochrome' if self._getFlags() & 0x0100 else 'rgb'

    def getGFISelfTestEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0200)

    def getGFITripCount(self):
        """Returns GFI Trip Count, as integer"""
        # int(): RAPI tokens are strings, but the documented contract is an integer.
        return int(self.sendCommand('$GF')[1])

    def getNoGndTripCount(self):
        """Returns No Ground Trip Count, as integer"""
        return int(self.sendCommand('$GF')[2])

    def getStuckRelayTripCount(self):
        """Returns Stuck Relay Trip Count, as integer"""
        return int(self.sendCommand('$GF')[3])

    def getChargingVoltage(self):
        """Returns the charging voltage in volts, or 0.0 if not charging"""
        return float(self.sendCommand('$GG')[2]) / 1000  # reported in millivolts

    def getChargeLimit(self):
        """Returns the charge limit in kWh"""
        # NOTE(review): returned as the raw response token (a string), as in
        # the original implementation.
        return self.sendCommand('$GH')[1]

    def getVoltMeterScaleFactor(self):
        """Returns the voltmeter scale factor, or 0 if there is no voltmeter"""
        voltmeter = self.sendCommand('$GM')
        # 'NK' (not acknowledged) means no voltmeter is fitted.
        return 0 if voltmeter[0] == 'NK' else voltmeter[1]

    def getVoltMeterOffset(self):
        """Returns the voltmeter offset, or 0 if there is no voltmeter"""
        voltmeter = self.sendCommand('$GM')
        return 0 if voltmeter[0] == 'NK' else voltmeter[2]

    def getAmbientThreshold(self):
        """Returns the ambient temperature threshold in degrees Celsius, or 0 if no threshold is set"""
        threshold = self.sendCommand('$GO')
        return 0 if threshold[0] == 'NK' else float(threshold[1]) / 10

    def getIRThreshold(self):
        """Returns the IR temperature threshold in degrees Celsius, or 0 if no threshold is set"""
        threshold = self.sendCommand('$GO')
        return 0 if threshold[0] == 'NK' else float(threshold[2]) / 10

    def getRTCTemperature(self):
        """Returns the real time clock sensor (DS3231) temperature in degrees Celsius, or 0.0 if not installed"""
        return float(self.sendCommand('$GP')[1]) / 10  # tenths of a degree

    def getAmbientTemperature(self):
        """Returns the ambient sensor (MCP9808) temperature in degrees Celsius, or 0.0 if not installed"""
        return float(self.sendCommand('$GP')[2]) / 10

    def getIRTemperature(self):
        """Returns the IR remote sensor (TMP007) temperature in degrees Celsius, or 0.0 if not installed"""
        return float(self.sendCommand('$GP')[3]) / 10

    def getTime(self):
        """Get the RTC time. Returns a datetime object, or None if the clock is not set"""
        time = self.sendCommand('$GT')
        if time == ['OK', '165', '165', '165', '165', '165', '85']:
            # All-0xA5 fields mean the RTC was never set.  The original code
            # returned the undefined name NULL here, raising NameError.
            return None
        return datetime.datetime(year=int(time[1]) + 2000,
                                 month=int(time[2]),
                                 day=int(time[3]),
                                 hour=int(time[4]),
                                 minute=int(time[5]),
                                 second=int(time[6]))

    def getUsageSession(self):
        """Get the energy usage for the current charging session, in Wh"""
        return float(self.sendCommand('$GU')[1]) / 3600  # watt-seconds -> Wh

    def getUsageTotal(self):
        """Get the total energy usage, in Wh"""
        return float(self.sendCommand('$GU')[2])

    def getFirmwareVersion(self):
        """Returns the Firmware Version, as a string"""
        return self.sendCommand('$GV')[1]

    def getProtocolVersion(self):
        """Returns the Protocol Version, as a string"""
        return self.sendCommand('$GV')[2]
|
miniconfig/python-openevse-wifi
|
openevsewifi/__init__.py
|
Charger.getChargingVoltage
|
python
|
def getChargingVoltage(self):
command = '$GG'
currentAndVoltage = self.sendCommand(command)
volts = float(currentAndVoltage[2])/1000
return volts
|
Returns the charging voltage, in volts, or 0.0 of not charging
|
train
|
https://github.com/miniconfig/python-openevse-wifi/blob/42fabeae052a9f82092fa9220201413732e38bb4/openevsewifi/__init__.py#L191-L196
|
[
"def sendCommand(self, command):\n \"\"\"Sends a command through the web interface of the charger and parses the response\"\"\"\n data = { 'rapi' : command }\n full_url = self.url + urllib.parse.urlencode(data)\n data = urllib.request.urlopen(full_url)\n response = re.search('\\<p>>\\$(.+)\\<script', data.read().decode('utf-8'))\n if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357\n response = re.search('\\>\\>\\$(.+)\\<p>', data.read().decode('utf-8'))\n return response.group(1).split()\n"
] |
class Charger:
    """Client for an OpenEVSE charging station reached through its wifi kit.

    Each getter issues a RAPI command (``$..``) over the station's HTTP
    interface and parses the space-separated tokens out of the response page.
    """

    def __init__(self, host):
        """Remember the RAPI endpoint URL for *host* (no I/O is performed here)."""
        self.url = 'http://' + host + '/r?'

    def sendCommand(self, command):
        """Send a RAPI *command* and return the response tokens as a list of strings."""
        query = urllib.parse.urlencode({'rapi': command})
        # Read the HTTP body exactly once: urlopen() returns a stream, so the
        # original's second data.read() in the fallback branch always saw ''.
        body = urllib.request.urlopen(self.url + query).read().decode('utf-8')
        response = re.search(r'\<p>>\$(.+)\<script', body)
        if response is None:
            # Version 1 firmware renders the page differently - see
            # https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357
            response = re.search(r'\>\>\$(.+)\<p>', body)
        return response.group(1).split()

    def _getFlags(self):
        """Return the $GE settings flag word as an int (internal helper)."""
        settings = self.sendCommand('$GE')
        return int(settings[2], 16)

    def getStatus(self):
        """Returns the charger's charge status, as a string"""
        status = self.sendCommand('$GS')
        # 'states' is a module-level lookup table defined elsewhere in this file.
        return states[int(status[1])]

    def getChargeTimeElapsed(self):
        """Returns the charge time elapsed (in seconds), or 0 if not currently charging"""
        status = self.sendCommand('$GS')
        # State 3 means "charging"; only then is the elapsed field meaningful.
        return int(status[2]) if int(status[1]) == 3 else 0

    def getTimeLimit(self):
        """Returns the time limit in minutes, or 0 if no limit is set"""
        limit = self.sendCommand('$G3')
        return int(limit[1]) * 15  # reported in 15-minute increments

    def getAmmeterScaleFactor(self):
        """Returns the ammeter's current scale factor"""
        return int(self.sendCommand('$GA')[1])

    def getAmmeterOffset(self):
        """Returns the ammeter's current offset"""
        return int(self.sendCommand('$GA')[2])

    def getMinAmps(self):
        """Returns the capacity range minimum, in amps"""
        return int(self.sendCommand('$GC')[1])

    def getMaxAmps(self):
        """Returns the capacity range maximum, in amps"""
        return int(self.sendCommand('$GC')[2])

    def getCurrentCapacity(self):
        """Returns the current capacity, in amps"""
        return int(self.sendCommand('$GE')[1])

    def getServiceLevel(self):
        """Returns the service level (1 or 2)"""
        return (self._getFlags() & 0x0001) + 1

    def getDiodeCheckEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0002)

    def getVentRequiredEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0004)

    def getGroundCheckEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0008)

    def getStuckRelayCheckEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0010)

    def getAutoServiceLevelEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0020)

    def getAutoStartEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0040)

    def getSerialDebugEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0080)

    def getLCDType(self):
        """Returns LCD type as a string, either monochrome or rgb"""
        return 'monochrome' if self._getFlags() & 0x0100 else 'rgb'

    def getGFISelfTestEnabled(self):
        """Returns True if enabled, False if disabled"""
        return not (self._getFlags() & 0x0200)

    def getGFITripCount(self):
        """Returns GFI Trip Count, as integer"""
        # int(): RAPI tokens are strings, but the documented contract is an integer.
        return int(self.sendCommand('$GF')[1])

    def getNoGndTripCount(self):
        """Returns No Ground Trip Count, as integer"""
        return int(self.sendCommand('$GF')[2])

    def getStuckRelayTripCount(self):
        """Returns Stuck Relay Trip Count, as integer"""
        return int(self.sendCommand('$GF')[3])

    def getChargingCurrent(self):
        """Returns the charging current in amps, or 0.0 if not charging"""
        return float(self.sendCommand('$GG')[1]) / 1000  # reported in milliamps

    def getChargeLimit(self):
        """Returns the charge limit in kWh"""
        # NOTE(review): returned as the raw response token (a string), as in
        # the original implementation.
        return self.sendCommand('$GH')[1]

    def getVoltMeterScaleFactor(self):
        """Returns the voltmeter scale factor, or 0 if there is no voltmeter"""
        voltmeter = self.sendCommand('$GM')
        # 'NK' (not acknowledged) means no voltmeter is fitted.
        return 0 if voltmeter[0] == 'NK' else voltmeter[1]

    def getVoltMeterOffset(self):
        """Returns the voltmeter offset, or 0 if there is no voltmeter"""
        voltmeter = self.sendCommand('$GM')
        return 0 if voltmeter[0] == 'NK' else voltmeter[2]

    def getAmbientThreshold(self):
        """Returns the ambient temperature threshold in degrees Celsius, or 0 if no threshold is set"""
        threshold = self.sendCommand('$GO')
        return 0 if threshold[0] == 'NK' else float(threshold[1]) / 10

    def getIRThreshold(self):
        """Returns the IR temperature threshold in degrees Celsius, or 0 if no threshold is set"""
        threshold = self.sendCommand('$GO')
        return 0 if threshold[0] == 'NK' else float(threshold[2]) / 10

    def getRTCTemperature(self):
        """Returns the real time clock sensor (DS3231) temperature in degrees Celsius, or 0.0 if not installed"""
        return float(self.sendCommand('$GP')[1]) / 10  # tenths of a degree

    def getAmbientTemperature(self):
        """Returns the ambient sensor (MCP9808) temperature in degrees Celsius, or 0.0 if not installed"""
        return float(self.sendCommand('$GP')[2]) / 10

    def getIRTemperature(self):
        """Returns the IR remote sensor (TMP007) temperature in degrees Celsius, or 0.0 if not installed"""
        return float(self.sendCommand('$GP')[3]) / 10

    def getTime(self):
        """Get the RTC time. Returns a datetime object, or None if the clock is not set"""
        time = self.sendCommand('$GT')
        if time == ['OK', '165', '165', '165', '165', '165', '85']:
            # All-0xA5 fields mean the RTC was never set.  The original code
            # returned the undefined name NULL here, raising NameError.
            return None
        return datetime.datetime(year=int(time[1]) + 2000,
                                 month=int(time[2]),
                                 day=int(time[3]),
                                 hour=int(time[4]),
                                 minute=int(time[5]),
                                 second=int(time[6]))

    def getUsageSession(self):
        """Get the energy usage for the current charging session, in Wh"""
        return float(self.sendCommand('$GU')[1]) / 3600  # watt-seconds -> Wh

    def getUsageTotal(self):
        """Get the total energy usage, in Wh"""
        return float(self.sendCommand('$GU')[2])

    def getFirmwareVersion(self):
        """Returns the Firmware Version, as a string"""
        return self.sendCommand('$GV')[1]

    def getProtocolVersion(self):
        """Returns the Protocol Version, as a string"""
        return self.sendCommand('$GV')[2]
|
miniconfig/python-openevse-wifi
|
openevsewifi/__init__.py
|
Charger.getAmbientThreshold
|
python
|
def getAmbientThreshold(self):
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[1])/10
|
Returns the ambient temperature threshold in degrees Celcius, or 0 if no Threshold is set
|
train
|
https://github.com/miniconfig/python-openevse-wifi/blob/42fabeae052a9f82092fa9220201413732e38bb4/openevsewifi/__init__.py#L222-L229
|
[
"def sendCommand(self, command):\n \"\"\"Sends a command through the web interface of the charger and parses the response\"\"\"\n data = { 'rapi' : command }\n full_url = self.url + urllib.parse.urlencode(data)\n data = urllib.request.urlopen(full_url)\n response = re.search('\\<p>>\\$(.+)\\<script', data.read().decode('utf-8'))\n if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357\n response = re.search('\\>\\>\\$(.+)\\<p>', data.read().decode('utf-8'))\n return response.group(1).split()\n"
] |
class Charger:
def __init__(self, host):
"""A connection to an OpenEVSE charging station equipped with the wifi kit."""
self.url = 'http://' + host + '/r?'
def sendCommand(self, command):
    """Send a RAPI command through the charger's web interface and return the
    parsed response tokens as a list of strings.

    Fix: the original called data.read() a second time in the v1-firmware
    fallback branch; urlopen() returns a stream, so the second read always
    yielded '' and the fallback regex could never match.
    """
    query = urllib.parse.urlencode({'rapi': command})
    body = urllib.request.urlopen(self.url + query).read().decode('utf-8')
    response = re.search(r'\<p>>\$(.+)\<script', body)
    if response is None:
        # Version 1 firmware renders the page differently - see
        # https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357
        response = re.search(r'\>\>\$(.+)\<p>', body)
    return response.group(1).split()
def getStatus(self):
"""Returns the charger's charge status, as a string"""
command = '$GS'
status = self.sendCommand(command)
return states[int(status[1])]
def getChargeTimeElapsed(self):
"""Returns the charge time elapsed (in seconds), or 0 if is not currently charging"""
command = '$GS'
status = self.sendCommand(command)
if int(status[1]) == 3:
return int(status[2])
else:
return 0
def getTimeLimit(self):
"""Returns the time limit in minutes or 0 if no limit is set"""
command = '$G3'
limit = self.sendCommand(command)
return int(limit[1])*15
def getAmmeterScaleFactor(self):
"""Returns the ammeter's current scale factor"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[1])
def getAmmeterOffset(self):
"""Returns the ammeter's current offset"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[2])
def getMinAmps(self):
"""Returns the capacity range minimum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[1])
def getMaxAmps(self):
"""Returns the capacity range maximum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[2])
def getCurrentCapacity(self):
"""Returns the current capacity, in amps"""
command = '$GE'
settings = self.sendCommand(command)
return int(settings[1])
def getServiceLevel(self):
"""Returns the service level"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return (flags & 0x0001) + 1
def getDiodeCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0002)
def getVentRequiredEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0004)
def getGroundCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0008)
def getStuckRelayCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0010)
def getAutoServiceLevelEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0020)
def getAutoStartEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0040)
def getSerialDebugEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0080)
def getLCDType(self):
"""Returns LCD type as a string, either monochrome or rgb"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
if flags & 0x0100:
lcdtype = 'monochrome'
else:
lcdtype = 'rgb'
return lcdtype
def getGFISelfTestEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0200)
def getGFITripCount(self):
"""Returns GFI Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[1]
def getNoGndTripCount(self):
"""Returns No Ground Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[2]
def getStuckRelayTripCount(self):
"""Returns Stuck Relay Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[3]
def getChargingCurrent(self):
"""Returns the charging current, in amps, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
amps = float(currentAndVoltage[1])/1000
return amps
def getChargingVoltage(self):
"""Returns the charging voltage, in volts, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
volts = float(currentAndVoltage[2])/1000
return volts
def getChargeLimit(self):
"""Returns the charge limit in kWh"""
command = '$GH'
limit = self.sendCommand(command)
return limit[1]
def getVoltMeterScaleFactor(self):
"""Returns the voltmeter scale factor, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[1]
def getVoltMeterOffset(self):
"""Returns the voltmeter offset, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[2]
def getIRThreshold(self):
"""Returns the IR temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[2])/10
def getRTCTemperature(self):
"""Returns the temperature of the real time clock sensor (DS3231), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[1])/10
def getAmbientTemperature(self):
"""Returns the temperature of the ambient sensor (MCP9808), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[2])/10
def getIRTemperature(self):
"""Returns the temperature of the IR remote sensor (TMP007), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[3])/10
def getTime(self):
"""Get the RTC time. Returns a datetime object, or NULL if the clock is not set"""
command = '$GT'
time = self.sendCommand(command)
if time == ['OK','165', '165', '165', '165', '165', '85']:
return NULL
else:
return datetime.datetime(year = int(time[1])+2000,
month = int(time[2]),
day = int(time[3]),
hour = int(time[4]),
minute = int(time[5]),
second = int(time[6]))
def getUsageSession(self):
"""Get the energy usage for the current charging session. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[1])/3600
def getUsageTotal(self):
"""Get the total energy usage. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[2])
def getFirmwareVersion(self):
"""Returns the Firmware Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[1]
def getProtocolVersion(self):
"""Returns the Protocol Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[2]
|
miniconfig/python-openevse-wifi
|
openevsewifi/__init__.py
|
Charger.getIRThreshold
|
python
|
def getIRThreshold(self):
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[2])/10
|
Returns the IR temperature threshold in degrees Celcius, or 0 if no Threshold is set
|
train
|
https://github.com/miniconfig/python-openevse-wifi/blob/42fabeae052a9f82092fa9220201413732e38bb4/openevsewifi/__init__.py#L231-L238
|
[
"def sendCommand(self, command):\n \"\"\"Sends a command through the web interface of the charger and parses the response\"\"\"\n data = { 'rapi' : command }\n full_url = self.url + urllib.parse.urlencode(data)\n data = urllib.request.urlopen(full_url)\n response = re.search('\\<p>>\\$(.+)\\<script', data.read().decode('utf-8'))\n if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357\n response = re.search('\\>\\>\\$(.+)\\<p>', data.read().decode('utf-8'))\n return response.group(1).split()\n"
] |
class Charger:
def __init__(self, host):
"""A connection to an OpenEVSE charging station equipped with the wifi kit."""
self.url = 'http://' + host + '/r?'
def sendCommand(self, command):
"""Sends a command through the web interface of the charger and parses the response"""
data = { 'rapi' : command }
full_url = self.url + urllib.parse.urlencode(data)
data = urllib.request.urlopen(full_url)
response = re.search('\<p>>\$(.+)\<script', data.read().decode('utf-8'))
if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357
response = re.search('\>\>\$(.+)\<p>', data.read().decode('utf-8'))
return response.group(1).split()
def getStatus(self):
"""Returns the charger's charge status, as a string"""
command = '$GS'
status = self.sendCommand(command)
return states[int(status[1])]
def getChargeTimeElapsed(self):
"""Returns the charge time elapsed (in seconds), or 0 if is not currently charging"""
command = '$GS'
status = self.sendCommand(command)
if int(status[1]) == 3:
return int(status[2])
else:
return 0
def getTimeLimit(self):
"""Returns the time limit in minutes or 0 if no limit is set"""
command = '$G3'
limit = self.sendCommand(command)
return int(limit[1])*15
def getAmmeterScaleFactor(self):
"""Returns the ammeter's current scale factor"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[1])
def getAmmeterOffset(self):
"""Returns the ammeter's current offset"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[2])
def getMinAmps(self):
"""Returns the capacity range minimum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[1])
def getMaxAmps(self):
"""Returns the capacity range maximum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[2])
def getCurrentCapacity(self):
"""Returns the current capacity, in amps"""
command = '$GE'
settings = self.sendCommand(command)
return int(settings[1])
def getServiceLevel(self):
"""Returns the service level"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return (flags & 0x0001) + 1
def getDiodeCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0002)
def getVentRequiredEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0004)
def getGroundCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0008)
def getStuckRelayCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0010)
def getAutoServiceLevelEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0020)
def getAutoStartEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0040)
def getSerialDebugEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0080)
def getLCDType(self):
"""Returns LCD type as a string, either monochrome or rgb"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
if flags & 0x0100:
lcdtype = 'monochrome'
else:
lcdtype = 'rgb'
return lcdtype
def getGFISelfTestEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0200)
def getGFITripCount(self):
"""Returns GFI Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[1]
def getNoGndTripCount(self):
"""Returns No Ground Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[2]
def getStuckRelayTripCount(self):
"""Returns Stuck Relay Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[3]
def getChargingCurrent(self):
"""Returns the charging current, in amps, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
amps = float(currentAndVoltage[1])/1000
return amps
def getChargingVoltage(self):
"""Returns the charging voltage, in volts, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
volts = float(currentAndVoltage[2])/1000
return volts
def getChargeLimit(self):
"""Returns the charge limit in kWh"""
command = '$GH'
limit = self.sendCommand(command)
return limit[1]
def getVoltMeterScaleFactor(self):
"""Returns the voltmeter scale factor, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[1]
def getVoltMeterOffset(self):
"""Returns the voltmeter offset, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[2]
def getAmbientThreshold(self):
"""Returns the ambient temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[1])/10
def getRTCTemperature(self):
"""Returns the temperature of the real time clock sensor (DS3231), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[1])/10
def getAmbientTemperature(self):
"""Returns the temperature of the ambient sensor (MCP9808), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[2])/10
def getIRTemperature(self):
"""Returns the temperature of the IR remote sensor (TMP007), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[3])/10
def getTime(self):
"""Get the RTC time. Returns a datetime object, or NULL if the clock is not set"""
command = '$GT'
time = self.sendCommand(command)
if time == ['OK','165', '165', '165', '165', '165', '85']:
return NULL
else:
return datetime.datetime(year = int(time[1])+2000,
month = int(time[2]),
day = int(time[3]),
hour = int(time[4]),
minute = int(time[5]),
second = int(time[6]))
def getUsageSession(self):
"""Get the energy usage for the current charging session. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[1])/3600
def getUsageTotal(self):
"""Get the total energy usage. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[2])
def getFirmwareVersion(self):
"""Returns the Firmware Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[1]
def getProtocolVersion(self):
"""Returns the Protocol Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[2]
|
miniconfig/python-openevse-wifi
|
openevsewifi/__init__.py
|
Charger.getTime
|
python
|
def getTime(self):
command = '$GT'
time = self.sendCommand(command)
if time == ['OK','165', '165', '165', '165', '165', '85']:
return NULL
else:
return datetime.datetime(year = int(time[1])+2000,
month = int(time[2]),
day = int(time[3]),
hour = int(time[4]),
minute = int(time[5]),
second = int(time[6]))
|
Get the RTC time. Returns a datetime object, or NULL if the clock is not set
|
train
|
https://github.com/miniconfig/python-openevse-wifi/blob/42fabeae052a9f82092fa9220201413732e38bb4/openevsewifi/__init__.py#L258-L270
|
[
"def sendCommand(self, command):\n \"\"\"Sends a command through the web interface of the charger and parses the response\"\"\"\n data = { 'rapi' : command }\n full_url = self.url + urllib.parse.urlencode(data)\n data = urllib.request.urlopen(full_url)\n response = re.search('\\<p>>\\$(.+)\\<script', data.read().decode('utf-8'))\n if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357\n response = re.search('\\>\\>\\$(.+)\\<p>', data.read().decode('utf-8'))\n return response.group(1).split()\n"
] |
class Charger:
def __init__(self, host):
"""A connection to an OpenEVSE charging station equipped with the wifi kit."""
self.url = 'http://' + host + '/r?'
def sendCommand(self, command):
"""Sends a command through the web interface of the charger and parses the response"""
data = { 'rapi' : command }
full_url = self.url + urllib.parse.urlencode(data)
data = urllib.request.urlopen(full_url)
response = re.search('\<p>>\$(.+)\<script', data.read().decode('utf-8'))
if response == None:#If we are using version 1 - https://github.com/OpenEVSE/ESP8266_WiFi_v1.x/blob/master/OpenEVSE_RAPI_WiFi_ESP8266.ino#L357
response = re.search('\>\>\$(.+)\<p>', data.read().decode('utf-8'))
return response.group(1).split()
def getStatus(self):
"""Returns the charger's charge status, as a string"""
command = '$GS'
status = self.sendCommand(command)
return states[int(status[1])]
def getChargeTimeElapsed(self):
"""Returns the charge time elapsed (in seconds), or 0 if is not currently charging"""
command = '$GS'
status = self.sendCommand(command)
if int(status[1]) == 3:
return int(status[2])
else:
return 0
def getTimeLimit(self):
"""Returns the time limit in minutes or 0 if no limit is set"""
command = '$G3'
limit = self.sendCommand(command)
return int(limit[1])*15
def getAmmeterScaleFactor(self):
"""Returns the ammeter's current scale factor"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[1])
def getAmmeterOffset(self):
"""Returns the ammeter's current offset"""
command = '$GA'
settings = self.sendCommand(command)
return int(settings[2])
def getMinAmps(self):
"""Returns the capacity range minimum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[1])
def getMaxAmps(self):
"""Returns the capacity range maximum, in amps"""
command = '$GC'
caprange = self.sendCommand(command)
return int(caprange[2])
def getCurrentCapacity(self):
"""Returns the current capacity, in amps"""
command = '$GE'
settings = self.sendCommand(command)
return int(settings[1])
def getServiceLevel(self):
"""Returns the service level"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return (flags & 0x0001) + 1
def getDiodeCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0002)
def getVentRequiredEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0004)
def getGroundCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0008)
def getStuckRelayCheckEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0010)
def getAutoServiceLevelEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0020)
def getAutoStartEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0040)
def getSerialDebugEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0080)
def getLCDType(self):
"""Returns LCD type as a string, either monochrome or rgb"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
if flags & 0x0100:
lcdtype = 'monochrome'
else:
lcdtype = 'rgb'
return lcdtype
def getGFISelfTestEnabled(self):
"""Returns True if enabled, False if disabled"""
command = '$GE'
settings = self.sendCommand(command)
flags = int(settings[2], 16)
return not (flags & 0x0200)
def getGFITripCount(self):
"""Returns GFI Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[1]
def getNoGndTripCount(self):
"""Returns No Ground Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[2]
def getStuckRelayTripCount(self):
"""Returns Stuck Relay Trip Count, as integer"""
command = '$GF'
faults = self.sendCommand(command)
return faults[3]
def getChargingCurrent(self):
"""Returns the charging current, in amps, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
amps = float(currentAndVoltage[1])/1000
return amps
def getChargingVoltage(self):
"""Returns the charging voltage, in volts, or 0.0 of not charging"""
command = '$GG'
currentAndVoltage = self.sendCommand(command)
volts = float(currentAndVoltage[2])/1000
return volts
def getChargeLimit(self):
"""Returns the charge limit in kWh"""
command = '$GH'
limit = self.sendCommand(command)
return limit[1]
def getVoltMeterScaleFactor(self):
"""Returns the voltmeter scale factor, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[1]
def getVoltMeterOffset(self):
"""Returns the voltmeter offset, or 0 if there is no voltmeter"""
command = '$GM'
voltMeterSettings = self.sendCommand(command)
if voltMeterSettings[0] == 'NK':
return 0
else:
return voltMeterSettings[2]
def getAmbientThreshold(self):
"""Returns the ambient temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[1])/10
def getIRThreshold(self):
"""Returns the IR temperature threshold in degrees Celcius, or 0 if no Threshold is set"""
command = '$GO'
threshold = self.sendCommand(command)
if threshold[0] == 'NK':
return 0
else:
return float(threshold[2])/10
def getRTCTemperature(self):
"""Returns the temperature of the real time clock sensor (DS3231), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[1])/10
def getAmbientTemperature(self):
"""Returns the temperature of the ambient sensor (MCP9808), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[2])/10
def getIRTemperature(self):
"""Returns the temperature of the IR remote sensor (TMP007), in degrees Celcius, or 0.0 if sensor is not installed"""
command = '$GP'
temperature = self.sendCommand(command)
return float(temperature[3])/10
def getUsageSession(self):
"""Get the energy usage for the current charging session. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[1])/3600
def getUsageTotal(self):
"""Get the total energy usage. Returns the energy usage in Wh"""
command = '$GU'
usage = self.sendCommand(command)
return float(usage[2])
def getFirmwareVersion(self):
"""Returns the Firmware Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[1]
def getProtocolVersion(self):
"""Returns the Protocol Version, as a string"""
command = '$GV'
version = self.sendCommand(command)
return version[2]
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/client_hooks/__init__.py
|
install_all_patches
|
python
|
def install_all_patches():
from . import mysqldb
from . import psycopg2
from . import strict_redis
from . import sqlalchemy
from . import tornado_http
from . import urllib
from . import urllib2
from . import requests
mysqldb.install_patches()
psycopg2.install_patches()
strict_redis.install_patches()
sqlalchemy.install_patches()
tornado_http.install_patches()
urllib.install_patches()
urllib2.install_patches()
requests.install_patches()
|
A convenience method that installs all available hooks.
If a specific module is not available on the path, it is ignored.
|
train
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/client_hooks/__init__.py#L33-L55
| null |
# Copyright (c) 2015-2017 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
import six
if six.PY2:
from collections import Sequence
else:
from collections.abc import Sequence
import importlib
import logging
from ._current_span import set_current_span_func # noqa
def install_patches(patchers='all'):
"""
Usually called from middleware to install client hooks
specified in the client_hooks section of the configuration.
:param patchers: a list of patchers to run. Acceptable values include:
* None - installs all client patches
* 'all' - installs all client patches
* empty list - does not install any patches
* list of function names - executes the functions
"""
if patchers is None or patchers == 'all':
install_all_patches()
return
if not _valid_args(patchers):
raise ValueError('patchers argument must be None, "all", or a list')
for patch_func_name in patchers:
logging.info('Loading client hook %s', patch_func_name)
patch_func = _load_symbol(patch_func_name)
logging.info('Applying client hook %s', patch_func_name)
patch_func()
def install_client_interceptors(client_interceptors=()):
"""
Install client interceptors for the patchers.
:param client_interceptors: a list of client interceptors to install.
Should be a list of classes
"""
if not _valid_args(client_interceptors):
raise ValueError('client_interceptors argument must be a list')
from ..http_client import ClientInterceptors
for client_interceptor in client_interceptors:
logging.info('Loading client interceptor %s', client_interceptor)
interceptor_class = _load_symbol(client_interceptor)
logging.info('Adding client interceptor %s', client_interceptor)
ClientInterceptors.append(interceptor_class())
def _valid_args(value):
return isinstance(value, Sequence) and \
not isinstance(value, six.string_types)
def _load_symbol(name):
"""Load a symbol by name.
:param str name: The name to load, specified by `module.attr`.
:returns: The attribute value. If the specified module does not contain
the requested attribute then `None` is returned.
"""
module_name, key = name.rsplit('.', 1)
try:
module = importlib.import_module(module_name)
except ImportError as err:
# it's possible the symbol is a class method
module_name, class_name = module_name.rsplit('.', 1)
module = importlib.import_module(module_name)
cls = getattr(module, class_name, None)
if cls:
attr = getattr(cls, key, None)
else:
raise err
else:
attr = getattr(module, key, None)
if not callable(attr):
raise ValueError('%s is not callable (was %r)' % (name, attr))
return attr
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/client_hooks/__init__.py
|
install_patches
|
python
|
def install_patches(patchers='all'):
if patchers is None or patchers == 'all':
install_all_patches()
return
if not _valid_args(patchers):
raise ValueError('patchers argument must be None, "all", or a list')
for patch_func_name in patchers:
logging.info('Loading client hook %s', patch_func_name)
patch_func = _load_symbol(patch_func_name)
logging.info('Applying client hook %s', patch_func_name)
patch_func()
|
Usually called from middleware to install client hooks
specified in the client_hooks section of the configuration.
:param patchers: a list of patchers to run. Acceptable values include:
* None - installs all client patches
* 'all' - installs all client patches
* empty list - does not install any patches
* list of function names - executes the functions
|
train
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/client_hooks/__init__.py#L58-L79
|
[
"def install_all_patches():\n \"\"\"\n A convenience method that installs all available hooks.\n\n If a specific module is not available on the path, it is ignored.\n \"\"\"\n from . import mysqldb\n from . import psycopg2\n from . import strict_redis\n from . import sqlalchemy\n from . import tornado_http\n from . import urllib\n from . import urllib2\n from . import requests\n\n mysqldb.install_patches()\n psycopg2.install_patches()\n strict_redis.install_patches()\n sqlalchemy.install_patches()\n tornado_http.install_patches()\n urllib.install_patches()\n urllib2.install_patches()\n requests.install_patches()\n",
"def _valid_args(value):\n return isinstance(value, Sequence) and \\\n not isinstance(value, six.string_types)\n"
] |
# Copyright (c) 2015-2017 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
import six
if six.PY2:
from collections import Sequence
else:
from collections.abc import Sequence
import importlib
import logging
from ._current_span import set_current_span_func # noqa
def install_all_patches():
"""
A convenience method that installs all available hooks.
If a specific module is not available on the path, it is ignored.
"""
from . import mysqldb
from . import psycopg2
from . import strict_redis
from . import sqlalchemy
from . import tornado_http
from . import urllib
from . import urllib2
from . import requests
mysqldb.install_patches()
psycopg2.install_patches()
strict_redis.install_patches()
sqlalchemy.install_patches()
tornado_http.install_patches()
urllib.install_patches()
urllib2.install_patches()
requests.install_patches()
def install_client_interceptors(client_interceptors=()):
"""
Install client interceptors for the patchers.
:param client_interceptors: a list of client interceptors to install.
Should be a list of classes
"""
if not _valid_args(client_interceptors):
raise ValueError('client_interceptors argument must be a list')
from ..http_client import ClientInterceptors
for client_interceptor in client_interceptors:
logging.info('Loading client interceptor %s', client_interceptor)
interceptor_class = _load_symbol(client_interceptor)
logging.info('Adding client interceptor %s', client_interceptor)
ClientInterceptors.append(interceptor_class())
def _valid_args(value):
return isinstance(value, Sequence) and \
not isinstance(value, six.string_types)
def _load_symbol(name):
"""Load a symbol by name.
:param str name: The name to load, specified by `module.attr`.
:returns: The attribute value. If the specified module does not contain
the requested attribute then `None` is returned.
"""
module_name, key = name.rsplit('.', 1)
try:
module = importlib.import_module(module_name)
except ImportError as err:
# it's possible the symbol is a class method
module_name, class_name = module_name.rsplit('.', 1)
module = importlib.import_module(module_name)
cls = getattr(module, class_name, None)
if cls:
attr = getattr(cls, key, None)
else:
raise err
else:
attr = getattr(module, key, None)
if not callable(attr):
raise ValueError('%s is not callable (was %r)' % (name, attr))
return attr
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/client_hooks/__init__.py
|
install_client_interceptors
|
python
|
def install_client_interceptors(client_interceptors=()):
if not _valid_args(client_interceptors):
raise ValueError('client_interceptors argument must be a list')
from ..http_client import ClientInterceptors
for client_interceptor in client_interceptors:
logging.info('Loading client interceptor %s', client_interceptor)
interceptor_class = _load_symbol(client_interceptor)
logging.info('Adding client interceptor %s', client_interceptor)
ClientInterceptors.append(interceptor_class())
|
Install client interceptors for the patchers.
:param client_interceptors: a list of client interceptors to install.
Should be a list of classes
|
train
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/client_hooks/__init__.py#L82-L98
|
[
"def _valid_args(value):\n return isinstance(value, Sequence) and \\\n not isinstance(value, six.string_types)\n",
"def _load_symbol(name):\n \"\"\"Load a symbol by name.\n\n :param str name: The name to load, specified by `module.attr`.\n :returns: The attribute value. If the specified module does not contain\n the requested attribute then `None` is returned.\n \"\"\"\n module_name, key = name.rsplit('.', 1)\n try:\n module = importlib.import_module(module_name)\n except ImportError as err:\n # it's possible the symbol is a class method\n module_name, class_name = module_name.rsplit('.', 1)\n module = importlib.import_module(module_name)\n cls = getattr(module, class_name, None)\n if cls:\n attr = getattr(cls, key, None)\n else:\n raise err\n else:\n attr = getattr(module, key, None)\n if not callable(attr):\n raise ValueError('%s is not callable (was %r)' % (name, attr))\n return attr\n",
"def append(cls, interceptor):\n \"\"\"\n Add interceptor to the end of the internal list.\n\n Note: Raises ``ValueError`` if interceptor\n does not extend ``OpenTracingInterceptor``\n \"\"\"\n cls._check(interceptor)\n cls._interceptors.append(interceptor)\n"
] |
# Copyright (c) 2015-2017 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
import six
if six.PY2:
from collections import Sequence
else:
from collections.abc import Sequence
import importlib
import logging
from ._current_span import set_current_span_func # noqa
def install_all_patches():
    """
    A convenience method that installs all available hooks.
    If a specific module is not available on the path, it is ignored.
    """
    # Hook modules are imported lazily, inside the function body, so that
    # merely importing this package does not pull in every integration.
    from . import mysqldb
    from . import psycopg2
    from . import strict_redis
    from . import sqlalchemy
    from . import tornado_http
    from . import urllib
    from . import urllib2
    from . import requests
    # Per the docstring, a missing target library is ignored — presumably
    # each hook's install_patches() no-ops in that case (TODO confirm in
    # the individual hook modules).
    mysqldb.install_patches()
    psycopg2.install_patches()
    strict_redis.install_patches()
    sqlalchemy.install_patches()
    tornado_http.install_patches()
    urllib.install_patches()
    urllib2.install_patches()
    requests.install_patches()
def install_patches(patchers='all'):
    """
    Install the client hooks selected by *patchers*.

    Usually called from middleware with the ``client_hooks`` section of
    the configuration.

    :param patchers: which patchers to run. Acceptable values:
        * None - installs all client patches
        * 'all' - installs all client patches
        * empty list - does not install any patches
        * list of function names - executes the functions
    :raises ValueError: if *patchers* is none of the above.
    """
    if patchers is None or patchers == 'all':
        install_all_patches()
        return
    if not _valid_args(patchers):
        raise ValueError('patchers argument must be None, "all", or a list')
    for hook_name in patchers:
        logging.info('Loading client hook %s', hook_name)
        hook = _load_symbol(hook_name)
        logging.info('Applying client hook %s', hook_name)
        hook()
def _valid_args(value):
return isinstance(value, Sequence) and \
not isinstance(value, six.string_types)
def _load_symbol(name):
"""Load a symbol by name.
:param str name: The name to load, specified by `module.attr`.
:returns: The attribute value. If the specified module does not contain
the requested attribute then `None` is returned.
"""
module_name, key = name.rsplit('.', 1)
try:
module = importlib.import_module(module_name)
except ImportError as err:
# it's possible the symbol is a class method
module_name, class_name = module_name.rsplit('.', 1)
module = importlib.import_module(module_name)
cls = getattr(module, class_name, None)
if cls:
attr = getattr(cls, key, None)
else:
raise err
else:
attr = getattr(module, key, None)
if not callable(attr):
raise ValueError('%s is not callable (was %r)' % (name, attr))
return attr
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/client_hooks/__init__.py
|
_load_symbol
|
python
|
def _load_symbol(name):
module_name, key = name.rsplit('.', 1)
try:
module = importlib.import_module(module_name)
except ImportError as err:
# it's possible the symbol is a class method
module_name, class_name = module_name.rsplit('.', 1)
module = importlib.import_module(module_name)
cls = getattr(module, class_name, None)
if cls:
attr = getattr(cls, key, None)
else:
raise err
else:
attr = getattr(module, key, None)
if not callable(attr):
raise ValueError('%s is not callable (was %r)' % (name, attr))
return attr
|
Load a symbol by name.
:param str name: The name to load, specified by `module.attr`.
:returns: The attribute value. If the specified module does not contain
the requested attribute then `None` is returned.
|
train
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/client_hooks/__init__.py#L106-L129
| null |
# Copyright (c) 2015-2017 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
import six
if six.PY2:
from collections import Sequence
else:
from collections.abc import Sequence
import importlib
import logging
from ._current_span import set_current_span_func # noqa
def install_all_patches():
"""
A convenience method that installs all available hooks.
If a specific module is not available on the path, it is ignored.
"""
from . import mysqldb
from . import psycopg2
from . import strict_redis
from . import sqlalchemy
from . import tornado_http
from . import urllib
from . import urllib2
from . import requests
mysqldb.install_patches()
psycopg2.install_patches()
strict_redis.install_patches()
sqlalchemy.install_patches()
tornado_http.install_patches()
urllib.install_patches()
urllib2.install_patches()
requests.install_patches()
def install_patches(patchers='all'):
"""
Usually called from middleware to install client hooks
specified in the client_hooks section of the configuration.
:param patchers: a list of patchers to run. Acceptable values include:
* None - installs all client patches
* 'all' - installs all client patches
* empty list - does not install any patches
* list of function names - executes the functions
"""
if patchers is None or patchers == 'all':
install_all_patches()
return
if not _valid_args(patchers):
raise ValueError('patchers argument must be None, "all", or a list')
for patch_func_name in patchers:
logging.info('Loading client hook %s', patch_func_name)
patch_func = _load_symbol(patch_func_name)
logging.info('Applying client hook %s', patch_func_name)
patch_func()
def install_client_interceptors(client_interceptors=()):
"""
Install client interceptors for the patchers.
:param client_interceptors: a list of client interceptors to install.
Should be a list of classes
"""
if not _valid_args(client_interceptors):
raise ValueError('client_interceptors argument must be a list')
from ..http_client import ClientInterceptors
for client_interceptor in client_interceptors:
logging.info('Loading client interceptor %s', client_interceptor)
interceptor_class = _load_symbol(client_interceptor)
logging.info('Adding client interceptor %s', client_interceptor)
ClientInterceptors.append(interceptor_class())
def _valid_args(value):
return isinstance(value, Sequence) and \
not isinstance(value, six.string_types)
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/request_context.py
|
get_current_span
|
python
|
def get_current_span():
# Check against the old, ScopeManager-less implementation,
# for backwards compatibility.
context = RequestContextManager.current_context()
if context is not None:
return context.span
active = opentracing.tracer.scope_manager.active
return active.span if active else None
|
Access current request context and extract current Span from it.
:return:
Return current span associated with the current request context.
If no request context is present in thread local, or the context
has no span, return None.
|
train
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/request_context.py#L117-L132
|
[
"def current_context(cls):\n \"\"\"Get the current request context.\n\n :rtype: opentracing_instrumentation.RequestContext\n :returns: The current request context, or None.\n \"\"\"\n return getattr(cls._state, 'context', None)\n"
] |
# Copyright (c) 2015 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from builtins import object
import threading
import opentracing
from opentracing.scope_managers.tornado import TornadoScopeManager
from opentracing.scope_managers.tornado import tracer_stack_context
from opentracing.scope_managers.tornado import ThreadSafeStackContext # noqa
class RequestContext(object):
    """
    DEPRECATED: prefer span_in_context() or span_in_stack_context().

    Holds per-request state that downstream service calls may need —
    primarily a reference to the current OpenTracing Span. Subclasses
    may extend it to carry additional request attributes.
    """

    # Single slot keeps instances lean; one is created per request.
    __slots__ = ('span', )

    def __init__(self, span):
        self.span = span
class RequestContextManager(object):
    """
    DEPRECATED: prefer span_in_context() or span_in_stack_context().

    Context manager that stashes a RequestContext in thread-local
    storage for the duration of a ``with`` block. Suitable for use with
    ThreadSafeStackContext (a thread-safe replacement for Tornado's
    StackContext) or directly inside WSGI middleware.
    """

    _state = threading.local()
    _state.context = None

    @classmethod
    def current_context(cls):
        """Return the request context active on this thread, or None.

        :rtype: opentracing_instrumentation.RequestContext
        """
        # getattr with a default: other threads never ran the class body,
        # so their local() instance may lack the attribute entirely.
        return getattr(cls._state, 'context', None)

    def __init__(self, context=None, span=None):
        # Accept either a ready-made context or (for backwards
        # compatibility) a bare span, which gets wrapped on the fly.
        if span:
            self._context = RequestContext(span=span)
        elif isinstance(context, opentracing.Span):
            self._context = RequestContext(span=context)
        else:
            self._context = context

    def __enter__(self):
        cls = type(self)
        self._prev_context = cls.current_context()
        cls._state.context = self._context
        return self._context

    def __exit__(self, *_):
        cls = type(self)
        cls._state.context = self._prev_context
        self._prev_context = None
        return False
class _TracerEnteredStackContext(object):
"""
An entered tracer_stack_context() object.
Intended to have a ready-to-use context where
Span objects can be activated before the context
itself is returned to the user.
"""
def __init__(self, context):
self._context = context
self._deactivation_cb = context.__enter__()
def __enter__(self):
return self._deactivation_cb
def __exit__(self, type, value, traceback):
return self._context.__exit__(type, value, traceback)
def span_in_context(span):
    """
    Store *span* in the thread-local request context for a ``with`` block.

    Only suitable for single-threaded applications such as Flask / uWSGI.

    Usage example in WSGI middleware:

    .. code-block:: python

        from opentracing_instrumentation.http_server import WSGIRequestWrapper
        from opentracing_instrumentation.http_server import before_request
        from opentracing_instrumentation import request_context

        def create_wsgi_tracing_middleware(other_wsgi):

            def wsgi_tracing_middleware(environ, start_response):
                request = WSGIRequestWrapper.from_wsgi_environ(environ)
                span = before_request(request=request, tracer=tracer)

                # Wrapper around the real start_response object to log
                # additional information to opentracing Span
                def start_response_wrapper(status, response_headers,
                                           exc_info=None):
                    if exc_info is not None:
                        span.log(event='exception', payload=exc_info)
                    span.finish()

                    return start_response(status, response_headers)

                with request_context.span_in_context(span):
                    return other_wsgi(environ, start_response_wrapper)

            return wsgi_tracing_middleware

    :param span: OpenTracing Span, or None.
    :return: a context manager wrapping the request context; a no-op
        Scope when *span* is None.
    """
    if span is None:
        # No span given: a no-op Scope still supports the `with` protocol.
        return opentracing.Scope(None, None)
    manager = opentracing.tracer.scope_manager
    return manager.activate(span, False)
def span_in_stack_context(span):
    """
    Create a Tornado StackContext that stores *span* in the thread-local
    request context.

    Intended for Tornado applications based on IOLoop; also works in
    single-threaded apps like Flask, albeit with more overhead.

    Usage example in a Tornado application — instead of calling a
    ``handle_request(request)`` method directly, use a wrapper:

    .. code-block:: python

        from opentracing_instrumentation import request_context

        @tornado.gen.coroutine
        def handle_request_wrapper(request, actual_handler, *args, **kwargs)

            request_wrapper = TornadoRequestWrapper(request=request)
            span = http_server.before_request(request=request_wrapper)

            with request_context.span_in_stack_context(span):
                return actual_handler(*args, **kwargs)

    :param span: OpenTracing Span, or None.
    :return: an entered StackContext wrapping the request context.
    :raises RuntimeError: if the global tracer's scope manager is not a
        TornadoScopeManager.
    """
    if not isinstance(opentracing.tracer.scope_manager, TornadoScopeManager):
        raise RuntimeError('scope_manager is not TornadoScopeManager')
    # Enter the fresh stack context right away so that thread-local
    # storage is available for Span activation.
    entered = _TracerEnteredStackContext(tracer_stack_context())
    if span is None:
        return entered
    opentracing.tracer.scope_manager.activate(span, False)
    # Sanity checks (stripped under -O): the activation must have taken.
    assert opentracing.tracer.active_span is not None
    assert opentracing.tracer.active_span is span
    return entered
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/request_context.py
|
span_in_context
|
python
|
def span_in_context(span):
# Return a no-op Scope if None was specified.
if span is None:
return opentracing.Scope(None, None)
return opentracing.tracer.scope_manager.activate(span, False)
|
Create a context manager that stores the given span in the thread-local
request context. This function should only be used in single-threaded
applications like Flask / uWSGI.
## Usage example in WSGI middleware:
.. code-block:: python
from opentracing_instrumentation.http_server import WSGIRequestWrapper
from opentracing_instrumentation.http_server import before_request
from opentracing_instrumentation import request_context
def create_wsgi_tracing_middleware(other_wsgi):
def wsgi_tracing_middleware(environ, start_response):
request = WSGIRequestWrapper.from_wsgi_environ(environ)
span = before_request(request=request, tracer=tracer)
# Wrapper around the real start_response object to log
# additional information to opentracing Span
def start_response_wrapper(status, response_headers,
exc_info=None):
if exc_info is not None:
span.log(event='exception', payload=exc_info)
span.finish()
return start_response(status, response_headers)
with request_context.span_in_context(span):
return other_wsgi(environ, start_response_wrapper)
return wsgi_tracing_middleware
:param span: OpenTracing Span
:return:
Return context manager that wraps the request context.
|
train
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/request_context.py#L135-L178
| null |
# Copyright (c) 2015 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from builtins import object
import threading
import opentracing
from opentracing.scope_managers.tornado import TornadoScopeManager
from opentracing.scope_managers.tornado import tracer_stack_context
from opentracing.scope_managers.tornado import ThreadSafeStackContext # noqa
class RequestContext(object):
"""
DEPRECATED, use either span_in_context() or span_in_stack_context()
instead.
RequestContext represents the context of a request being executed.
Useful when a service needs to make downstream calls to other services
and requires access to some aspects of the original request, such as
tracing information.
It is designed to hold a reference to the current OpenTracing Span,
but the class can be extended to store more information.
"""
__slots__ = ('span', )
def __init__(self, span):
self.span = span
class RequestContextManager(object):
"""
DEPRECATED, use either span_in_context() or span_in_stack_context()
instead.
A context manager that saves RequestContext in thread-local state.
Intended for use with ThreadSafeStackContext (a thread-safe
replacement for Tornado's StackContext) or as context manager
in a WSGI middleware.
"""
_state = threading.local()
_state.context = None
@classmethod
def current_context(cls):
"""Get the current request context.
:rtype: opentracing_instrumentation.RequestContext
:returns: The current request context, or None.
"""
return getattr(cls._state, 'context', None)
def __init__(self, context=None, span=None):
# normally we want the context parameter, but for backwards
# compatibility we make it optional and allow span as well
if span:
self._context = RequestContext(span=span)
elif isinstance(context, opentracing.Span):
self._context = RequestContext(span=context)
else:
self._context = context
def __enter__(self):
self._prev_context = self.__class__.current_context()
self.__class__._state.context = self._context
return self._context
def __exit__(self, *_):
self.__class__._state.context = self._prev_context
self._prev_context = None
return False
class _TracerEnteredStackContext(object):
"""
An entered tracer_stack_context() object.
Intended to have a ready-to-use context where
Span objects can be activated before the context
itself is returned to the user.
"""
def __init__(self, context):
self._context = context
self._deactivation_cb = context.__enter__()
def __enter__(self):
return self._deactivation_cb
def __exit__(self, type, value, traceback):
return self._context.__exit__(type, value, traceback)
def get_current_span():
    """
    Return the Span associated with the current request, if any.

    :return:
        The current span, taken first from the legacy thread-local
        request context and otherwise from the tracer's active scope.
        Returns None when neither holds a span.
    """
    # The legacy, ScopeManager-less storage wins, for backwards
    # compatibility with code still using RequestContextManager.
    legacy_ctx = RequestContextManager.current_context()
    if legacy_ctx is not None:
        return legacy_ctx.span
    scope = opentracing.tracer.scope_manager.active
    if scope:
        return scope.span
    return None
def span_in_stack_context(span):
"""
Create Tornado's StackContext that stores the given span in the
thread-local request context. This function is intended for use
in Tornado applications based on IOLoop, although will work fine
in single-threaded apps like Flask, albeit with more overhead.
## Usage example in Tornado application
Suppose you have a method `handle_request(request)` in the http server.
Instead of calling it directly, use a wrapper:
.. code-block:: python
from opentracing_instrumentation import request_context
@tornado.gen.coroutine
def handle_request_wrapper(request, actual_handler, *args, **kwargs)
request_wrapper = TornadoRequestWrapper(request=request)
span = http_server.before_request(request=request_wrapper)
with request_context.span_in_stack_context(span):
return actual_handler(*args, **kwargs)
:param span:
:return:
Return StackContext that wraps the request context.
"""
if not isinstance(opentracing.tracer.scope_manager, TornadoScopeManager):
raise RuntimeError('scope_manager is not TornadoScopeManager')
# Enter the newly created stack context so we have
# storage available for Span activation.
context = tracer_stack_context()
entered_context = _TracerEnteredStackContext(context)
if span is None:
return entered_context
opentracing.tracer.scope_manager.activate(span, False)
assert opentracing.tracer.active_span is not None
assert opentracing.tracer.active_span is span
return entered_context
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/request_context.py
|
span_in_stack_context
|
python
|
def span_in_stack_context(span):
if not isinstance(opentracing.tracer.scope_manager, TornadoScopeManager):
raise RuntimeError('scope_manager is not TornadoScopeManager')
# Enter the newly created stack context so we have
# storage available for Span activation.
context = tracer_stack_context()
entered_context = _TracerEnteredStackContext(context)
if span is None:
return entered_context
opentracing.tracer.scope_manager.activate(span, False)
assert opentracing.tracer.active_span is not None
assert opentracing.tracer.active_span is span
return entered_context
|
Create Tornado's StackContext that stores the given span in the
thread-local request context. This function is intended for use
in Tornado applications based on IOLoop, although will work fine
in single-threaded apps like Flask, albeit with more overhead.
## Usage example in Tornado application
Suppose you have a method `handle_request(request)` in the http server.
Instead of calling it directly, use a wrapper:
.. code-block:: python
from opentracing_instrumentation import request_context
@tornado.gen.coroutine
def handle_request_wrapper(request, actual_handler, *args, **kwargs)
request_wrapper = TornadoRequestWrapper(request=request)
span = http_server.before_request(request=request_wrapper)
with request_context.span_in_stack_context(span):
return actual_handler(*args, **kwargs)
:param span:
:return:
Return StackContext that wraps the request context.
|
train
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/request_context.py#L181-L226
| null |
# Copyright (c) 2015 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from builtins import object
import threading
import opentracing
from opentracing.scope_managers.tornado import TornadoScopeManager
from opentracing.scope_managers.tornado import tracer_stack_context
from opentracing.scope_managers.tornado import ThreadSafeStackContext # noqa
class RequestContext(object):
"""
DEPRECATED, use either span_in_context() or span_in_stack_context()
instead.
RequestContext represents the context of a request being executed.
Useful when a service needs to make downstream calls to other services
and requires access to some aspects of the original request, such as
tracing information.
It is designed to hold a reference to the current OpenTracing Span,
but the class can be extended to store more information.
"""
__slots__ = ('span', )
def __init__(self, span):
self.span = span
class RequestContextManager(object):
"""
DEPRECATED, use either span_in_context() or span_in_stack_context()
instead.
A context manager that saves RequestContext in thread-local state.
Intended for use with ThreadSafeStackContext (a thread-safe
replacement for Tornado's StackContext) or as context manager
in a WSGI middleware.
"""
_state = threading.local()
_state.context = None
@classmethod
def current_context(cls):
"""Get the current request context.
:rtype: opentracing_instrumentation.RequestContext
:returns: The current request context, or None.
"""
return getattr(cls._state, 'context', None)
def __init__(self, context=None, span=None):
# normally we want the context parameter, but for backwards
# compatibility we make it optional and allow span as well
if span:
self._context = RequestContext(span=span)
elif isinstance(context, opentracing.Span):
self._context = RequestContext(span=context)
else:
self._context = context
def __enter__(self):
self._prev_context = self.__class__.current_context()
self.__class__._state.context = self._context
return self._context
def __exit__(self, *_):
self.__class__._state.context = self._prev_context
self._prev_context = None
return False
class _TracerEnteredStackContext(object):
"""
An entered tracer_stack_context() object.
Intended to have a ready-to-use context where
Span objects can be activated before the context
itself is returned to the user.
"""
def __init__(self, context):
self._context = context
self._deactivation_cb = context.__enter__()
def __enter__(self):
return self._deactivation_cb
def __exit__(self, type, value, traceback):
return self._context.__exit__(type, value, traceback)
def get_current_span():
"""
Access current request context and extract current Span from it.
:return:
Return current span associated with the current request context.
If no request context is present in thread local, or the context
has no span, return None.
"""
# Check against the old, ScopeManager-less implementation,
# for backwards compatibility.
context = RequestContextManager.current_context()
if context is not None:
return context.span
active = opentracing.tracer.scope_manager.active
return active.span if active else None
def span_in_context(span):
"""
Create a context manager that stores the given span in the thread-local
request context. This function should only be used in single-threaded
applications like Flask / uWSGI.
## Usage example in WSGI middleware:
.. code-block:: python
from opentracing_instrumentation.http_server import WSGIRequestWrapper
from opentracing_instrumentation.http_server import before_request
from opentracing_instrumentation import request_context
def create_wsgi_tracing_middleware(other_wsgi):
def wsgi_tracing_middleware(environ, start_response):
request = WSGIRequestWrapper.from_wsgi_environ(environ)
span = before_request(request=request, tracer=tracer)
# Wrapper around the real start_response object to log
# additional information to opentracing Span
def start_response_wrapper(status, response_headers,
exc_info=None):
if exc_info is not None:
span.log(event='exception', payload=exc_info)
span.finish()
return start_response(status, response_headers)
with request_context.span_in_context(span):
return other_wsgi(environ, start_response_wrapper)
return wsgi_tracing_middleware
:param span: OpenTracing Span
:return:
Return context manager that wraps the request context.
"""
# Return a no-op Scope if None was specified.
if span is None:
return opentracing.Scope(None, None)
return opentracing.tracer.scope_manager.activate(span, False)
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/local_span.py
|
func_span
|
python
|
def func_span(func, tags=None, require_active_trace=False):
    """Create a new local child span for execution of *func*.

    :param func: name of the function or method (only stringified here;
        see TODO below about deriving a qualified name).
    :param tags: optional tags to add to the child span.
    :param require_active_trace: when True and there is no active parent
        span, no span is started and a dummy context manager yielding
        None is returned; when False, a new trace is started.
    :return: a new child span, or a no-op context manager when there is
        no active parent and require_active_trace is True.
    """
    current_span = get_current_span()
    if current_span is None and require_active_trace:
        @contextlib2.contextmanager
        def empty_ctx_mgr():
            yield None
        return empty_ctx_mgr()
    # TODO convert func to a proper name: module:class.func
    operation_name = str(func)
    return utils.start_child_span(
        operation_name=operation_name, parent=current_span, tags=tags)
|
Creates a new local span for execution of the given `func`.
The returned span is best used as a context manager, e.g.
.. code-block:: python
with func_span('my_function'):
return my_function(...)
At this time the func should be a string name. In the future this code
can be enhanced to accept a real function and derive its qualified name.
:param func: name of the function or method
:param tags: optional tags to add to the child span
:param require_active_trace: controls what to do when there is no active
trace. If require_active_trace=True, then no span is created.
If require_active_trace=False, a new trace is started.
:return: new child span, or a dummy context manager if there is no
active/current parent span
|
train
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/local_span.py#L28-L61
|
[
"def get_current_span():\n \"\"\"\n Access current request context and extract current Span from it.\n :return:\n Return current span associated with the current request context.\n If no request context is present in thread local, or the context\n has no span, return None.\n \"\"\"\n # Check against the old, ScopeManager-less implementation,\n # for backwards compatibility.\n context = RequestContextManager.current_context()\n if context is not None:\n return context.span\n\n active = opentracing.tracer.scope_manager.active\n return active.span if active else None\n",
"def start_child_span(operation_name, tracer=None, parent=None, tags=None):\n \"\"\"\n Start a new span as a child of parent_span. If parent_span is None,\n start a new root span.\n\n :param operation_name: operation name\n :param tracer: Tracer or None (defaults to opentracing.tracer)\n :param parent: parent Span or None\n :param tags: optional tags\n :return: new span\n \"\"\"\n tracer = tracer or opentracing.tracer\n return tracer.start_span(\n operation_name=operation_name,\n child_of=parent.context if parent else None,\n tags=tags\n )\n"
] |
# Copyright (c) 2015 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from builtins import str
import functools
import contextlib2
import tornado.concurrent
from . import get_current_span, span_in_stack_context, utils
def traced_function(func=None, name=None, on_start=None,
require_active_trace=False):
"""
A decorator that enables tracing of the wrapped function or
Tornado co-routine provided there is a parent span already established.
.. code-block:: python
@traced_function
def my_function1(arg1, arg2=None)
...
:param func: decorated function or Tornado co-routine
:param name: optional name to use as the Span.operation_name.
If not provided, func.__name__ will be used.
:param on_start: an optional callback to be executed once the child span
is started, but before the decorated function is called. It can be
used to set any additional tags on the span, perhaps by inspecting
the decorated function arguments. The callback must have a signature
`(span, *args, *kwargs)`, where the last two collections are the
arguments passed to the actual decorated function.
.. code-block:: python
def extract_call_site_tag(span, *args, *kwargs)
if 'call_site_tag' in kwargs:
span.set_tag('call_site_tag', kwargs['call_site_tag'])
@traced_function(on_start=extract_call_site_tag)
@tornado.get.coroutine
def my_function(arg1, arg2=None, call_site_tag=None)
...
:param require_active_trace: controls what to do when there is no active
trace. If require_active_trace=True, then no span is created.
If require_active_trace=False, a new trace is started.
:return: returns a tracing decorator
"""
if func is None:
return functools.partial(traced_function, name=name,
on_start=on_start,
require_active_trace=require_active_trace)
if name:
operation_name = name
else:
operation_name = func.__name__
@functools.wraps(func)
def decorator(*args, **kwargs):
parent_span = get_current_span()
if parent_span is None and require_active_trace:
return func(*args, **kwargs)
span = utils.start_child_span(
operation_name=operation_name, parent=parent_span)
if callable(on_start):
on_start(span, *args, **kwargs)
# We explicitly invoke deactivation callback for the StackContext,
# because there are scenarios when it gets retained forever, for
# example when a Periodic Callback is scheduled lazily while in the
# scope of a tracing StackContext.
with span_in_stack_context(span) as deactivate_cb:
try:
res = func(*args, **kwargs)
# Tornado co-routines usually return futures, so we must wait
# until the future is completed, in order to accurately
# capture the function's execution time.
if tornado.concurrent.is_future(res):
def done_callback(future):
deactivate_cb()
exception = future.exception()
if exception is not None:
span.log(event='exception', payload=exception)
span.set_tag('error', 'true')
span.finish()
res.add_done_callback(done_callback)
else:
deactivate_cb()
span.finish()
return res
except Exception as e:
deactivate_cb()
span.log(event='exception', payload=e)
span.set_tag('error', 'true')
span.finish()
raise
return decorator
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/local_span.py
|
traced_function
|
python
|
def traced_function(func=None, name=None, on_start=None,
require_active_trace=False):
if func is None:
return functools.partial(traced_function, name=name,
on_start=on_start,
require_active_trace=require_active_trace)
if name:
operation_name = name
else:
operation_name = func.__name__
@functools.wraps(func)
def decorator(*args, **kwargs):
parent_span = get_current_span()
if parent_span is None and require_active_trace:
return func(*args, **kwargs)
span = utils.start_child_span(
operation_name=operation_name, parent=parent_span)
if callable(on_start):
on_start(span, *args, **kwargs)
# We explicitly invoke deactivation callback for the StackContext,
# because there are scenarios when it gets retained forever, for
# example when a Periodic Callback is scheduled lazily while in the
# scope of a tracing StackContext.
with span_in_stack_context(span) as deactivate_cb:
try:
res = func(*args, **kwargs)
# Tornado co-routines usually return futures, so we must wait
# until the future is completed, in order to accurately
# capture the function's execution time.
if tornado.concurrent.is_future(res):
def done_callback(future):
deactivate_cb()
exception = future.exception()
if exception is not None:
span.log(event='exception', payload=exception)
span.set_tag('error', 'true')
span.finish()
res.add_done_callback(done_callback)
else:
deactivate_cb()
span.finish()
return res
except Exception as e:
deactivate_cb()
span.log(event='exception', payload=e)
span.set_tag('error', 'true')
span.finish()
raise
return decorator
|
A decorator that enables tracing of the wrapped function or
Tornado co-routine provided there is a parent span already established.
.. code-block:: python
@traced_function
def my_function1(arg1, arg2=None)
...
:param func: decorated function or Tornado co-routine
:param name: optional name to use as the Span.operation_name.
If not provided, func.__name__ will be used.
:param on_start: an optional callback to be executed once the child span
is started, but before the decorated function is called. It can be
used to set any additional tags on the span, perhaps by inspecting
the decorated function arguments. The callback must have a signature
`(span, *args, *kwargs)`, where the last two collections are the
arguments passed to the actual decorated function.
.. code-block:: python
def extract_call_site_tag(span, *args, *kwargs)
if 'call_site_tag' in kwargs:
span.set_tag('call_site_tag', kwargs['call_site_tag'])
@traced_function(on_start=extract_call_site_tag)
@tornado.get.coroutine
def my_function(arg1, arg2=None, call_site_tag=None)
...
:param require_active_trace: controls what to do when there is no active
trace. If require_active_trace=True, then no span is created.
If require_active_trace=False, a new trace is started.
:return: returns a tracing decorator
|
train
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/local_span.py#L64-L153
| null |
# Copyright (c) 2015 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from builtins import str
import functools
import contextlib2
import tornado.concurrent
from . import get_current_span, span_in_stack_context, utils
def func_span(func, tags=None, require_active_trace=False):
"""
Creates a new local span for execution of the given `func`.
The returned span is best used as a context manager, e.g.
.. code-block:: python
with func_span('my_function'):
return my_function(...)
At this time the func should be a string name. In the future this code
can be enhanced to accept a real function and derive its qualified name.
:param func: name of the function or method
:param tags: optional tags to add to the child span
:param require_active_trace: controls what to do when there is no active
trace. If require_active_trace=True, then no span is created.
If require_active_trace=False, a new trace is started.
:return: new child span, or a dummy context manager if there is no
active/current parent span
"""
current_span = get_current_span()
if current_span is None and require_active_trace:
@contextlib2.contextmanager
def empty_ctx_mgr():
yield None
return empty_ctx_mgr()
# TODO convert func to a proper name: module:class.func
operation_name = str(func)
return utils.start_child_span(
operation_name=operation_name, parent=current_span, tags=tags)
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/utils.py
|
start_child_span
|
python
|
def start_child_span(operation_name, tracer=None, parent=None, tags=None):
tracer = tracer or opentracing.tracer
return tracer.start_span(
operation_name=operation_name,
child_of=parent.context if parent else None,
tags=tags
)
|
Start a new span as a child of parent_span. If parent_span is None,
start a new root span.
:param operation_name: operation name
:param tracer: Tracer or None (defaults to opentracing.tracer)
:param parent: parent Span or None
:param tags: optional tags
:return: new span
|
train
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/utils.py#L25-L41
| null |
# Copyright (c) 2015 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
import opentracing
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/http_server.py
|
before_request
|
python
|
def before_request(request, tracer=None):
if tracer is None: # pragma: no cover
tracer = opentracing.tracer
# we need to prepare tags upfront, mainly because RPC_SERVER tag must be
# set when starting the span, to support Zipkin's one-span-per-RPC model
tags_dict = {
tags.SPAN_KIND: tags.SPAN_KIND_RPC_SERVER,
tags.HTTP_URL: request.full_url,
}
remote_ip = request.remote_ip
if remote_ip:
tags_dict[tags.PEER_HOST_IPV4] = remote_ip
caller_name = request.caller_name
if caller_name:
tags_dict[tags.PEER_SERVICE] = caller_name
remote_port = request.remote_port
if remote_port:
tags_dict[tags.PEER_PORT] = remote_port
operation = request.operation
try:
carrier = {}
for key, value in six.iteritems(request.headers):
carrier[key] = value
parent_ctx = tracer.extract(
format=Format.HTTP_HEADERS, carrier=carrier
)
except Exception as e:
logging.exception('trace extract failed: %s' % e)
parent_ctx = None
span = tracer.start_span(
operation_name=operation,
child_of=parent_ctx,
tags=tags_dict)
return span
|
Attempts to extract a tracing span from incoming request.
If no tracing context is passed in the headers, or the data
cannot be parsed, a new root span is started.
:param request: HTTP request with `.headers` property exposed
that satisfies a regular dictionary interface
:param tracer: optional tracer instance to use. If not specified
the global opentracing.tracer will be used.
:return: returns a new, already started span.
|
train
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/http_server.py#L35-L86
| null |
# Copyright (c) 2015 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import object
import logging
import urllib.parse
import opentracing
import six
from opentracing import Format
from opentracing.ext import tags
from opentracing_instrumentation import config
class AbstractRequestWrapper(object):
"""
Exposes several properties used by the tracing methods.
"""
@property
def caller_name(self):
for header in config.CONFIG.caller_name_headers:
caller = self.headers.get(header.lower(), None)
if caller is not None:
return caller
return None
@property
def full_url(self):
raise NotImplementedError('full_url')
@property
def headers(self):
raise NotImplementedError('headers')
@property
def method(self):
raise NotImplementedError('method')
@property
def remote_ip(self):
raise NotImplementedError('remote_ip')
@property
def remote_port(self):
return None
@property
def server_port(self):
return None
@property
def operation(self):
return self.method
class TornadoRequestWrapper(AbstractRequestWrapper):
"""
Wraps tornado.httputils.HTTPServerRequest and exposes several properties
used by the tracing methods.
"""
def __init__(self, request):
self.request = request
@property
def full_url(self):
return self.request.full_url()
@property
def headers(self):
return self.request.headers
@property
def method(self):
return self.request.method
@property
def remote_ip(self):
return self.request.remote_ip
class WSGIRequestWrapper(AbstractRequestWrapper):
"""
Wraps WSGI environment and exposes several properties
used by the tracing methods.
"""
def __init__(self, wsgi_environ, headers):
self.wsgi_environ = wsgi_environ
self._headers = headers
@classmethod
def from_wsgi_environ(cls, wsgi_environ):
instance = cls(wsgi_environ=wsgi_environ,
headers=cls._parse_wsgi_headers(wsgi_environ))
return instance
@staticmethod
def _parse_wsgi_headers(wsgi_environ):
"""
HTTP headers are presented in WSGI environment with 'HTTP_' prefix.
This method finds those headers, removes the prefix, converts
underscores to dashes, and converts to lower case.
:param wsgi_environ:
:return: returns a dictionary of headers
"""
prefix = 'HTTP_'
p_len = len(prefix)
# use .items() despite suspected memory pressure bc GC occasionally
# collects wsgi_environ.iteritems() during iteration.
headers = {
key[p_len:].replace('_', '-').lower():
val for (key, val) in wsgi_environ.items()
if key.startswith(prefix)}
return headers
@property
def full_url(self):
"""
Taken from
http://legacy.python.org/dev/peps/pep-3333/#url-reconstruction
:return: Reconstructed URL from WSGI environment.
"""
environ = self.wsgi_environ
url = environ['wsgi.url_scheme'] + '://'
if environ.get('HTTP_HOST'):
url += environ['HTTP_HOST']
else:
url += environ['SERVER_NAME']
if environ['wsgi.url_scheme'] == 'https':
if environ['SERVER_PORT'] != '443':
url += ':' + environ['SERVER_PORT']
else:
if environ['SERVER_PORT'] != '80':
url += ':' + environ['SERVER_PORT']
url += urllib.parse.quote(environ.get('SCRIPT_NAME', ''))
url += urllib.parse.quote(environ.get('PATH_INFO', ''))
if environ.get('QUERY_STRING'):
url += '?' + environ['QUERY_STRING']
return url
@property
def headers(self):
return self._headers
@property
def method(self):
return self.wsgi_environ.get('REQUEST_METHOD')
@property
def remote_ip(self):
return self.wsgi_environ.get('REMOTE_ADDR', None)
@property
def remote_port(self):
return self.wsgi_environ.get('REMOTE_PORT', None)
@property
def server_port(self):
return self.wsgi_environ.get('SERVER_PORT', None)
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/http_server.py
|
WSGIRequestWrapper._parse_wsgi_headers
|
python
|
def _parse_wsgi_headers(wsgi_environ):
prefix = 'HTTP_'
p_len = len(prefix)
# use .items() despite suspected memory pressure bc GC occasionally
# collects wsgi_environ.iteritems() during iteration.
headers = {
key[p_len:].replace('_', '-').lower():
val for (key, val) in wsgi_environ.items()
if key.startswith(prefix)}
return headers
|
HTTP headers are presented in WSGI environment with 'HTTP_' prefix.
This method finds those headers, removes the prefix, converts
underscores to dashes, and converts to lower case.
:param wsgi_environ:
:return: returns a dictionary of headers
|
train
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/http_server.py#L174-L191
| null |
class WSGIRequestWrapper(AbstractRequestWrapper):
"""
Wraps WSGI environment and exposes several properties
used by the tracing methods.
"""
def __init__(self, wsgi_environ, headers):
self.wsgi_environ = wsgi_environ
self._headers = headers
@classmethod
def from_wsgi_environ(cls, wsgi_environ):
instance = cls(wsgi_environ=wsgi_environ,
headers=cls._parse_wsgi_headers(wsgi_environ))
return instance
@staticmethod
@property
def full_url(self):
"""
Taken from
http://legacy.python.org/dev/peps/pep-3333/#url-reconstruction
:return: Reconstructed URL from WSGI environment.
"""
environ = self.wsgi_environ
url = environ['wsgi.url_scheme'] + '://'
if environ.get('HTTP_HOST'):
url += environ['HTTP_HOST']
else:
url += environ['SERVER_NAME']
if environ['wsgi.url_scheme'] == 'https':
if environ['SERVER_PORT'] != '443':
url += ':' + environ['SERVER_PORT']
else:
if environ['SERVER_PORT'] != '80':
url += ':' + environ['SERVER_PORT']
url += urllib.parse.quote(environ.get('SCRIPT_NAME', ''))
url += urllib.parse.quote(environ.get('PATH_INFO', ''))
if environ.get('QUERY_STRING'):
url += '?' + environ['QUERY_STRING']
return url
@property
def headers(self):
return self._headers
@property
def method(self):
return self.wsgi_environ.get('REQUEST_METHOD')
@property
def remote_ip(self):
return self.wsgi_environ.get('REMOTE_ADDR', None)
@property
def remote_port(self):
return self.wsgi_environ.get('REMOTE_PORT', None)
@property
def server_port(self):
return self.wsgi_environ.get('SERVER_PORT', None)
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/http_server.py
|
WSGIRequestWrapper.full_url
|
python
|
def full_url(self):
environ = self.wsgi_environ
url = environ['wsgi.url_scheme'] + '://'
if environ.get('HTTP_HOST'):
url += environ['HTTP_HOST']
else:
url += environ['SERVER_NAME']
if environ['wsgi.url_scheme'] == 'https':
if environ['SERVER_PORT'] != '443':
url += ':' + environ['SERVER_PORT']
else:
if environ['SERVER_PORT'] != '80':
url += ':' + environ['SERVER_PORT']
url += urllib.parse.quote(environ.get('SCRIPT_NAME', ''))
url += urllib.parse.quote(environ.get('PATH_INFO', ''))
if environ.get('QUERY_STRING'):
url += '?' + environ['QUERY_STRING']
return url
|
Taken from
http://legacy.python.org/dev/peps/pep-3333/#url-reconstruction
:return: Reconstructed URL from WSGI environment.
|
train
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/http_server.py#L194-L220
| null |
class WSGIRequestWrapper(AbstractRequestWrapper):
"""
Wraps WSGI environment and exposes several properties
used by the tracing methods.
"""
def __init__(self, wsgi_environ, headers):
self.wsgi_environ = wsgi_environ
self._headers = headers
@classmethod
def from_wsgi_environ(cls, wsgi_environ):
instance = cls(wsgi_environ=wsgi_environ,
headers=cls._parse_wsgi_headers(wsgi_environ))
return instance
@staticmethod
def _parse_wsgi_headers(wsgi_environ):
"""
HTTP headers are presented in WSGI environment with 'HTTP_' prefix.
This method finds those headers, removes the prefix, converts
underscores to dashes, and converts to lower case.
:param wsgi_environ:
:return: returns a dictionary of headers
"""
prefix = 'HTTP_'
p_len = len(prefix)
# use .items() despite suspected memory pressure bc GC occasionally
# collects wsgi_environ.iteritems() during iteration.
headers = {
key[p_len:].replace('_', '-').lower():
val for (key, val) in wsgi_environ.items()
if key.startswith(prefix)}
return headers
@property
@property
def headers(self):
return self._headers
@property
def method(self):
return self.wsgi_environ.get('REQUEST_METHOD')
@property
def remote_ip(self):
return self.wsgi_environ.get('REMOTE_ADDR', None)
@property
def remote_port(self):
return self.wsgi_environ.get('REMOTE_PORT', None)
@property
def server_port(self):
return self.wsgi_environ.get('SERVER_PORT', None)
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/interceptors.py
|
ClientInterceptors.append
|
python
|
def append(cls, interceptor):
cls._check(interceptor)
cls._interceptors.append(interceptor)
|
Add interceptor to the end of the internal list.
Note: Raises ``ValueError`` if interceptor
does not extend ``OpenTracingInterceptor``
|
train
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/interceptors.py#L80-L88
| null |
class ClientInterceptors(object):
"""
Client interceptors executed between span creation and injection.
Subclassed implementations of ``OpenTracingInterceptor`` can be added
and are executed in order in which they are added, after child
span for current request is created, but before the span baggage
contents are injected into the outbound request.
A code sample of expected usage:
from opentracing_instrumentation.interceptors import ClientInterceptors
from my_project.interceptors import CustomOpenTracingInterceptor
my_interceptor = CustomOpenTracingInterceptor()
ClientInterceptors.append(my_interceptor)
"""
_interceptors = []
@classmethod
@classmethod
def insert(cls, index, interceptor):
"""
Add interceptor to the given index in the internal list.
Note: Raises ``ValueError`` if interceptor
does not extend ``OpenTracingInterceptor``
"""
cls._check(interceptor)
cls._interceptors.insert(index, interceptor)
@classmethod
def _check(cls, interceptor):
if not isinstance(interceptor, OpenTracingInterceptor):
raise ValueError('ClientInterceptors only accepts instances '
'of OpenTracingInterceptor')
@classmethod
def get_interceptors(cls):
"""Return a list of interceptors."""
return cls._interceptors
@classmethod
def clear(cls):
"""Clear the internal list of interceptors."""
del cls._interceptors[:]
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/interceptors.py
|
ClientInterceptors.insert
|
python
|
def insert(cls, index, interceptor):
cls._check(interceptor)
cls._interceptors.insert(index, interceptor)
|
Add interceptor to the given index in the internal list.
Note: Raises ``ValueError`` if interceptor
does not extend ``OpenTracingInterceptor``
|
train
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/interceptors.py#L91-L99
| null |
class ClientInterceptors(object):
"""
Client interceptors executed between span creation and injection.
Subclassed implementations of ``OpenTracingInterceptor`` can be added
and are executed in order in which they are added, after child
span for current request is created, but before the span baggage
contents are injected into the outbound request.
A code sample of expected usage:
from opentracing_instrumentation.interceptors import ClientInterceptors
from my_project.interceptors import CustomOpenTracingInterceptor
my_interceptor = CustomOpenTracingInterceptor()
ClientInterceptors.append(my_interceptor)
"""
_interceptors = []
@classmethod
def append(cls, interceptor):
"""
Add interceptor to the end of the internal list.
Note: Raises ``ValueError`` if interceptor
does not extend ``OpenTracingInterceptor``
"""
cls._check(interceptor)
cls._interceptors.append(interceptor)
@classmethod
@classmethod
def _check(cls, interceptor):
if not isinstance(interceptor, OpenTracingInterceptor):
raise ValueError('ClientInterceptors only accepts instances '
'of OpenTracingInterceptor')
@classmethod
def get_interceptors(cls):
"""Return a list of interceptors."""
return cls._interceptors
@classmethod
def clear(cls):
"""Clear the internal list of interceptors."""
del cls._interceptors[:]
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/client_hooks/_singleton.py
|
singleton
|
python
|
def singleton(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
if wrapper.__call_state__ == CALLED:
return
ret = func(*args, **kwargs)
wrapper.__call_state__ = CALLED
return ret
def reset():
wrapper.__call_state__ = NOT_CALLED
wrapper.reset = reset
reset()
# save original func to be able to patch and restore multiple times from
# unit tests
wrapper.__original_func = func
return wrapper
|
This decorator allows you to make sure that a function is called once and
only once. Note that recursive functions will still work.
WARNING: Not thread-safe!!!
|
train
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/client_hooks/_singleton.py#L30-L55
|
[
"def reset():\n wrapper.__call_state__ = NOT_CALLED\n"
] |
# Copyright (c) 2015,2018 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
import functools
NOT_CALLED = 1
CALLED = 2
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/http_client.py
|
before_http_request
|
python
|
def before_http_request(request, current_span_extractor):
span = utils.start_child_span(
operation_name=request.operation,
parent=current_span_extractor()
)
span.set_tag(tags.SPAN_KIND, tags.SPAN_KIND_RPC_CLIENT)
span.set_tag(tags.HTTP_URL, request.full_url)
service_name = request.service_name
host, port = request.host_port
if service_name:
span.set_tag(tags.PEER_SERVICE, service_name)
if host:
span.set_tag(tags.PEER_HOST_IPV4, host)
if port:
span.set_tag(tags.PEER_PORT, port)
# fire interceptors
for interceptor in ClientInterceptors.get_interceptors():
interceptor.process(request=request, span=span)
try:
carrier = {}
opentracing.tracer.inject(span_context=span.context,
format=Format.HTTP_HEADERS,
carrier=carrier)
for key, value in six.iteritems(carrier):
request.add_header(key, value)
except opentracing.UnsupportedFormatException:
pass
return span
|
A hook to be executed before HTTP request is executed.
It returns a Span object that can be used as a context manager around
the actual HTTP call implementation, or in case of async callback,
it needs its `finish()` method to be called explicitly.
:param request: request must match API defined by AbstractRequestWrapper
:param current_span_extractor: function that extracts current span
from some context
:return: returns child tracing span encapsulating this request
|
train
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/http_client.py#L35-L79
|
[
"def get_current_span():\n \"\"\"\n Access current request context and extract current Span from it.\n :return:\n Return current span associated with the current request context.\n If no request context is present in thread local, or the context\n has no span, return None.\n \"\"\"\n # Check against the old, ScopeManager-less implementation,\n # for backwards compatibility.\n context = RequestContextManager.current_context()\n if context is not None:\n return context.span\n\n active = opentracing.tracer.scope_manager.active\n return active.span if active else None\n",
"def start_child_span(operation_name, tracer=None, parent=None, tags=None):\n \"\"\"\n Start a new span as a child of parent_span. If parent_span is None,\n start a new root span.\n\n :param operation_name: operation name\n :param tracer: Tracer or None (defaults to opentracing.tracer)\n :param parent: parent Span or None\n :param tags: optional tags\n :return: new span\n \"\"\"\n tracer = tracer or opentracing.tracer\n return tracer.start_span(\n operation_name=operation_name,\n child_of=parent.context if parent else None,\n tags=tags\n )\n",
"def get_interceptors(cls):\n \"\"\"Return a list of interceptors.\"\"\"\n return cls._interceptors\n",
"def add_header(self, key, value):\n self.request.headers[key] = value\n"
] |
# Copyright (c) 2015 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from builtins import object
import re
import opentracing
import six
from opentracing import Format
from opentracing.ext import tags
from opentracing_instrumentation.config import CONFIG
from opentracing_instrumentation.interceptors import ClientInterceptors
from opentracing_instrumentation import utils
class AbstractRequestWrapper(object):
def add_header(self, key, value):
pass
@property
def _headers(self):
return {}
@property
def host_port(self):
return None, None
@property
def service_name(self):
for header in CONFIG.callee_name_headers:
value = self._headers.get(header, None)
if value is not None:
return value
return None
@property
def operation(self):
for header in CONFIG.callee_endpoint_headers:
value = self._headers.get(header, None)
if value is not None:
return '%s:%s' % (self.method, value)
return self.method
@property
def method(self):
raise NotImplementedError
@property
def full_url(self):
raise NotImplementedError
HOST_PORT_RE = re.compile(r'^(.*):(\d+)$')
def split_host_and_port(host_string, scheme='http'):
is_secure = True if scheme == 'https' else False
m = HOST_PORT_RE.match(host_string)
if m:
host, port = m.groups()
return host, int(port)
elif is_secure is None:
return host_string, None
elif is_secure:
return host_string, 443
else:
return host_string, 80
|
jreese/aiosqlite
|
aiosqlite/core.py
|
connect
|
python
|
def connect(
database: Union[str, Path], *, loop: asyncio.AbstractEventLoop = None, **kwargs: Any
) -> Connection:
if loop is None:
loop = asyncio.get_event_loop()
def connector() -> sqlite3.Connection:
if isinstance(database, str):
loc = database
elif isinstance(database, bytes):
loc = database.decode("utf-8")
else:
loc = str(database)
return sqlite3.connect(loc, **kwargs)
return Connection(connector, loop)
|
Create and return a connection proxy to the sqlite database.
|
train
|
https://github.com/jreese/aiosqlite/blob/3f548b568b8db9a57022b6e2c9627f5cdefb983f/aiosqlite/core.py#L287-L304
| null |
# Copyright 2018 John Reese
# Licensed under the MIT license
"""
Core implementation of aiosqlite proxies
"""
import asyncio
import logging
import sqlite3
from functools import partial
from pathlib import Path
from queue import Queue, Empty
from threading import Thread
from typing import Any, Callable, Generator, Iterable, Optional, Tuple, Type, Union
from .context import contextmanager
__all__ = ["connect", "Connection", "Cursor"]
LOG = logging.getLogger("aiosqlite")
class Cursor:
def __init__(self, conn: "Connection", cursor: sqlite3.Cursor) -> None:
self._conn = conn
self._cursor = cursor
def __aiter__(self) -> "Cursor":
"""The cursor proxy is also an async iterator."""
return self
async def __anext__(self) -> sqlite3.Row:
"""Use `cursor.fetchone()` to provide an async iterable."""
row = await self.fetchone()
if row is None:
raise StopAsyncIteration
return row
async def _execute(self, fn, *args, **kwargs):
"""Execute the given function on the shared connection's thread."""
return await self._conn._execute(fn, *args, **kwargs)
async def execute(self, sql: str, parameters: Iterable[Any] = None) -> None:
"""Execute the given query."""
if parameters is None:
parameters = []
await self._execute(self._cursor.execute, sql, parameters)
async def executemany(self, sql: str, parameters: Iterable[Iterable[Any]]) -> None:
"""Execute the given multiquery."""
await self._execute(self._cursor.executemany, sql, parameters)
async def executescript(self, sql_script: str) -> None:
"""Execute a user script."""
await self._execute(self._cursor.executescript, sql_script)
async def fetchone(self) -> Optional[sqlite3.Row]:
"""Fetch a single row."""
return await self._execute(self._cursor.fetchone)
async def fetchmany(self, size: int = None) -> Iterable[sqlite3.Row]:
"""Fetch up to `cursor.arraysize` number of rows."""
args = () # type: Tuple[int, ...]
if size is not None:
args = (size,)
return await self._execute(self._cursor.fetchmany, *args)
async def fetchall(self) -> Iterable[sqlite3.Row]:
"""Fetch all remaining rows."""
return await self._execute(self._cursor.fetchall)
async def close(self) -> None:
"""Close the cursor."""
await self._execute(self._cursor.close)
@property
def rowcount(self) -> int:
return self._cursor.rowcount
@property
def lastrowid(self) -> int:
return self._cursor.lastrowid
@property
def arraysize(self) -> int:
return self._cursor.arraysize
@arraysize.setter
def arraysize(self, value: int) -> None:
self._cursor.arraysize = value
@property
def description(self) -> Tuple[Tuple]:
return self._cursor.description
@property
def connection(self) -> sqlite3.Connection:
return self._cursor.connection
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self.close()
class Connection(Thread):
def __init__(
self,
connector: Callable[[], sqlite3.Connection],
loop: asyncio.AbstractEventLoop,
) -> None:
super().__init__()
self._running = True
self._connection = None # type: Optional[sqlite3.Connection]
self._connector = connector
self._loop = loop
self._tx = Queue() # type: Queue
@property
def _conn(self) -> sqlite3.Connection:
if self._connection is None:
raise ValueError("no active connection")
return self._connection
def _execute_insert(
self, sql: str, parameters: Iterable[Any]
) -> Optional[sqlite3.Row]:
cursor = self._conn.execute(sql, parameters)
cursor.execute("SELECT last_insert_rowid()")
return cursor.fetchone()
def _execute_fetchall(
self, sql: str, parameters: Iterable[Any]
) -> Iterable[sqlite3.Row]:
cursor = self._conn.execute(sql, parameters)
return cursor.fetchall()
def run(self) -> None:
"""Execute function calls on a separate thread."""
while self._running:
try:
future, function = self._tx.get(timeout=0.1)
except Empty:
continue
try:
LOG.debug("executing %s", function)
result = function()
LOG.debug("returning %s", result)
self._loop.call_soon_threadsafe(future.set_result, result)
except BaseException as e:
LOG.exception("returning exception %s", e)
self._loop.call_soon_threadsafe(future.set_exception, e)
async def _execute(self, fn, *args, **kwargs):
"""Queue a function with the given arguments for execution."""
function = partial(fn, *args, **kwargs)
future = self._loop.create_future()
self._tx.put_nowait((future, function))
return await future
async def _connect(self) -> "Connection":
"""Connect to the actual sqlite database."""
if self._connection is None:
self._connection = await self._execute(self._connector)
return self
def __await__(self) -> Generator[Any, None, "Connection"]:
self.start()
return self._connect().__await__()
async def __aenter__(self) -> "Connection":
return await self
async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
await self.close()
@contextmanager
async def cursor(self) -> Cursor:
"""Create an aiosqlite cursor wrapping a sqlite3 cursor object."""
return Cursor(self, await self._execute(self._conn.cursor))
async def commit(self) -> None:
"""Commit the current transaction."""
await self._execute(self._conn.commit)
async def rollback(self) -> None:
"""Roll back the current transaction."""
await self._execute(self._conn.rollback)
async def close(self) -> None:
"""Complete queued queries/cursors and close the connection."""
await self._execute(self._conn.close)
self._running = False
self._connection = None
@contextmanager
async def execute(self, sql: str, parameters: Iterable[Any] = None) -> Cursor:
"""Helper to create a cursor and execute the given query."""
if parameters is None:
parameters = []
cursor = await self._execute(self._conn.execute, sql, parameters)
return Cursor(self, cursor)
@contextmanager
async def execute_insert(
self, sql: str, parameters: Iterable[Any] = None
) -> Optional[sqlite3.Row]:
"""Helper to insert and get the last_insert_rowid."""
if parameters is None:
parameters = []
return await self._execute(self._execute_insert, sql, parameters)
@contextmanager
async def execute_fetchall(
self, sql: str, parameters: Iterable[Any] = None
) -> Iterable[sqlite3.Row]:
"""Helper to execute a query and return all the data."""
if parameters is None:
parameters = []
return await self._execute(self._execute_fetchall, sql, parameters)
@contextmanager
async def executemany(
self, sql: str, parameters: Iterable[Iterable[Any]]
) -> Cursor:
"""Helper to create a cursor and execute the given multiquery."""
cursor = await self._execute(self._conn.executemany, sql, parameters)
return Cursor(self, cursor)
@contextmanager
async def executescript(self, sql_script: str) -> Cursor:
"""Helper to create a cursor and execute a user script."""
cursor = await self._execute(self._conn.executescript, sql_script)
return Cursor(self, cursor)
async def interrupt(self) -> None:
"""Interrupt pending queries."""
return self._conn.interrupt()
@property
def in_transaction(self) -> bool:
return self._conn.in_transaction
@property
def isolation_level(self) -> str:
return self._conn.isolation_level
@isolation_level.setter
def isolation_level(self, value: str) -> None:
self._conn.isolation_level = value
@property
def row_factory(self) -> "Optional[Type]": # py3.5.2 compat (#24)
return self._conn.row_factory
@row_factory.setter
def row_factory(self, factory: "Optional[Type]") -> None: # py3.5.2 compat (#24)
self._conn.row_factory = factory
@property
def text_factory(self) -> Type:
return self._conn.text_factory
@text_factory.setter
def text_factory(self, factory: Type) -> None:
self._conn.text_factory = factory
@property
def total_changes(self) -> int:
return self._conn.total_changes
async def enable_load_extension(self, value: bool) -> None:
await self._execute(self._conn.enable_load_extension, value) # type: ignore
async def load_extension(self, path: str):
await self._execute(self._conn.load_extension, path) # type: ignore
|
jreese/aiosqlite
|
aiosqlite/core.py
|
Cursor._execute
|
python
|
async def _execute(self, fn, *args, **kwargs):
return await self._conn._execute(fn, *args, **kwargs)
|
Execute the given function on the shared connection's thread.
|
train
|
https://github.com/jreese/aiosqlite/blob/3f548b568b8db9a57022b6e2c9627f5cdefb983f/aiosqlite/core.py#L42-L44
| null |
class Cursor:
def __init__(self, conn: "Connection", cursor: sqlite3.Cursor) -> None:
self._conn = conn
self._cursor = cursor
def __aiter__(self) -> "Cursor":
"""The cursor proxy is also an async iterator."""
return self
async def __anext__(self) -> sqlite3.Row:
"""Use `cursor.fetchone()` to provide an async iterable."""
row = await self.fetchone()
if row is None:
raise StopAsyncIteration
return row
async def execute(self, sql: str, parameters: Iterable[Any] = None) -> None:
"""Execute the given query."""
if parameters is None:
parameters = []
await self._execute(self._cursor.execute, sql, parameters)
async def executemany(self, sql: str, parameters: Iterable[Iterable[Any]]) -> None:
"""Execute the given multiquery."""
await self._execute(self._cursor.executemany, sql, parameters)
async def executescript(self, sql_script: str) -> None:
"""Execute a user script."""
await self._execute(self._cursor.executescript, sql_script)
async def fetchone(self) -> Optional[sqlite3.Row]:
"""Fetch a single row."""
return await self._execute(self._cursor.fetchone)
async def fetchmany(self, size: int = None) -> Iterable[sqlite3.Row]:
"""Fetch up to `cursor.arraysize` number of rows."""
args = () # type: Tuple[int, ...]
if size is not None:
args = (size,)
return await self._execute(self._cursor.fetchmany, *args)
async def fetchall(self) -> Iterable[sqlite3.Row]:
"""Fetch all remaining rows."""
return await self._execute(self._cursor.fetchall)
async def close(self) -> None:
"""Close the cursor."""
await self._execute(self._cursor.close)
@property
def rowcount(self) -> int:
return self._cursor.rowcount
@property
def lastrowid(self) -> int:
return self._cursor.lastrowid
@property
def arraysize(self) -> int:
return self._cursor.arraysize
@arraysize.setter
def arraysize(self, value: int) -> None:
self._cursor.arraysize = value
@property
def description(self) -> Tuple[Tuple]:
return self._cursor.description
@property
def connection(self) -> sqlite3.Connection:
return self._cursor.connection
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self.close()
|
jreese/aiosqlite
|
aiosqlite/core.py
|
Cursor.execute
|
python
|
async def execute(self, sql: str, parameters: Iterable[Any] = None) -> None:
if parameters is None:
parameters = []
await self._execute(self._cursor.execute, sql, parameters)
|
Execute the given query.
|
train
|
https://github.com/jreese/aiosqlite/blob/3f548b568b8db9a57022b6e2c9627f5cdefb983f/aiosqlite/core.py#L46-L50
|
[
"async def _execute(self, fn, *args, **kwargs):\n \"\"\"Execute the given function on the shared connection's thread.\"\"\"\n return await self._conn._execute(fn, *args, **kwargs)\n"
] |
class Cursor:
def __init__(self, conn: "Connection", cursor: sqlite3.Cursor) -> None:
self._conn = conn
self._cursor = cursor
def __aiter__(self) -> "Cursor":
"""The cursor proxy is also an async iterator."""
return self
async def __anext__(self) -> sqlite3.Row:
"""Use `cursor.fetchone()` to provide an async iterable."""
row = await self.fetchone()
if row is None:
raise StopAsyncIteration
return row
async def _execute(self, fn, *args, **kwargs):
"""Execute the given function on the shared connection's thread."""
return await self._conn._execute(fn, *args, **kwargs)
async def executemany(self, sql: str, parameters: Iterable[Iterable[Any]]) -> None:
"""Execute the given multiquery."""
await self._execute(self._cursor.executemany, sql, parameters)
async def executescript(self, sql_script: str) -> None:
"""Execute a user script."""
await self._execute(self._cursor.executescript, sql_script)
async def fetchone(self) -> Optional[sqlite3.Row]:
"""Fetch a single row."""
return await self._execute(self._cursor.fetchone)
async def fetchmany(self, size: int = None) -> Iterable[sqlite3.Row]:
"""Fetch up to `cursor.arraysize` number of rows."""
args = () # type: Tuple[int, ...]
if size is not None:
args = (size,)
return await self._execute(self._cursor.fetchmany, *args)
async def fetchall(self) -> Iterable[sqlite3.Row]:
"""Fetch all remaining rows."""
return await self._execute(self._cursor.fetchall)
async def close(self) -> None:
"""Close the cursor."""
await self._execute(self._cursor.close)
@property
def rowcount(self) -> int:
return self._cursor.rowcount
@property
def lastrowid(self) -> int:
return self._cursor.lastrowid
@property
def arraysize(self) -> int:
return self._cursor.arraysize
@arraysize.setter
def arraysize(self, value: int) -> None:
self._cursor.arraysize = value
@property
def description(self) -> Tuple[Tuple]:
return self._cursor.description
@property
def connection(self) -> sqlite3.Connection:
return self._cursor.connection
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self.close()
|
jreese/aiosqlite
|
aiosqlite/core.py
|
Cursor.executemany
|
python
|
async def executemany(self, sql: str, parameters: Iterable[Iterable[Any]]) -> None:
await self._execute(self._cursor.executemany, sql, parameters)
|
Execute the given multiquery.
|
train
|
https://github.com/jreese/aiosqlite/blob/3f548b568b8db9a57022b6e2c9627f5cdefb983f/aiosqlite/core.py#L52-L54
|
[
"async def _execute(self, fn, *args, **kwargs):\n \"\"\"Execute the given function on the shared connection's thread.\"\"\"\n return await self._conn._execute(fn, *args, **kwargs)\n"
] |
class Cursor:
def __init__(self, conn: "Connection", cursor: sqlite3.Cursor) -> None:
self._conn = conn
self._cursor = cursor
def __aiter__(self) -> "Cursor":
"""The cursor proxy is also an async iterator."""
return self
async def __anext__(self) -> sqlite3.Row:
"""Use `cursor.fetchone()` to provide an async iterable."""
row = await self.fetchone()
if row is None:
raise StopAsyncIteration
return row
async def _execute(self, fn, *args, **kwargs):
"""Execute the given function on the shared connection's thread."""
return await self._conn._execute(fn, *args, **kwargs)
async def execute(self, sql: str, parameters: Iterable[Any] = None) -> None:
"""Execute the given query."""
if parameters is None:
parameters = []
await self._execute(self._cursor.execute, sql, parameters)
async def executescript(self, sql_script: str) -> None:
"""Execute a user script."""
await self._execute(self._cursor.executescript, sql_script)
async def fetchone(self) -> Optional[sqlite3.Row]:
"""Fetch a single row."""
return await self._execute(self._cursor.fetchone)
async def fetchmany(self, size: int = None) -> Iterable[sqlite3.Row]:
"""Fetch up to `cursor.arraysize` number of rows."""
args = () # type: Tuple[int, ...]
if size is not None:
args = (size,)
return await self._execute(self._cursor.fetchmany, *args)
async def fetchall(self) -> Iterable[sqlite3.Row]:
"""Fetch all remaining rows."""
return await self._execute(self._cursor.fetchall)
async def close(self) -> None:
"""Close the cursor."""
await self._execute(self._cursor.close)
@property
def rowcount(self) -> int:
return self._cursor.rowcount
@property
def lastrowid(self) -> int:
return self._cursor.lastrowid
@property
def arraysize(self) -> int:
return self._cursor.arraysize
@arraysize.setter
def arraysize(self, value: int) -> None:
self._cursor.arraysize = value
@property
def description(self) -> Tuple[Tuple]:
return self._cursor.description
@property
def connection(self) -> sqlite3.Connection:
return self._cursor.connection
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self.close()
|
jreese/aiosqlite
|
aiosqlite/core.py
|
Cursor.executescript
|
python
|
async def executescript(self, sql_script: str) -> None:
await self._execute(self._cursor.executescript, sql_script)
|
Execute a user script.
|
train
|
https://github.com/jreese/aiosqlite/blob/3f548b568b8db9a57022b6e2c9627f5cdefb983f/aiosqlite/core.py#L56-L58
|
[
"async def _execute(self, fn, *args, **kwargs):\n \"\"\"Execute the given function on the shared connection's thread.\"\"\"\n return await self._conn._execute(fn, *args, **kwargs)\n"
] |
class Cursor:
def __init__(self, conn: "Connection", cursor: sqlite3.Cursor) -> None:
self._conn = conn
self._cursor = cursor
def __aiter__(self) -> "Cursor":
"""The cursor proxy is also an async iterator."""
return self
async def __anext__(self) -> sqlite3.Row:
"""Use `cursor.fetchone()` to provide an async iterable."""
row = await self.fetchone()
if row is None:
raise StopAsyncIteration
return row
async def _execute(self, fn, *args, **kwargs):
"""Execute the given function on the shared connection's thread."""
return await self._conn._execute(fn, *args, **kwargs)
async def execute(self, sql: str, parameters: Iterable[Any] = None) -> None:
"""Execute the given query."""
if parameters is None:
parameters = []
await self._execute(self._cursor.execute, sql, parameters)
async def executemany(self, sql: str, parameters: Iterable[Iterable[Any]]) -> None:
"""Execute the given multiquery."""
await self._execute(self._cursor.executemany, sql, parameters)
async def fetchone(self) -> Optional[sqlite3.Row]:
"""Fetch a single row."""
return await self._execute(self._cursor.fetchone)
async def fetchmany(self, size: int = None) -> Iterable[sqlite3.Row]:
"""Fetch up to `cursor.arraysize` number of rows."""
args = () # type: Tuple[int, ...]
if size is not None:
args = (size,)
return await self._execute(self._cursor.fetchmany, *args)
async def fetchall(self) -> Iterable[sqlite3.Row]:
"""Fetch all remaining rows."""
return await self._execute(self._cursor.fetchall)
async def close(self) -> None:
"""Close the cursor."""
await self._execute(self._cursor.close)
@property
def rowcount(self) -> int:
return self._cursor.rowcount
@property
def lastrowid(self) -> int:
return self._cursor.lastrowid
@property
def arraysize(self) -> int:
return self._cursor.arraysize
@arraysize.setter
def arraysize(self, value: int) -> None:
self._cursor.arraysize = value
@property
def description(self) -> Tuple[Tuple]:
return self._cursor.description
@property
def connection(self) -> sqlite3.Connection:
return self._cursor.connection
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self.close()
|
jreese/aiosqlite
|
aiosqlite/core.py
|
Cursor.fetchone
|
python
|
async def fetchone(self) -> Optional[sqlite3.Row]:
return await self._execute(self._cursor.fetchone)
|
Fetch a single row.
|
train
|
https://github.com/jreese/aiosqlite/blob/3f548b568b8db9a57022b6e2c9627f5cdefb983f/aiosqlite/core.py#L60-L62
|
[
"async def _execute(self, fn, *args, **kwargs):\n \"\"\"Execute the given function on the shared connection's thread.\"\"\"\n return await self._conn._execute(fn, *args, **kwargs)\n"
] |
class Cursor:
def __init__(self, conn: "Connection", cursor: sqlite3.Cursor) -> None:
self._conn = conn
self._cursor = cursor
def __aiter__(self) -> "Cursor":
"""The cursor proxy is also an async iterator."""
return self
async def __anext__(self) -> sqlite3.Row:
"""Use `cursor.fetchone()` to provide an async iterable."""
row = await self.fetchone()
if row is None:
raise StopAsyncIteration
return row
async def _execute(self, fn, *args, **kwargs):
"""Execute the given function on the shared connection's thread."""
return await self._conn._execute(fn, *args, **kwargs)
async def execute(self, sql: str, parameters: Iterable[Any] = None) -> None:
"""Execute the given query."""
if parameters is None:
parameters = []
await self._execute(self._cursor.execute, sql, parameters)
async def executemany(self, sql: str, parameters: Iterable[Iterable[Any]]) -> None:
"""Execute the given multiquery."""
await self._execute(self._cursor.executemany, sql, parameters)
async def executescript(self, sql_script: str) -> None:
"""Execute a user script."""
await self._execute(self._cursor.executescript, sql_script)
async def fetchmany(self, size: int = None) -> Iterable[sqlite3.Row]:
"""Fetch up to `cursor.arraysize` number of rows."""
args = () # type: Tuple[int, ...]
if size is not None:
args = (size,)
return await self._execute(self._cursor.fetchmany, *args)
async def fetchall(self) -> Iterable[sqlite3.Row]:
"""Fetch all remaining rows."""
return await self._execute(self._cursor.fetchall)
async def close(self) -> None:
"""Close the cursor."""
await self._execute(self._cursor.close)
@property
def rowcount(self) -> int:
return self._cursor.rowcount
@property
def lastrowid(self) -> int:
return self._cursor.lastrowid
@property
def arraysize(self) -> int:
return self._cursor.arraysize
@arraysize.setter
def arraysize(self, value: int) -> None:
self._cursor.arraysize = value
@property
def description(self) -> Tuple[Tuple]:
return self._cursor.description
@property
def connection(self) -> sqlite3.Connection:
return self._cursor.connection
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self.close()
|
jreese/aiosqlite
|
aiosqlite/core.py
|
Cursor.fetchmany
|
python
|
async def fetchmany(self, size: int = None) -> Iterable[sqlite3.Row]:
args = () # type: Tuple[int, ...]
if size is not None:
args = (size,)
return await self._execute(self._cursor.fetchmany, *args)
|
Fetch up to `cursor.arraysize` number of rows.
|
train
|
https://github.com/jreese/aiosqlite/blob/3f548b568b8db9a57022b6e2c9627f5cdefb983f/aiosqlite/core.py#L64-L69
|
[
"async def _execute(self, fn, *args, **kwargs):\n \"\"\"Execute the given function on the shared connection's thread.\"\"\"\n return await self._conn._execute(fn, *args, **kwargs)\n"
] |
class Cursor:
def __init__(self, conn: "Connection", cursor: sqlite3.Cursor) -> None:
self._conn = conn
self._cursor = cursor
def __aiter__(self) -> "Cursor":
"""The cursor proxy is also an async iterator."""
return self
async def __anext__(self) -> sqlite3.Row:
"""Use `cursor.fetchone()` to provide an async iterable."""
row = await self.fetchone()
if row is None:
raise StopAsyncIteration
return row
async def _execute(self, fn, *args, **kwargs):
"""Execute the given function on the shared connection's thread."""
return await self._conn._execute(fn, *args, **kwargs)
async def execute(self, sql: str, parameters: Iterable[Any] = None) -> None:
"""Execute the given query."""
if parameters is None:
parameters = []
await self._execute(self._cursor.execute, sql, parameters)
async def executemany(self, sql: str, parameters: Iterable[Iterable[Any]]) -> None:
"""Execute the given multiquery."""
await self._execute(self._cursor.executemany, sql, parameters)
async def executescript(self, sql_script: str) -> None:
"""Execute a user script."""
await self._execute(self._cursor.executescript, sql_script)
async def fetchone(self) -> Optional[sqlite3.Row]:
"""Fetch a single row."""
return await self._execute(self._cursor.fetchone)
async def fetchall(self) -> Iterable[sqlite3.Row]:
"""Fetch all remaining rows."""
return await self._execute(self._cursor.fetchall)
async def close(self) -> None:
"""Close the cursor."""
await self._execute(self._cursor.close)
@property
def rowcount(self) -> int:
return self._cursor.rowcount
@property
def lastrowid(self) -> int:
return self._cursor.lastrowid
@property
def arraysize(self) -> int:
return self._cursor.arraysize
@arraysize.setter
def arraysize(self, value: int) -> None:
self._cursor.arraysize = value
@property
def description(self) -> Tuple[Tuple]:
return self._cursor.description
@property
def connection(self) -> sqlite3.Connection:
return self._cursor.connection
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self.close()
|
jreese/aiosqlite
|
aiosqlite/core.py
|
Cursor.fetchall
|
python
|
async def fetchall(self) -> Iterable[sqlite3.Row]:
return await self._execute(self._cursor.fetchall)
|
Fetch all remaining rows.
|
train
|
https://github.com/jreese/aiosqlite/blob/3f548b568b8db9a57022b6e2c9627f5cdefb983f/aiosqlite/core.py#L71-L73
|
[
"async def _execute(self, fn, *args, **kwargs):\n \"\"\"Execute the given function on the shared connection's thread.\"\"\"\n return await self._conn._execute(fn, *args, **kwargs)\n"
] |
class Cursor:
def __init__(self, conn: "Connection", cursor: sqlite3.Cursor) -> None:
self._conn = conn
self._cursor = cursor
def __aiter__(self) -> "Cursor":
"""The cursor proxy is also an async iterator."""
return self
async def __anext__(self) -> sqlite3.Row:
"""Use `cursor.fetchone()` to provide an async iterable."""
row = await self.fetchone()
if row is None:
raise StopAsyncIteration
return row
async def _execute(self, fn, *args, **kwargs):
"""Execute the given function on the shared connection's thread."""
return await self._conn._execute(fn, *args, **kwargs)
async def execute(self, sql: str, parameters: Iterable[Any] = None) -> None:
"""Execute the given query."""
if parameters is None:
parameters = []
await self._execute(self._cursor.execute, sql, parameters)
async def executemany(self, sql: str, parameters: Iterable[Iterable[Any]]) -> None:
"""Execute the given multiquery."""
await self._execute(self._cursor.executemany, sql, parameters)
async def executescript(self, sql_script: str) -> None:
"""Execute a user script."""
await self._execute(self._cursor.executescript, sql_script)
async def fetchone(self) -> Optional[sqlite3.Row]:
"""Fetch a single row."""
return await self._execute(self._cursor.fetchone)
async def fetchmany(self, size: int = None) -> Iterable[sqlite3.Row]:
"""Fetch up to `cursor.arraysize` number of rows."""
args = () # type: Tuple[int, ...]
if size is not None:
args = (size,)
return await self._execute(self._cursor.fetchmany, *args)
async def close(self) -> None:
"""Close the cursor."""
await self._execute(self._cursor.close)
@property
def rowcount(self) -> int:
return self._cursor.rowcount
@property
def lastrowid(self) -> int:
return self._cursor.lastrowid
@property
def arraysize(self) -> int:
return self._cursor.arraysize
@arraysize.setter
def arraysize(self, value: int) -> None:
self._cursor.arraysize = value
@property
def description(self) -> Tuple[Tuple]:
return self._cursor.description
@property
def connection(self) -> sqlite3.Connection:
return self._cursor.connection
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self.close()
|
jreese/aiosqlite
|
aiosqlite/core.py
|
Connection.run
|
python
|
def run(self) -> None:
while self._running:
try:
future, function = self._tx.get(timeout=0.1)
except Empty:
continue
try:
LOG.debug("executing %s", function)
result = function()
LOG.debug("returning %s", result)
self._loop.call_soon_threadsafe(future.set_result, result)
except BaseException as e:
LOG.exception("returning exception %s", e)
self._loop.call_soon_threadsafe(future.set_exception, e)
|
Execute function calls on a separate thread.
|
train
|
https://github.com/jreese/aiosqlite/blob/3f548b568b8db9a57022b6e2c9627f5cdefb983f/aiosqlite/core.py#L143-L158
| null |
class Connection(Thread):
def __init__(
self,
connector: Callable[[], sqlite3.Connection],
loop: asyncio.AbstractEventLoop,
) -> None:
super().__init__()
self._running = True
self._connection = None # type: Optional[sqlite3.Connection]
self._connector = connector
self._loop = loop
self._tx = Queue() # type: Queue
@property
def _conn(self) -> sqlite3.Connection:
if self._connection is None:
raise ValueError("no active connection")
return self._connection
def _execute_insert(
self, sql: str, parameters: Iterable[Any]
) -> Optional[sqlite3.Row]:
cursor = self._conn.execute(sql, parameters)
cursor.execute("SELECT last_insert_rowid()")
return cursor.fetchone()
def _execute_fetchall(
self, sql: str, parameters: Iterable[Any]
) -> Iterable[sqlite3.Row]:
cursor = self._conn.execute(sql, parameters)
return cursor.fetchall()
async def _execute(self, fn, *args, **kwargs):
"""Queue a function with the given arguments for execution."""
function = partial(fn, *args, **kwargs)
future = self._loop.create_future()
self._tx.put_nowait((future, function))
return await future
async def _connect(self) -> "Connection":
"""Connect to the actual sqlite database."""
if self._connection is None:
self._connection = await self._execute(self._connector)
return self
def __await__(self) -> Generator[Any, None, "Connection"]:
self.start()
return self._connect().__await__()
async def __aenter__(self) -> "Connection":
return await self
async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
await self.close()
@contextmanager
async def cursor(self) -> Cursor:
"""Create an aiosqlite cursor wrapping a sqlite3 cursor object."""
return Cursor(self, await self._execute(self._conn.cursor))
async def commit(self) -> None:
"""Commit the current transaction."""
await self._execute(self._conn.commit)
async def rollback(self) -> None:
"""Roll back the current transaction."""
await self._execute(self._conn.rollback)
async def close(self) -> None:
"""Complete queued queries/cursors and close the connection."""
await self._execute(self._conn.close)
self._running = False
self._connection = None
@contextmanager
async def execute(self, sql: str, parameters: Iterable[Any] = None) -> Cursor:
"""Helper to create a cursor and execute the given query."""
if parameters is None:
parameters = []
cursor = await self._execute(self._conn.execute, sql, parameters)
return Cursor(self, cursor)
@contextmanager
async def execute_insert(
self, sql: str, parameters: Iterable[Any] = None
) -> Optional[sqlite3.Row]:
"""Helper to insert and get the last_insert_rowid."""
if parameters is None:
parameters = []
return await self._execute(self._execute_insert, sql, parameters)
@contextmanager
async def execute_fetchall(
self, sql: str, parameters: Iterable[Any] = None
) -> Iterable[sqlite3.Row]:
"""Helper to execute a query and return all the data."""
if parameters is None:
parameters = []
return await self._execute(self._execute_fetchall, sql, parameters)
@contextmanager
async def executemany(
self, sql: str, parameters: Iterable[Iterable[Any]]
) -> Cursor:
"""Helper to create a cursor and execute the given multiquery."""
cursor = await self._execute(self._conn.executemany, sql, parameters)
return Cursor(self, cursor)
@contextmanager
async def executescript(self, sql_script: str) -> Cursor:
"""Helper to create a cursor and execute a user script."""
cursor = await self._execute(self._conn.executescript, sql_script)
return Cursor(self, cursor)
async def interrupt(self) -> None:
"""Interrupt pending queries."""
return self._conn.interrupt()
@property
def in_transaction(self) -> bool:
return self._conn.in_transaction
@property
def isolation_level(self) -> str:
return self._conn.isolation_level
@isolation_level.setter
def isolation_level(self, value: str) -> None:
self._conn.isolation_level = value
@property
def row_factory(self) -> "Optional[Type]": # py3.5.2 compat (#24)
return self._conn.row_factory
@row_factory.setter
def row_factory(self, factory: "Optional[Type]") -> None: # py3.5.2 compat (#24)
self._conn.row_factory = factory
@property
def text_factory(self) -> Type:
return self._conn.text_factory
@text_factory.setter
def text_factory(self, factory: Type) -> None:
self._conn.text_factory = factory
@property
def total_changes(self) -> int:
return self._conn.total_changes
async def enable_load_extension(self, value: bool) -> None:
await self._execute(self._conn.enable_load_extension, value) # type: ignore
async def load_extension(self, path: str):
await self._execute(self._conn.load_extension, path) # type: ignore
|
jreese/aiosqlite
|
aiosqlite/core.py
|
Connection._execute
|
python
|
async def _execute(self, fn, *args, **kwargs):
function = partial(fn, *args, **kwargs)
future = self._loop.create_future()
self._tx.put_nowait((future, function))
return await future
|
Queue a function with the given arguments for execution.
|
train
|
https://github.com/jreese/aiosqlite/blob/3f548b568b8db9a57022b6e2c9627f5cdefb983f/aiosqlite/core.py#L160-L167
| null |
class Connection(Thread):
def __init__(
self,
connector: Callable[[], sqlite3.Connection],
loop: asyncio.AbstractEventLoop,
) -> None:
super().__init__()
self._running = True
self._connection = None # type: Optional[sqlite3.Connection]
self._connector = connector
self._loop = loop
self._tx = Queue() # type: Queue
@property
def _conn(self) -> sqlite3.Connection:
if self._connection is None:
raise ValueError("no active connection")
return self._connection
def _execute_insert(
self, sql: str, parameters: Iterable[Any]
) -> Optional[sqlite3.Row]:
cursor = self._conn.execute(sql, parameters)
cursor.execute("SELECT last_insert_rowid()")
return cursor.fetchone()
def _execute_fetchall(
self, sql: str, parameters: Iterable[Any]
) -> Iterable[sqlite3.Row]:
cursor = self._conn.execute(sql, parameters)
return cursor.fetchall()
def run(self) -> None:
"""Execute function calls on a separate thread."""
while self._running:
try:
future, function = self._tx.get(timeout=0.1)
except Empty:
continue
try:
LOG.debug("executing %s", function)
result = function()
LOG.debug("returning %s", result)
self._loop.call_soon_threadsafe(future.set_result, result)
except BaseException as e:
LOG.exception("returning exception %s", e)
self._loop.call_soon_threadsafe(future.set_exception, e)
async def _connect(self) -> "Connection":
"""Connect to the actual sqlite database."""
if self._connection is None:
self._connection = await self._execute(self._connector)
return self
def __await__(self) -> Generator[Any, None, "Connection"]:
self.start()
return self._connect().__await__()
async def __aenter__(self) -> "Connection":
return await self
async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
await self.close()
@contextmanager
async def cursor(self) -> Cursor:
"""Create an aiosqlite cursor wrapping a sqlite3 cursor object."""
return Cursor(self, await self._execute(self._conn.cursor))
async def commit(self) -> None:
"""Commit the current transaction."""
await self._execute(self._conn.commit)
async def rollback(self) -> None:
"""Roll back the current transaction."""
await self._execute(self._conn.rollback)
async def close(self) -> None:
"""Complete queued queries/cursors and close the connection."""
await self._execute(self._conn.close)
self._running = False
self._connection = None
@contextmanager
async def execute(self, sql: str, parameters: Iterable[Any] = None) -> Cursor:
"""Helper to create a cursor and execute the given query."""
if parameters is None:
parameters = []
cursor = await self._execute(self._conn.execute, sql, parameters)
return Cursor(self, cursor)
@contextmanager
async def execute_insert(
self, sql: str, parameters: Iterable[Any] = None
) -> Optional[sqlite3.Row]:
"""Helper to insert and get the last_insert_rowid."""
if parameters is None:
parameters = []
return await self._execute(self._execute_insert, sql, parameters)
@contextmanager
async def execute_fetchall(
self, sql: str, parameters: Iterable[Any] = None
) -> Iterable[sqlite3.Row]:
"""Helper to execute a query and return all the data."""
if parameters is None:
parameters = []
return await self._execute(self._execute_fetchall, sql, parameters)
@contextmanager
async def executemany(
self, sql: str, parameters: Iterable[Iterable[Any]]
) -> Cursor:
"""Helper to create a cursor and execute the given multiquery."""
cursor = await self._execute(self._conn.executemany, sql, parameters)
return Cursor(self, cursor)
@contextmanager
async def executescript(self, sql_script: str) -> Cursor:
"""Helper to create a cursor and execute a user script."""
cursor = await self._execute(self._conn.executescript, sql_script)
return Cursor(self, cursor)
async def interrupt(self) -> None:
"""Interrupt pending queries."""
return self._conn.interrupt()
@property
def in_transaction(self) -> bool:
return self._conn.in_transaction
@property
def isolation_level(self) -> str:
return self._conn.isolation_level
@isolation_level.setter
def isolation_level(self, value: str) -> None:
self._conn.isolation_level = value
@property
def row_factory(self) -> "Optional[Type]": # py3.5.2 compat (#24)
return self._conn.row_factory
@row_factory.setter
def row_factory(self, factory: "Optional[Type]") -> None: # py3.5.2 compat (#24)
self._conn.row_factory = factory
@property
def text_factory(self) -> Type:
return self._conn.text_factory
@text_factory.setter
def text_factory(self, factory: Type) -> None:
self._conn.text_factory = factory
@property
def total_changes(self) -> int:
return self._conn.total_changes
async def enable_load_extension(self, value: bool) -> None:
await self._execute(self._conn.enable_load_extension, value) # type: ignore
async def load_extension(self, path: str):
await self._execute(self._conn.load_extension, path) # type: ignore
|
jreese/aiosqlite
|
aiosqlite/core.py
|
Connection._connect
|
python
|
async def _connect(self) -> "Connection":
if self._connection is None:
self._connection = await self._execute(self._connector)
return self
|
Connect to the actual sqlite database.
|
train
|
https://github.com/jreese/aiosqlite/blob/3f548b568b8db9a57022b6e2c9627f5cdefb983f/aiosqlite/core.py#L169-L173
|
[
"async def _execute(self, fn, *args, **kwargs):\n \"\"\"Queue a function with the given arguments for execution.\"\"\"\n function = partial(fn, *args, **kwargs)\n future = self._loop.create_future()\n\n self._tx.put_nowait((future, function))\n\n return await future\n"
] |
class Connection(Thread):
def __init__(
self,
connector: Callable[[], sqlite3.Connection],
loop: asyncio.AbstractEventLoop,
) -> None:
super().__init__()
self._running = True
self._connection = None # type: Optional[sqlite3.Connection]
self._connector = connector
self._loop = loop
self._tx = Queue() # type: Queue
@property
def _conn(self) -> sqlite3.Connection:
if self._connection is None:
raise ValueError("no active connection")
return self._connection
def _execute_insert(
self, sql: str, parameters: Iterable[Any]
) -> Optional[sqlite3.Row]:
cursor = self._conn.execute(sql, parameters)
cursor.execute("SELECT last_insert_rowid()")
return cursor.fetchone()
def _execute_fetchall(
self, sql: str, parameters: Iterable[Any]
) -> Iterable[sqlite3.Row]:
cursor = self._conn.execute(sql, parameters)
return cursor.fetchall()
def run(self) -> None:
"""Execute function calls on a separate thread."""
while self._running:
try:
future, function = self._tx.get(timeout=0.1)
except Empty:
continue
try:
LOG.debug("executing %s", function)
result = function()
LOG.debug("returning %s", result)
self._loop.call_soon_threadsafe(future.set_result, result)
except BaseException as e:
LOG.exception("returning exception %s", e)
self._loop.call_soon_threadsafe(future.set_exception, e)
async def _execute(self, fn, *args, **kwargs):
"""Queue a function with the given arguments for execution."""
function = partial(fn, *args, **kwargs)
future = self._loop.create_future()
self._tx.put_nowait((future, function))
return await future
def __await__(self) -> Generator[Any, None, "Connection"]:
self.start()
return self._connect().__await__()
async def __aenter__(self) -> "Connection":
return await self
async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
await self.close()
@contextmanager
async def cursor(self) -> Cursor:
"""Create an aiosqlite cursor wrapping a sqlite3 cursor object."""
return Cursor(self, await self._execute(self._conn.cursor))
async def commit(self) -> None:
"""Commit the current transaction."""
await self._execute(self._conn.commit)
async def rollback(self) -> None:
"""Roll back the current transaction."""
await self._execute(self._conn.rollback)
async def close(self) -> None:
"""Complete queued queries/cursors and close the connection."""
await self._execute(self._conn.close)
self._running = False
self._connection = None
@contextmanager
async def execute(self, sql: str, parameters: Iterable[Any] = None) -> Cursor:
"""Helper to create a cursor and execute the given query."""
if parameters is None:
parameters = []
cursor = await self._execute(self._conn.execute, sql, parameters)
return Cursor(self, cursor)
@contextmanager
async def execute_insert(
self, sql: str, parameters: Iterable[Any] = None
) -> Optional[sqlite3.Row]:
"""Helper to insert and get the last_insert_rowid."""
if parameters is None:
parameters = []
return await self._execute(self._execute_insert, sql, parameters)
@contextmanager
async def execute_fetchall(
self, sql: str, parameters: Iterable[Any] = None
) -> Iterable[sqlite3.Row]:
"""Helper to execute a query and return all the data."""
if parameters is None:
parameters = []
return await self._execute(self._execute_fetchall, sql, parameters)
@contextmanager
async def executemany(
self, sql: str, parameters: Iterable[Iterable[Any]]
) -> Cursor:
"""Helper to create a cursor and execute the given multiquery."""
cursor = await self._execute(self._conn.executemany, sql, parameters)
return Cursor(self, cursor)
@contextmanager
async def executescript(self, sql_script: str) -> Cursor:
"""Helper to create a cursor and execute a user script."""
cursor = await self._execute(self._conn.executescript, sql_script)
return Cursor(self, cursor)
async def interrupt(self) -> None:
"""Interrupt pending queries."""
return self._conn.interrupt()
@property
def in_transaction(self) -> bool:
return self._conn.in_transaction
@property
def isolation_level(self) -> str:
return self._conn.isolation_level
@isolation_level.setter
def isolation_level(self, value: str) -> None:
self._conn.isolation_level = value
@property
def row_factory(self) -> "Optional[Type]": # py3.5.2 compat (#24)
return self._conn.row_factory
@row_factory.setter
def row_factory(self, factory: "Optional[Type]") -> None: # py3.5.2 compat (#24)
self._conn.row_factory = factory
@property
def text_factory(self) -> Type:
return self._conn.text_factory
@text_factory.setter
def text_factory(self, factory: Type) -> None:
self._conn.text_factory = factory
@property
def total_changes(self) -> int:
return self._conn.total_changes
async def enable_load_extension(self, value: bool) -> None:
await self._execute(self._conn.enable_load_extension, value) # type: ignore
async def load_extension(self, path: str):
await self._execute(self._conn.load_extension, path) # type: ignore
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.